// (dataset-extraction artifact removed — "text stringlengths … | kind …" table header was not part of the source)
import {toID, BasicEffect} from './dex-data';
/**
 * Ability slots for a species: slot `0` is the primary ability, `1` the
 * optional second ability, `H` the hidden ability (stripped for gens < 5 in
 * `DexSpecies.getByID`), and `S` a special/event ability (promoted to slot 0
 * for special event formes in `DexSpecies.getByID`).
 */
interface SpeciesAbility {
	0: string;
	1?: string;
	H?: string;
	S?: string;
}
/** Legendary/mythical status tags carried by a species (see `Species#tags`). */
type SpeciesTag = "Mythical" | "Restricted Legendary" | "Sub-Legendary";
/**
 * Raw pokedex entry. Only the fields below are required; any other
 * `Species` property may be supplied and is inherited via `Partial<Species>`.
 */
export interface SpeciesData extends Partial<Species> {
	name: string;
	/** National Dex number */
	num: number;
	types: string[];
	abilities: SpeciesAbility;
	baseStats: StatsTable;
	eggGroups: string[];
	weightkg: number;
}
/**
 * A mod's pokedex entry: either a complete entry, or a partial patch
 * (everything except `name`) flagged with `inherit: true`.
 */
export type ModdedSpeciesData = SpeciesData | Partial<Omit<SpeciesData, 'name'>> & {inherit: true};
/**
 * Per-format metadata for a species: tiering, nonstandard status, and the
 * move pools used by random battle formats.
 */
export interface SpeciesFormatsData {
	comboMoves?: readonly string[];
	doublesTier?: TierTypes.Doubles | TierTypes.Other;
	essentialMove?: string;
	exclusiveMoves?: readonly string[];
	gmaxUnreleased?: boolean;
	isNonstandard?: Nonstandard | null;
	randomBattleMoves?: readonly string[];
	randomBattleLevel?: number;
	randomDoubleBattleMoves?: readonly string[];
	randomDoubleBattleLevel?: number;
	randomBattleNoDynamaxMoves?: readonly string[];
	tier?: TierTypes.Singles | TierTypes.Other;
}
/** A mod's formats-data entry; `inherit: true` patches the base entry. */
export type ModdedSpeciesFormatsData = SpeciesFormatsData & {inherit?: true};
/** Raw learnset entry: learnable moves, event availability, and encounters. */
export interface LearnsetData {
	/** Maps move ID to the list of ways the move can be learned. */
	learnset?: {[moveid: string]: MoveSource[]};
	eventData?: EventInfo[];
	eventOnly?: boolean;
	encounters?: EventInfo[];
	exists?: boolean;
}
/** A mod's learnset entry; `inherit: true` patches the base entry. */
export type ModdedLearnsetData = LearnsetData & {inherit?: true};
export class Species extends BasicEffect implements Readonly<BasicEffect & SpeciesFormatsData> {
	declare readonly effectType: 'Pokemon';
	/**
	 * Species ID. Identical to ID. Note that this is the full ID, e.g.
	 * 'basculinbluestriped'. To get the base species ID, you need to
	 * manually read toID(species.baseSpecies).
	 */
	declare readonly id: ID;
	/**
	 * Name. Note that this is the full name with forme,
	 * e.g. 'Basculin-Blue-Striped'. To get the name without forme, see
	 * `species.baseSpecies`.
	 */
	declare readonly name: string;
	/**
	 * Base species. Species, but without the forme name.
	 *
	 * DO NOT ASSUME A POKEMON CAN TRANSFORM FROM `baseSpecies` TO
	 * `species`. USE `changesFrom` FOR THAT.
	 */
	readonly baseSpecies: string;
	/**
	 * Forme name. If the forme exists,
	 * `species.name === species.baseSpecies + '-' + species.forme`
	 *
	 * The games make a distinction between Forme (foorumu) (legendary Pokémon)
	 * and Form (sugata) (non-legendary Pokémon). PS does not use the same
	 * distinction – they're all "Forme" to PS, reflecting current community
	 * use of the term.
	 *
	 * This property only tracks non-cosmetic formes, and will be `''` for
	 * cosmetic formes.
	 */
	readonly forme: string;
	/**
	 * Base forme name (e.g. 'Altered' for Giratina).
	 */
	readonly baseForme: string;
	/**
	 * Other forms. List of names of cosmetic forms. These should have
	 * `aliases.js` aliases to this entry, but not have their own
	 * entry in `pokedex.js`.
	 */
	readonly cosmeticFormes?: string[];
	/**
	 * Other formes. List of names of formes, appears only on the base
	 * forme. Unlike forms, these have their own entry in `pokedex.js`.
	 */
	readonly otherFormes?: string[];
	/**
	 * List of forme speciesNames in the order they appear in the game data -
	 * the union of baseSpecies, otherFormes and cosmeticFormes. Appears only on
	 * the base species forme.
	 *
	 * A species's alternate formeindex may change from generation to generation -
	 * the forme with index N in Gen A is not guaranteed to be the same forme as the
	 * forme with index in Gen B.
	 *
	 * Gigantamaxes are not considered formes by the game (see data/FORMES.md - PS
	 * labels them as such for convenience) - Gigantamax "formes" are instead included at
	 * the end of the formeOrder list so as not to interfere with the correct index numbers.
	 */
	readonly formeOrder?: string[];
	/**
	 * Sprite ID. Basically the same as ID, but with a dash between
	 * species and forme.
	 */
	readonly spriteid: string;
	/** Abilities. */
	readonly abilities: SpeciesAbility;
	/** Types. */
	readonly types: string[];
	/** Added type (added by Trick-Or-Treat or Forest's Curse, but only listed in species by OMs). */
	readonly addedType?: string;
	/** Pre-evolution. '' if nothing evolves into this Pokemon. */
	readonly prevo: string;
	/** Evolutions. Array because many Pokemon have multiple evolutions. */
	readonly evos: string[];
	readonly evoType?: 'trade' | 'useItem' | 'levelMove' | 'levelExtra' | 'levelFriendship' | 'levelHold' | 'other';
	/** Evolution condition. falsy if doesn't evolve. */
	declare readonly evoCondition?: string;
	/** Evolution item. falsy if doesn't evolve. */
	declare readonly evoItem?: string;
	/** Evolution move. falsy if doesn't evolve. */
	readonly evoMove?: string;
	/** Evolution level. falsy if doesn't evolve. */
	readonly evoLevel?: number;
	/** Is NFE? True if this Pokemon can evolve (Mega evolution doesn't count). */
	readonly nfe: boolean;
	/** Egg groups. */
	readonly eggGroups: string[];
	/** True if this species can hatch from an Egg. */
	readonly canHatch: boolean;
	/**
	 * Gender. M = always male, F = always female, N = always
	 * genderless, '' = sometimes male sometimes female.
	 */
	readonly gender: GenderName;
	/** Gender ratio. Should add up to 1 unless genderless. */
	readonly genderRatio: {M: number, F: number};
	/** Base stats. */
	readonly baseStats: StatsTable;
	/** Max HP. Overrides usual HP calculations (for Shedinja). */
	readonly maxHP?: number;
	/** A Pokemon's Base Stat Total */
	readonly bst: number;
	/** Weight (in kg). Not valid for OMs; use weighthg / 10 instead. */
	readonly weightkg: number;
	/** Weight (in integer multiples of 0.1kg). */
	readonly weighthg: number;
	/** Height (in m). */
	readonly heightm: number;
	/** Color. */
	readonly color: string;
	/**
	 * Tags, boolean data. Currently just legendary/mythical status.
	 */
	readonly tags: SpeciesTag[];
	/** Does this Pokemon have an unreleased hidden ability? */
	readonly unreleasedHidden: boolean | 'Past';
	/**
	 * Is it only possible to get the hidden ability on a male pokemon?
	 * This is mainly relevant to Gen 5.
	 */
	readonly maleOnlyHidden: boolean;
	/** True if a pokemon is mega. */
	readonly isMega?: boolean;
	/** True if a pokemon is primal. */
	declare readonly isPrimal?: boolean;
	/** Name of its Gigantamax move, if a pokemon is capable of gigantamaxing. */
	readonly canGigantamax?: string;
	/** If this Pokemon can gigantamax, is its gigantamax released? */
	readonly gmaxUnreleased?: boolean;
	/** True if a Pokemon species is incapable of dynamaxing */
	readonly cannotDynamax?: boolean;
	/** What it transforms from, if a pokemon is a forme that is only accessible in battle. */
	readonly battleOnly?: string | string[];
	/** Required item. Do not use this directly; see requiredItems. */
	readonly requiredItem?: string;
	/** Required move. Move required to use this forme in-battle. */
	declare readonly requiredMove?: string;
	/** Required ability. Ability required to use this forme in-battle. */
	declare readonly requiredAbility?: string;
	/**
	 * Required items. Items required to be in this forme, e.g. a mega
	 * stone, or Griseous Orb. Array because Arceus formes can hold
	 * either a Plate or a Z-Crystal.
	 */
	readonly requiredItems?: string[];
	/**
	 * Formes that can transform into this Pokemon, to inherit learnsets
	 * from. (Like `prevo`, but for transformations that aren't
	 * technically evolution. Includes in-battle transformations like
	 * Zen Mode and out-of-battle transformations like Rotom.)
	 *
	 * Not filled out for megas/primals - fall back to baseSpecies
	 * for in-battle formes.
	 */
	readonly changesFrom?: string;
	/**
	 * Singles Tier. The Pokemon's location in the Smogon tier system.
	 */
	readonly tier: TierTypes.Singles | TierTypes.Other;
	/**
	 * Doubles Tier. The Pokemon's location in the Smogon doubles tier system.
	 */
	readonly doublesTier: TierTypes.Doubles | TierTypes.Other;
	declare readonly randomBattleMoves?: readonly ID[];
	declare readonly randomBattleLevel?: number;
	declare readonly randomDoubleBattleMoves?: readonly ID[];
	declare readonly randomDoubleBattleLevel?: number;
	declare readonly randomBattleNoDynamaxMoves?: readonly ID[];
	declare readonly exclusiveMoves?: readonly ID[];
	declare readonly comboMoves?: readonly ID[];
	declare readonly essentialMove?: ID;
	constructor(data: AnyObject) {
		super(data);
		// BasicEffect has already copied `data`'s own properties onto `this`;
		// alias `data` to `this` so the defaulting below reads the merged
		// values. Assignment order matters: later fields read earlier ones.
		data = this;
		this.fullname = `pokemon: ${data.name}`;
		this.effectType = 'Pokemon';
		this.baseSpecies = data.baseSpecies || this.name;
		this.forme = data.forme || '';
		this.baseForme = data.baseForme || '';
		this.cosmeticFormes = data.cosmeticFormes || undefined;
		this.otherFormes = data.otherFormes || undefined;
		this.formeOrder = data.formeOrder || undefined;
		// Default sprite ID: base species ID, plus "-forme" when this is a forme.
		this.spriteid = data.spriteid ||
			(toID(this.baseSpecies) + (this.baseSpecies !== this.name ? `-${toID(this.forme)}` : ''));
		this.abilities = data.abilities || {0: ""};
		this.types = data.types || ['???'];
		this.addedType = data.addedType || undefined;
		this.prevo = data.prevo || '';
		this.tier = data.tier || '';
		this.doublesTier = data.doublesTier || '';
		this.evos = data.evos || [];
		this.evoType = data.evoType || undefined;
		this.evoMove = data.evoMove || undefined;
		this.evoLevel = data.evoLevel || undefined;
		this.nfe = data.nfe || false;
		this.eggGroups = data.eggGroups || [];
		this.canHatch = data.canHatch || false;
		this.gender = data.gender || '';
		// Derive a gender ratio from the fixed-gender flag when not supplied.
		this.genderRatio = data.genderRatio || (this.gender === 'M' ? {M: 1, F: 0} :
			this.gender === 'F' ? {M: 0, F: 1} :
			this.gender === 'N' ? {M: 0, F: 0} :
			{M: 0.5, F: 0.5});
		this.requiredItem = data.requiredItem || undefined;
		// `data === this` here, so this reads the (possibly inherited)
		// requiredItems and falls back to wrapping the single requiredItem.
		this.requiredItems = this.requiredItems || (this.requiredItem ? [this.requiredItem] : undefined);
		this.baseStats = data.baseStats || {hp: 0, atk: 0, def: 0, spa: 0, spd: 0, spe: 0};
		this.bst = this.baseStats.hp + this.baseStats.atk + this.baseStats.def +
			this.baseStats.spa + this.baseStats.spd + this.baseStats.spe;
		this.weightkg = data.weightkg || 0;
		this.weighthg = this.weightkg * 10;
		this.heightm = data.heightm || 0;
		this.color = data.color || '';
		this.tags = data.tags || [];
		this.unreleasedHidden = data.unreleasedHidden || false;
		this.maleOnlyHidden = !!data.maleOnlyHidden;
		this.maxHP = data.maxHP || undefined;
		// `undefined` (not `false`) so the key is omitted from serialization.
		this.isMega = !!(this.forme && ['Mega', 'Mega-X', 'Mega-Y'].includes(this.forme)) || undefined;
		this.canGigantamax = data.canGigantamax || undefined;
		this.gmaxUnreleased = !!data.gmaxUnreleased;
		this.cannotDynamax = !!data.cannotDynamax;
		this.battleOnly = data.battleOnly || (this.isMega ? this.baseSpecies : undefined);
		// NOTE(review): when battleOnly is unset this yields `undefined`
		// (undefined !== baseSpecies selects the battleOnly branch); when
		// battleOnly === baseSpecies (megas) it yields baseSpecies, which the
		// `changesFrom` doc above says should not be filled out — confirm.
		this.changesFrom = data.changesFrom ||
			(this.battleOnly !== this.baseSpecies ? this.battleOnly : this.baseSpecies);
		if (Array.isArray(data.changesFrom)) this.changesFrom = data.changesFrom[0];
		// Infer introduction generation from dex number / forme when not set.
		if (!this.gen && this.num >= 1) {
			if (this.num >= 810 || ['Gmax', 'Galar', 'Galar-Zen'].includes(this.forme)) {
				this.gen = 8;
			} else if (this.num >= 722 || this.forme.startsWith('Alola') || this.forme === 'Starter') {
				this.gen = 7;
			} else if (this.forme === 'Primal') {
				this.gen = 6;
				this.isPrimal = true;
				this.battleOnly = this.baseSpecies;
			} else if (this.num >= 650 || this.isMega) {
				this.gen = 6;
			} else if (this.num >= 494) {
				this.gen = 5;
			} else if (this.num >= 387) {
				this.gen = 4;
			} else if (this.num >= 252) {
				this.gen = 3;
			} else if (this.num >= 152) {
				this.gen = 2;
			} else {
				this.gen = 1;
			}
		}
	}
}
/**
 * Runtime learnset wrapper: which moves a species can learn, how, and its
 * event/encounter availability.
 */
export class Learnset {
	readonly effectType: 'Learnset';
	/**
	 * Keeps track of exactly how a pokemon might learn a move, in the
	 * form moveid:sources[].
	 */
	readonly learnset?: {[moveid: string]: MoveSource[]};
	/** True if the only way to get this Pokemon is from events. */
	readonly eventOnly: boolean;
	/** List of event data for each event. */
	readonly eventData?: EventInfo[];
	readonly encounters?: EventInfo[];
	/** False only for the placeholder returned when no learnset entry exists. */
	readonly exists: boolean;
	constructor(data: AnyObject) {
		// Bug fix: honor an explicit `exists: false` marker — previously it
		// was hardcoded to true, so DexSpecies.getLearnsetData's
		// `new Learnset({exists: false})` placeholder still claimed to exist.
		this.exists = data.exists ?? true;
		this.effectType = 'Learnset';
		this.learnset = data.learnset || undefined;
		this.eventOnly = !!data.eventOnly;
		this.eventData = data.eventData || undefined;
		this.encounters = data.encounters || undefined;
	}
}
/**
 * Per-mod species lookup: resolves names/IDs (including aliases, cosmetic
 * formes, and forme shorthand like "apikachu") to cached `Species` objects,
 * filling in tiers and generation-dependent adjustments.
 */
export class DexSpecies {
	readonly dex: ModdedDex;
	// Caches are per-DexSpecies (i.e. per mod/generation).
	readonly speciesCache = new Map<ID, Species>();
	readonly learnsetCache = new Map<ID, Learnset>();
	allCache: readonly Species[] | null = null;
	constructor(dex: ModdedDex) {
		this.dex = dex;
	}
	/**
	 * Resolve a species by (possibly dirty) name. Passing a Species object
	 * returns it unchanged. Nidoran's gender symbols are special-cased
	 * because toID strips them.
	 */
	get(name?: string | Species): Species {
		if (name && typeof name !== 'string') return name;
		name = (name || '').trim();
		let id = toID(name);
		if (id === 'nidoran' && name.endsWith('♀')) {
			id = 'nidoranf' as ID;
		} else if (id === 'nidoran' && name.endsWith('♂')) {
			id = 'nidoranm' as ID;
		}
		return this.getByID(id);
	}
	/**
	 * Resolve a species by normalized ID. Always returns a Species; when the
	 * ID is unknown, the result has `exists: false` and is not cached.
	 */
	getByID(id: ID): Species {
		let species: Mutable<Species> | undefined = this.speciesCache.get(id);
		if (species) return species;
		if (this.dex.data.Aliases.hasOwnProperty(id)) {
			if (this.dex.data.FormatsData.hasOwnProperty(id)) {
				// special event ID, like Rockruff-Dusk
				const baseId = toID(this.dex.data.Aliases[id]);
				species = new Species({
					...this.dex.data.Pokedex[baseId],
					...this.dex.data.FormatsData[id],
					name: id,
				});
				// Event formes use the special (S) ability slot as their sole ability.
				species.abilities = {0: species.abilities['S']!};
			} else {
				// Plain alias: resolve the target, then check whether the alias
				// actually names one of its cosmetic formes.
				species = this.get(this.dex.data.Aliases[id]);
				if (species.cosmeticFormes) {
					for (const forme of species.cosmeticFormes) {
						if (toID(forme) === id) {
							species = new Species({
								...species,
								name: forme,
								forme: forme.slice(species.name.length + 1),
								baseForme: "",
								baseSpecies: species.name,
								otherFormes: null,
								cosmeticFormes: null,
							});
							break;
						}
					}
				}
			}
			this.speciesCache.set(id, species);
			return species;
		}
		if (!this.dex.data.Pokedex.hasOwnProperty(id)) {
			// Unknown ID: try interpreting it as a forme shorthand, e.g.
			// "apikachu"/"pikachualola" -> "pikachualola".
			let aliasTo = '';
			const formeNames: {[k: string]: string[]} = {
				alola: ['a', 'alola', 'alolan'],
				galar: ['g', 'galar', 'galarian'],
				mega: ['m', 'mega'],
				primal: ['p', 'primal'],
			};
			for (const forme in formeNames) {
				let pokeName = '';
				for (const i of formeNames[forme]) {
					if (id.startsWith(i)) {
						pokeName = id.slice(i.length);
					} else if (id.endsWith(i)) {
						pokeName = id.slice(0, -i.length);
					}
				}
				if (this.dex.data.Aliases.hasOwnProperty(pokeName)) pokeName = toID(this.dex.data.Aliases[pokeName]);
				if (this.dex.data.Pokedex[pokeName + forme]) {
					aliasTo = pokeName + forme;
					break;
				}
			}
			if (aliasTo) {
				species = this.get(aliasTo);
				if (species.exists) {
					this.speciesCache.set(id, species);
					return species;
				}
			}
		}
		if (id && this.dex.data.Pokedex.hasOwnProperty(id)) {
			const pokedexData = this.dex.data.Pokedex[id];
			// Formes inherit legendary/mythical tags from their base species.
			const baseSpeciesTags = pokedexData.baseSpecies && this.dex.data.Pokedex[toID(pokedexData.baseSpecies)].tags;
			species = new Species({
				tags: baseSpeciesTags,
				...pokedexData,
				...this.dex.data.FormatsData[id],
			});
			// Inherit any statuses from the base species (Arceus, Silvally).
			const baseSpeciesStatuses = this.dex.data.Conditions[toID(species.baseSpecies)];
			if (baseSpeciesStatuses !== undefined) {
				let key: keyof EffectData;
				for (key in baseSpeciesStatuses) {
					if (!(key in species)) (species as any)[key] = baseSpeciesStatuses[key];
				}
			}
			// Formes without their own tier data borrow it from a related entry.
			if (!species.tier && !species.doublesTier && species.baseSpecies !== species.name) {
				if (species.baseSpecies === 'Mimikyu') {
					species.tier = this.dex.data.FormatsData[toID(species.baseSpecies)].tier || 'Illegal';
					species.doublesTier = this.dex.data.FormatsData[toID(species.baseSpecies)].doublesTier || 'Illegal';
				} else if (species.id.endsWith('totem')) {
					species.tier = this.dex.data.FormatsData[species.id.slice(0, -5)].tier || 'Illegal';
					species.doublesTier = this.dex.data.FormatsData[species.id.slice(0, -5)].doublesTier || 'Illegal';
				} else if (species.battleOnly) {
					species.tier = this.dex.data.FormatsData[toID(species.battleOnly)].tier || 'Illegal';
					species.doublesTier = this.dex.data.FormatsData[toID(species.battleOnly)].doublesTier || 'Illegal';
				} else {
					const baseFormatsData = this.dex.data.FormatsData[toID(species.baseSpecies)];
					if (!baseFormatsData) {
						throw new Error(`${species.baseSpecies} has no formats-data entry`);
					}
					species.tier = baseFormatsData.tier || 'Illegal';
					species.doublesTier = baseFormatsData.doublesTier || 'Illegal';
				}
			}
			if (!species.tier) species.tier = 'Illegal';
			if (!species.doublesTier) species.doublesTier = species.tier as any;
			// A species from a later generation than this dex is unusable here.
			if (species.gen > this.dex.gen) {
				species.tier = 'Illegal';
				species.doublesTier = 'Illegal';
				species.isNonstandard = 'Future';
			}
			// Let's Go only has Kanto dex + Meltan line, and a few formes.
			if (this.dex.currentMod === 'gen7letsgo' && !species.isNonstandard) {
				const isLetsGo = (
					(species.num <= 151 || ['Meltan', 'Melmetal'].includes(species.name)) &&
					(!species.forme || (['Alola', 'Mega', 'Mega-X', 'Mega-Y', 'Starter'].includes(species.forme) &&
					species.name !== 'Pikachu-Alola'))
				);
				if (!isLetsGo) species.isNonstandard = 'Past';
			}
			// BDSP has the gen 4 dex; later species/formes are unavailable.
			if (this.dex.currentMod === 'gen8bdsp' &&
				(!species.isNonstandard || ["Gigantamax", "CAP"].includes(species.isNonstandard))) {
				if (species.gen > 4 || (species.num < 1 && species.isNonstandard !== 'CAP') ||
					species.id === 'pichuspikyeared') {
					species.isNonstandard = 'Future';
					species.tier = species.doublesTier = 'Illegal';
				}
			}
			// NFE only if at least one evolution exists in this generation.
			species.nfe = !!(species.evos.length && this.get(species.evos[0]).gen <= this.dex.gen);
			species.canHatch = species.canHatch ||
				(!['Ditto', 'Undiscovered'].includes(species.eggGroups[0]) && !species.prevo && species.name !== 'Manaphy');
			// Gen 1 has a single Special stat; drop SpD from the BST.
			if (this.dex.gen === 1) species.bst -= species.baseStats.spd;
			// Hidden abilities don't exist before gen 5; gen-4 abilities don't
			// exist in gen 3.
			if (this.dex.gen < 5) delete species.abilities['H'];
			if (this.dex.gen === 3 && this.dex.abilities.get(species.abilities['1']).gen === 4) delete species.abilities['1'];
		} else {
			species = new Species({
				id, name: id,
				exists: false, tier: 'Illegal', doublesTier: 'Illegal', isNonstandard: 'Custom',
			});
		}
		if (species.exists) this.speciesCache.set(id, species);
		return species;
	}
	/** Convenience accessor for just the moves map of a learnset entry. */
	getLearnset(id: ID): Learnset['learnset'] {
		return this.getLearnsetData(id).learnset;
	}
	/**
	 * Fetch (and cache) the Learnset for an ID. Unknown IDs yield an
	 * uncached `{exists: false}` placeholder.
	 */
	getLearnsetData(id: ID): Learnset {
		let learnsetData = this.learnsetCache.get(id);
		if (learnsetData) return learnsetData;
		if (!this.dex.data.Learnsets.hasOwnProperty(id)) {
			return new Learnset({exists: false});
		}
		learnsetData = new Learnset(this.dex.data.Learnsets[id]);
		this.learnsetCache.set(id, learnsetData);
		return learnsetData;
	}
	/** Every species in this mod's pokedex, built once and cached. */
	all(): readonly Species[] {
		if (this.allCache) return this.allCache;
		const species = [];
		for (const id in this.dex.data.Pokedex) {
			species.push(this.getByID(id as ID));
		}
		this.allCache = species;
		return this.allCache;
	}
}
import {
BINARY_ENCODING,
IExecuteFunctions,
} from 'n8n-core';
import {
cortexApiRequest,
getEntityLabel,
prepareParameters,
splitTags,
} from './GenericFunctions';
import {
analyzerFields,
analyzersOperations,
} from './AnalyzerDescriptions';
import {
IBinaryData,
IDataObject,
ILoadOptionsFunctions,
INodeExecutionData,
INodePropertyOptions,
INodeType,
INodeTypeDescription,
NodeOperationError,
} from 'n8n-workflow';
import {
responderFields,
respondersOperations,
} from './ResponderDescription';
import {
jobFields,
jobOperations,
} from './JobDescription';
import {
upperFirst,
} from 'lodash';
import {
IJob,
} from './AnalyzerInterface';
import {
createHash,
} from 'crypto';
import * as changeCase from 'change-case';
/**
 * n8n node for TheHive's Cortex: runs analyzers on observables, runs
 * responders on TheHive entities, and fetches job details/reports.
 */
export class Cortex implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Cortex',
		name: 'cortex',
		icon: 'file:cortex.svg',
		group: ['transform'],
		subtitle: '={{$parameter["resource"]+ ": " + $parameter["operation"]}}',
		version: 1,
		description: 'Apply the Cortex analyzer/responder on the given entity',
		defaults: {
			name: 'Cortex',
		},
		inputs: ['main'],
		outputs: ['main'],
		credentials: [
			{
				name: 'cortexApi',
				required: true,
			},
		],
		properties: [
			// Node properties which the user gets displayed and
			// can change on the node.
			{
				displayName: 'Resource',
				name: 'resource',
				type: 'options',
				options: [
					{
						name: 'Analyzer',
						value: 'analyzer',
					},
					{
						name: 'Job',
						value: 'job',
					},
					{
						name: 'Responder',
						value: 'responder',
					},
				],
				default: 'analyzer',
				description: 'Choose a resource',
				required: true,
			},
			...analyzersOperations,
			...analyzerFields,
			...respondersOperations,
			...responderFields,
			...jobOperations,
			...jobFields,
		],
	};
	methods = {
		loadOptions: {
			/** Dropdown options for the enabled analyzers on the Cortex instance. */
			async loadActiveAnalyzers(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
				// request the enabled analyzers from instance
				const requestResult = await cortexApiRequest.call(
					this,
					'POST',
					`/analyzer/_search`,
				);
				const returnData: INodePropertyOptions[] = [];
				for (const analyzer of requestResult) {
					returnData.push({
						name: analyzer.name as string,
						// value carries both id and name, split on '::' by consumers
						value: `${analyzer.id as string}::${analyzer.name as string}`,
						description: analyzer.description as string,
					});
				}
				return returnData;
			},
			/** Dropdown options for the enabled responders on the Cortex instance. */
			async loadActiveResponders(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
				// request the enabled responders from instance
				const requestResult = await cortexApiRequest.call(
					this,
					'GET',
					`/responder`,
				);
				const returnData: INodePropertyOptions[] = [];
				for (const responder of requestResult) {
					returnData.push({
						name: responder.name as string,
						value: `${responder.id as string}::${responder.name as string}`,
						description: responder.description as string,
					});
				}
				return returnData;
			},
			/** Observable-type options supported by the selected analyzer. */
			async loadObservableOptions(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
				const selectedAnalyzerId = (this.getNodeParameter('analyzer') as string).split('::')[0];
				// request the analyzers from instance
				const requestResult = await cortexApiRequest.call(
					this,
					'GET',
					`/analyzer/${selectedAnalyzerId}`,
				);
				// parse supported observable types into options
				const returnData: INodePropertyOptions[] = [];
				for (const dataType of requestResult.dataTypeList) {
					returnData.push(
						{
							name: upperFirst(dataType as string),
							value: dataType as string,
						},
					);
				}
				return returnData;
			},
			/** Entity-type options accepted by the selected responder. */
			async loadDataTypeOptions(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
				const selectedResponderId = (this.getNodeParameter('responder') as string).split('::')[0];
				// request the responder from instance
				const requestResult = await cortexApiRequest.call(
					this,
					'GET',
					`/responder/${selectedResponderId}`,
				);
				// parse the accepted dataType into options
				// (entries look like "thehive:case", keep the part after ':')
				const returnData: INodePropertyOptions[] = [];
				for (const dataType of requestResult.dataTypeList) {
					returnData.push(
						{
							value: (dataType as string).split(':')[1],
							name: changeCase.capitalCase((dataType as string).split(':')[1]),
						},
					);
				}
				return returnData;
			},
		},
	};
	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
		const items = this.getInputData();
		const returnData: IDataObject[] = [];
		const length = items.length;
		const qs: IDataObject = {};
		let responseData;
		const resource = this.getNodeParameter('resource', 0) as string;
		const operation = this.getNodeParameter('operation', 0) as string;
		for (let i = 0; i < length; i++) {
			try {
				if (resource === 'analyzer') {
					//https://github.com/TheHive-Project/CortexDocs/blob/master/api/api-guide.md#run
					if (operation === 'execute') {
						let force = false;
						const analyzer = this.getNodeParameter('analyzer', i) as string;
						const observableType = this.getNodeParameter('observableType', i) as string;
						const additionalFields = this.getNodeParameter('additionalFields', i) as IDataObject;
						const tlp = this.getNodeParameter('tlp', i) as string;
						const body: IDataObject = {
							dataType: observableType,
							tlp,
						};
						if (additionalFields.force === true) {
							force = true;
						}
						if (observableType === 'file') {
							// File observables are uploaded as multipart form data.
							const item = items[i];
							if (item.binary === undefined) {
								throw new NodeOperationError(this.getNode(), 'No binary data exists on item!');
							}
							const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i) as string;
							if (item.binary[binaryPropertyName] === undefined) {
								throw new NodeOperationError(this.getNode(), `No binary data property "${binaryPropertyName}" exists on item!`);
							}
							const fileBufferData = Buffer.from(item.binary[binaryPropertyName].data, BINARY_ENCODING);
							const options = {
								formData: {
									data: {
										value: fileBufferData,
										options: {
											contentType: item.binary[binaryPropertyName].mimeType,
											filename: item.binary[binaryPropertyName].fileName,
										},
									},
									_json: JSON.stringify({
										dataType: observableType,
										tlp,
									}),
								},
							};
							responseData = await cortexApiRequest.call(
								this,
								'POST',
								`/analyzer/${analyzer.split('::')[0]}/run`,
								{},
								{ force },
								'',
								options,
							) as IJob;
							// NOTE(review): this `continue` skips both the `timeout`
							// wait below and the returnData collection at the bottom of
							// the loop, so file analyzer runs emit no output item —
							// confirm this is intended.
							continue;
						} else {
							const observableValue = this.getNodeParameter('observableValue', i) as string;
							body.data = observableValue;
							responseData = await cortexApiRequest.call(
								this,
								'POST',
								`/analyzer/${analyzer.split('::')[0]}/run`,
								body,
								{ force },
							) as IJob;
						}
						if (additionalFields.timeout) {
							// Optionally block until the report is ready (or atMost elapses).
							responseData = await cortexApiRequest.call(
								this,
								'GET',
								`/job/${responseData.id}/waitreport`,
								{},
								{ atMost: `${additionalFields.timeout}second` },
							);
						}
					}
				}
				if (resource === 'job') {
					//https://github.com/TheHive-Project/CortexDocs/blob/master/api/api-guide.md#get-details-1
					if (operation === 'get') {
						const jobId = this.getNodeParameter('jobId', i) as string;
						responseData = await cortexApiRequest.call(
							this,
							'GET',
							`/job/${jobId}`,
						);
					}
					//https://github.com/TheHive-Project/CortexDocs/blob/master/api/api-guide.md#get-details-and-report
					if (operation === 'report') {
						const jobId = this.getNodeParameter('jobId', i) as string;
						responseData = await cortexApiRequest.call(
							this,
							'GET',
							`/job/${jobId}/report`,
						);
					}
				}
				if (resource === 'responder') {
					if (operation === 'execute') {
						const responderId = (this.getNodeParameter('responder', i) as string).split('::')[0];
						const entityType = this.getNodeParameter('entityType', i) as string;
						const isJSON = this.getNodeParameter('jsonObject', i) as boolean;
						let body: IDataObject;
						if (isJSON) {
							// Entity supplied as raw JSON by the user.
							const entityJson = JSON.parse(this.getNodeParameter('objectData', i) as string);
							body = {
								responderId,
								label: getEntityLabel(entityJson),
								dataType: `thehive:${entityType}`,
								data: entityJson,
								tlp: entityJson.tlp || 2,
								pap: entityJson.pap || 2,
								message: entityJson.message || '',
								parameters: [],
							};
						} else {
							// Entity assembled from individual node parameters.
							const values = (this.getNodeParameter('parameters', i) as IDataObject).values as IDataObject;
							body = {
								responderId,
								dataType: `thehive:${entityType}`,
								data: {
									_type: entityType,
									...prepareParameters(values),
								},
							};
							if (entityType === 'alert') {
								// deal with alert artifacts
								const artifacts = (body.data as IDataObject).artifacts as IDataObject;
								if (artifacts) {
									const artifactValues = (artifacts as IDataObject).artifactValues as IDataObject[];
									if (artifactValues) {
										const artifactData = [];
										for (const artifactvalue of artifactValues) {
											const element: IDataObject = {};
											element.message = artifactvalue.message as string;
											element.tags = splitTags(artifactvalue.tags as string) as string[];
											element.dataType = artifactvalue.dataType as string;
											element.data = artifactvalue.data as string;
											if (artifactvalue.dataType === 'file') {
												// File artifacts are inlined as "name;mimeType;base64data".
												const item = items[i];
												if (item.binary === undefined) {
													throw new NodeOperationError(this.getNode(), 'No binary data exists on item!');
												}
												const binaryPropertyName = artifactvalue.binaryProperty as string;
												if (item.binary[binaryPropertyName] === undefined) {
													throw new NodeOperationError(this.getNode(), `No binary data property '${binaryPropertyName}' exists on item!`);
												}
												const binaryData = item.binary[binaryPropertyName] as IBinaryData;
												element.data = `${binaryData.fileName};${binaryData.mimeType};${binaryData.data}`;
											}
											artifactData.push(element);
										}
										(body.data as IDataObject).artifacts = artifactData;
									}
								}
							}
							if (entityType === 'case_artifact') {
								// deal with file observable
								if ((body.data as IDataObject).dataType === 'file') {
									const item = items[i];
									if (item.binary === undefined) {
										throw new NodeOperationError(this.getNode(), 'No binary data exists on item!');
									}
									const binaryPropertyName = (body.data as IDataObject).binaryPropertyName as string;
									if (item.binary[binaryPropertyName] === undefined) {
										throw new NodeOperationError(this.getNode(), `No binary data property "${binaryPropertyName}" exists on item!`);
									}
									const fileBufferData = Buffer.from(item.binary[binaryPropertyName].data, BINARY_ENCODING);
									// TheHive attachment metadata: sha256/sha1/md5 hashes, size, type.
									const sha256 = createHash('sha256').update(fileBufferData).digest('hex');
									(body.data as IDataObject).attachment = {
										name: item.binary[binaryPropertyName].fileName,
										hashes: [
											sha256,
											createHash('sha1').update(fileBufferData).digest('hex'),
											createHash('md5').update(fileBufferData).digest('hex'),
										],
										size: fileBufferData.byteLength,
										contentType: item.binary[binaryPropertyName].mimeType,
										id: sha256,
									};
									delete (body.data as IDataObject).binaryPropertyName;
								}
							}
							// add the job label after getting all entity attributes
							body = {
								label: getEntityLabel(body.data as IDataObject),
								...body,
							};
						}
						responseData = await cortexApiRequest.call(
							this,
							'POST',
							`/responder/${responderId}/run`,
							body,
						) as IJob;
					}
				}
				if (Array.isArray(responseData)) {
					returnData.push.apply(returnData, responseData as IDataObject[]);
				} else if (responseData !== undefined) {
					returnData.push(responseData as IDataObject);
				}
			} catch (error) {
				if (this.continueOnFail()) {
					returnData.push({ error: error.message });
					continue;
				}
				throw error;
			}
		}
		return [this.helpers.returnJsonArray(returnData)];
	}
}
export class Broadcast<MsgKeyType extends broadcast.IMsgKey, ValueType = any, ResultType = any>
implements broadcast.IBroadcast<MsgKeyType, ValueType, ResultType>{
	/** Identity proxy: `keys.foo` evaluates to the string "foo", giving typed access to key names. */
	public keys: { [key in keyof MsgKeyType]: MsgKeyType[key] };
	/** Persisted last value per key (written by broadcast() when `persistence` is true). */
	private _valueMap: { [key in keyof MsgKeyType]: any };
	/** Registered handlers per key: a single handler or an array of them. */
	private _handlerMap: { [key in keyof MsgKeyType]: broadcast.IListenerHandler | broadcast.IListenerHandler[] };
	/** Sticky handlers per key. */
	private _stickHandlersMap: { [key in keyof MsgKeyType]: broadcast.IStickyHandler[] };
	// Presumably a pool of recycled handler objects (filled via _recoverHandler) — confirm against the rest of the class.
	protected _unuseHandlers: any[]
	constructor() {
		// `keys` is a Proxy whose getter returns the property name itself, so
		// `this.keys.someEvent` evaluates to the string "someEvent".
		this.keys = new Proxy({} as any, {
			get: (target, p) => {
				return p;
			}
		})
		this._valueMap = {} as any;
		this._unuseHandlers = [];
		// NOTE(review): _handlerMap and _stickHandlersMap are not initialized
		// here — other methods guard for that, so they are presumably created
		// lazily on first registration; confirm.
	}
	// registration
	/**
	 * Register one or more event listeners.
	 * @param handler event key string, a prebuilt handler object, or an array of handler objects
	 * @param listener listener callback (only used when `handler` is a key string)
	 * @param context listener context (`this` for the callback)
	 * @param once if true, listen only once
	 * @param args extra arguments passed through to the listener
	 */
	public on<keyType extends keyof MsgKeyType = any>(
		handler: keyType | broadcast.IListenerHandler<keyType, ValueType, ResultType> | broadcast.IListenerHandler<keyType, ValueType, ResultType>[],
		listener?: broadcast.Listener<ValueType[broadcast.ToAnyIndexKey<keyType, ValueType>], ResultType[broadcast.ToAnyIndexKey<keyType, ResultType>]>,
		context?: any,
		once?: boolean,
		args?: any[]
	) {
		if (typeof handler === "string") {
			// key + listener form; a missing listener makes the call a no-op
			if (!listener) return;
			this._addHandler(this._getHandler(handler, listener, context, once, args));
		} else {
			if (this._isArr(handler)) {
				// array of handler objects
				const handlers: broadcast.IListenerHandler[] = handler as any;
				for (let i = 0; i < handlers.length; i++) {
					this._addHandler(handlers[i]);
				}
			} else {
				// single handler object
				this._addHandler(handler as any);
			}
		}
	}
public has(key: keyof MsgKeyType) {
return this._handlerMap && !!this._handlerMap[key]
}
public offAllByContext(context: any) {
const handlerMap = this._handlerMap;
if (context && handlerMap) {
for (const key in handlerMap) {
if (handlerMap[key]) {
this.off(key, null, context);
}
}
}
}
	/**
	 * Remove every listener registered for the given key, and clear any
	 * sticky handlers and persisted value stored under it.
	 * @param key event key; ignored when null/empty
	 */
	public offAll(key?: keyof MsgKeyType) {
		if (this._isStringNull(key)) {
			return;
		}
		const handlerMap = this._handlerMap;
		const stickyMap = this._stickHandlersMap;
		const valueMap = this._valueMap;
		if (stickyMap) stickyMap[key] = undefined;
		if (handlerMap) {
			const handlers: broadcast.IListenerHandler[] = handlerMap[key] as any;
			if (this._isArr(handlers)) {
				// recycle every handler registered under this key
				for (let i = 0; i < handlers.length; i++) {
					this._recoverHandler(handlers[i]);
				}
			} else {
				// single handler — NOTE(review): may be undefined when the key was
				// never registered; _recoverHandler is assumed to tolerate that.
				this._recoverHandler(handlers as any);
			}
			handlerMap[key] = undefined
		}
		if (valueMap) valueMap[key] = undefined;
	}
	/**
	 * Remove listeners for a key, filtered by listener function, context,
	 * and/or once-only flag. Any filter left null/undefined matches all.
	 * @param key event key; ignored when null/empty
	 * @param listener listener to remove (null = any listener)
	 * @param context context to match (falsy = any context)
	 * @param onceOnly when true, only remove once-only handlers
	 * @returns this, for chaining
	 */
	public off(key: keyof MsgKeyType, listener: broadcast.Listener, context?: any, onceOnly?: boolean) {
		if (this._isStringNull(key)) return;
		const handlerMap = this._handlerMap;
		if (!handlerMap || !handlerMap[key]) return this;
		let handler: broadcast.IListenerHandler = handlerMap[key] as any;
		if (handler !== undefined && handler !== null) {
			let handlers: broadcast.IListenerHandler[];
			if (!this._isArr(handler)) {
				// single-handler case: remove it if it matches all filters
				if ((!context || handler.context === context)
					&& (listener == null || handler.listener === listener)
					&& (!onceOnly || handler.once)) {
					this._recoverHandler(handler);
					handlerMap[key] = undefined;
				}
			} else {
				handlers = handler as any;
				// iterate in reverse for deletion: swap the matched element to
				// the end and pop it, so each removal is O(1)
				let endIndex = handlers.length - 1;
				for (let i = endIndex; i >= 0; i--) {
					handler = handlers[i];
					if (handler && (!context || handler.context === context)
						&& (listener == null || handler.listener === listener)
						&& (!onceOnly || handler.once)) {
						endIndex = handlers.length - 1;
						if (i !== endIndex) {
							handler = handlers[endIndex];
							handlers[endIndex] = handlers[i];
							handlers[i] = handler;
						}
						this._recoverHandler(handlers.pop());
					}
				}
				if (!handlers.length) {
					handlerMap[key] = undefined;
				}
				// let count: number = 0;
				// for (let i: number = 0; i < handlers.length; i++) {
				//     const item: IListenerHandler<KeyType> = handlers[i];
				//     if (!item) {
				//         count++;
				//         continue;
				//     }
				//     if (item && (!context || item.context === context)
				//         && (listener == null || item.listener === listener)
				//         && (!onceOnly || item.once)) {
				//         count++;
				//         handlers[i] = undefined;
				//     }
				// }
				// //if everything was removed, drop the key entirely
				// if (count === handlers.length) {
				//     handlerMap[key] = undefined;
				// } else {
				//     const newHandlers: IListenerHandler<KeyType>[] = [];
				//     for (let i = 0; i < handlers.length; i++) {
				//         handlers[i] && newHandlers.push(handlers[i]);
				//     }
				//     handlerMap[key] = newHandlers;
				// }
			}
		}
		return this;
	}
/**
 * Broadcast an event to all listeners registered for `key`.
 * "once" handlers are run, then removed and recycled.
 * @param key event key
 * @param value payload passed to listeners (may be any type)
 * @param callback receives the value returned by each listener
 * @param persistence persist the payload so it stays readable via value(key)
 */
public broadcast<keyType extends keyof MsgKeyType = any>(
    key: keyType, value?: ValueType[broadcast.ToAnyIndexKey<keyType, ValueType>],
    callback?: broadcast.ResultCallBack<ResultType[broadcast.ToAnyIndexKey<keyType, ResultType>]>,
    persistence?: boolean) {
    const handlerMap = this._handlerMap;
    if (!handlerMap) return;
    const handlers = handlerMap[key];
    if (persistence) {
        let valueMap = this._valueMap;
        if (!valueMap) {
            valueMap = {} as any;
            this._valueMap = valueMap;
        }
        valueMap[key] = value;
    }
    if (!handlers) return;
    // BUGFIX: test for null/undefined instead of truthiness so falsy
    // payloads (0, "", false) are still delivered to listeners.
    const hasValue = value !== undefined && value !== null;
    if (!this._isArr(handlers)) {
        const handler = handlers as broadcast.IListenerHandler;
        hasValue ? Broadcast._runHandlerWithData(handler, value, callback) : Broadcast._runHandler(handler, callback);
        if (handler.once) {
            this._recoverHandler(handler);
            handlerMap[key] = undefined;
        }
    } else {
        const handlerArr = handlers as broadcast.IListenerHandler[];
        let handler: broadcast.IListenerHandler;
        // Iterate backwards; "once" handlers are swapped to the tail and
        // popped after running, so removal is O(1) and no handler is skipped.
        for (let i = handlerArr.length - 1; i >= 0; i--) {
            handler = handlerArr[i];
            hasValue ? Broadcast._runHandlerWithData(handler, value, callback) : Broadcast._runHandler(handler, callback);
            if (handler.once) {
                const endIndex = handlerArr.length - 1;
                handler = handlerArr[endIndex];
                handlerArr[endIndex] = handlerArr[i];
                handlerArr[i] = handler;
                this._recoverHandler(handlerArr.pop());
            }
        }
        if (!handlerArr.length) {
            handlerMap[key] = undefined;
        }
    }
}
/**
 * Broadcast a sticky message for `key`.
 * If no receiver is registered yet, the message is parked in the system and
 * delivered as soon as a receiver of that type is added. If receivers are
 * already registered, the message is broadcast immediately.
 * @param key message key
 * @param value payload carried by the message; may be any type or null
 * @param callback receives the value returned by the receiver
 * @param persistence persist the payload so it can be read at any time via
 *                    broadcast.value(key); persisted entries survive until
 *                    explicitly cleared
 */
public stickyBroadcast<keyType extends keyof MsgKeyType = any>(
    key: keyType,
    value?: ValueType[broadcast.ToAnyIndexKey<keyType, ValueType>],
    callback?: broadcast.ResultCallBack<ResultType[broadcast.ToAnyIndexKey<keyType, ResultType>]>,
    persistence?: boolean
) {
    if (this._isStringNull(key)) return;
    const handlerMap = this._handlerMap;
    const hasReceiver = !!(handlerMap && handlerMap[key]);
    if (hasReceiver) {
        // Someone is already listening: deliver right away.
        this.broadcast(key, value, callback, persistence);
        return;
    }
    // Nobody listening yet: park the message until a listener is added.
    let stickyMap = this._stickHandlersMap;
    if (!stickyMap) {
        stickyMap = {} as any;
        this._stickHandlersMap = stickyMap;
    }
    const pending: broadcast.IStickyHandler = {
        key: key as any,
        value: value,
        callback: callback,
        persistence: persistence
    };
    const bucket = stickyMap[key];
    if (bucket) {
        bucket.push(pending)
    } else {
        stickyMap[key] = [pending]
    }
}
/**
 * Whether a key is "empty": undefined, null, or a blank/whitespace string.
 * @param str candidate key
 */
protected _isStringNull(str: string | any) {
    // BUGFIX: non-string keys (e.g. numeric map keys) used to throw on
    // `.trim()`; any non-empty non-string value now counts as a valid key.
    if (!str) return true;
    return typeof str === "string" && str.trim() === "";
}
/**
 * Whether `target` is an array (cross-realm-safe tag check).
 * @param target
 */
protected _isArr(target: any) {
    const tag = Object.prototype.toString.call(target);
    return tag === "[object Array]";
}
/**
 * Run a listener handler with the broadcast payload as its first argument.
 * @param handler the listener handler
 * @param data the broadcast payload
 * @param callback result callback appended after the payload
 * @returns whatever the listener returns, or null when it has no listener
 */
protected static _runHandlerWithData(handler: broadcast.IListenerHandler, data: any, callback: broadcast.Listener) {
    if (handler.listener == null) return null;
    let result: any;
    if (data == null) {
        // BUGFIX: `handler.args.unshift(callback)` returned the new array
        // *length* (a number) — `apply` then threw — and it also mutated
        // handler.args on every call. Build a fresh argument list instead.
        const args = handler.args ? [callback].concat(handler.args) : [callback];
        result = handler.listener.apply(handler.context, args);
    } else if (handler.args) {
        result = handler.listener.apply(handler.context, [data, callback].concat(handler.args));
    } else {
        result = handler.listener.apply(handler.context, [data, callback]);
    }
    return result;
}
/**
 * Run a listener handler that receives no broadcast payload.
 * @param handler the listener handler
 * @param callback result callback passed as the first argument
 * @returns whatever the listener returns, or null when it has no listener
 */
protected static _runHandler(handler: broadcast.IListenerHandler, callback: broadcast.Listener) {
    if (handler.listener == null) return null;
    // BUGFIX: `handler.args.unshift(callback)` returned the new array
    // *length*, not the array, and mutated handler.args each call.
    const args = handler.args ? [callback].concat(handler.args) : [callback];
    const result: any = handler.listener.apply(handler.context, args);
    return result;
}
/**
 * Clear a handler's fields and return it to the reuse pool.
 * @param handler
 */
protected _recoverHandler(handler: broadcast.IListenerHandler) {
    handler.key = undefined;
    handler.listener = undefined;
    handler.context = undefined;
    handler.args = undefined;
    this._unuseHandlers.push(handler);
}
/**
 * Obtain a handler object, reusing one from the pool when available.
 * @param key event key
 * @param listener listener function
 * @param context `this` context for the listener
 * @param once whether the handler should fire at most once
 * @param args extra arguments appended on invocation
 */
protected _getHandler(key: string, listener: any, context: any, once: boolean, args: any[]) {
    const pool = this._unuseHandlers;
    const handler: broadcast.IListenerHandler = pool.length
        ? pool.pop()
        : ({} as any);
    handler.key = key;
    handler.listener = listener;
    handler.context = context;
    handler.once = once;
    handler.args = args;
    return handler;
}
/**
 * Register a listener handler.
 * A "once" handler first removes any previous identical registration.
 * Parked sticky messages for the key are flushed immediately, and an
 * `onListenerOn` notification is broadcast afterwards.
 * @param handler
 */
protected _addHandler(handler: broadcast.IListenerHandler) {
    let handlerMap = this._handlerMap;
    if (handler.once) {
        this.off(handler.key, handler.listener, handler.context, handler.once);
    }
    if (!handlerMap) {
        handlerMap = {} as any;
        this._handlerMap = handlerMap;
    }
    const events = handlerMap[handler.key];
    if (events) {
        if (this._isArr(events)) {
            (events as broadcast.IListenerHandler[]).push(handler);
        } else {
            // Promote single-handler storage to an array on second listener.
            handlerMap[handler.key] = [events as any, handler];
        }
    } else {
        handlerMap[handler.key] = handler;
    }
    const stickyMap = this._stickHandlersMap;
    if (stickyMap) {
        const stickyHandlers = stickyMap[handler.key];
        if (stickyHandlers) {
            // BUGFIX: the loop variable used to shadow the `handler`
            // parameter, so the cleanup below silently depended on the last
            // sticky entry's key. Use a distinct name.
            let sticky: broadcast.IStickyHandler;
            for (let i = 0; i < stickyHandlers.length; i++) {
                sticky = stickyHandlers[i];
                this.broadcast(sticky.key as any, sticky.value, sticky.callback, sticky.persistence);
            }
            stickyMap[handler.key] = undefined;
        }
    }
    if (handler.key !== this.keys.onListenerOn) {
        this.broadcast(this.keys.onListenerOn, handler.key);
    }
}
/**
 * Read the persisted payload for `key`, if any.
 * @param key
 */
public value<keyType extends keyof MsgKeyType = any>(key: keyType): ValueType[broadcast.ToAnyIndexKey<keyType, ValueType>] {
    const valueMap = this._valueMap;
    if (!valueMap) return valueMap as any;
    return valueMap[key];
}
/**
 * Tear down the broadcast system: drop all handlers, parked sticky
 * messages, and persisted values.
 */
public dispose() {
    this._handlerMap = undefined;
    this._valueMap = undefined;
    this._stickHandlersMap = undefined;
}
}
import { nextTick } from 'vue'
import { mount } from '@vue/test-utils'
import dayjs from 'dayjs'
import ConfigProvider from '@element-plus/components/config-provider'
import { CommonPicker } from '@element-plus/components/time-picker'
import Input from '@element-plus/components/input'
import zhCn from '@element-plus/locale/lang/zh-cn'
import enUs from '@element-plus/locale/lang/en'
import 'dayjs/locale/zh-cn'
import DatePicker from '../src/date-picker'
/**
 * Mount a template with `el-date-picker` registered, attached to the
 * document body so popper/DOM queries in tests can find the panel.
 */
const _mount = (template: string, data = () => ({}), otherObj?) => {
  const component = {
    components: {
      'el-date-picker': DatePicker,
    },
    template,
    data,
    ...otherObj,
  }
  return mount(component, { attachTo: 'body' })
}
// Pickers teleport their popper into <body>; wipe the document between
// tests so selectors never match stale panels.
afterEach(() => {
  document.documentElement.innerHTML = ''
})
// Core single-date picker behavior: rendering props, selection, events,
// shortcuts, disabled dates, programmatic focus, and value-format.
describe('DatePicker', () => {
  it('create & custom class & style', async () => {
    const popperClassName = 'popper-class-test'
    const customClassName = 'custom-class-test'
    const wrapper = _mount(
      `<el-date-picker
        :readonly="true"
        placeholder='test_'
        format='HH-mm-ss'
        :style="{color:'red'}"
        :class="customClassName"
        :popperClass="popperClassName"
      />`,
      () => ({ popperClassName, customClassName })
    )
    const input = wrapper.find('input')
    expect(input.attributes('placeholder')).toBe('test_')
    expect(input.attributes('readonly')).not.toBeUndefined()
    const outterInput = wrapper.find('.el-input')
    expect(outterInput.classes()).toContain(customClassName)
    expect(outterInput.attributes().style).toBeDefined()
    // blur+focus opens the popper; popper-class must land on the popper root
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    expect(
      document
        .querySelector('.el-picker__popper')
        .classList.contains(popperClassName)
    ).toBe(true)
  })
  it('select date', async () => {
    const wrapper = _mount(
      `<el-date-picker
        v-model="value"
      />`,
      () => ({ value: '' })
    )
    const date = dayjs()
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    // header shows current year/month; arrow buttons page the panel
    const spans = document.querySelectorAll('.el-date-picker__header-label')
    const arrowLeftElm = document.querySelector(
      '.el-date-picker__prev-btn.el-icon-arrow-left'
    ) as HTMLElement
    const arrowRightElm = document.querySelector(
      '.el-date-picker__next-btn.el-icon-arrow-right'
    ) as HTMLElement
    expect(spans[0].textContent).toContain(date.year())
    expect(spans[1].textContent).toContain(date.format('MMMM'))
    const arrowLeftYeayElm = document.querySelector(
      '.el-date-picker__prev-btn.el-icon-d-arrow-left'
    ) as HTMLElement
    // jump back one year, then 19 months back and 19 forward — the month
    // cancels out, so the header should read "current year - 1"
    arrowLeftYeayElm.click()
    let count = 20
    while (--count) {
      arrowLeftElm.click()
    }
    count = 20
    while (--count) {
      arrowRightElm.click()
    }
    await nextTick()
    expect(spans[0].textContent).toContain(date.add(-1, 'year').year())
    expect(spans[1].textContent).toContain(date.format('MMMM'))
    ;(document.querySelector('td.available') as HTMLElement).click()
    await nextTick()
    const vm = wrapper.vm as any
    expect(vm.value).toBeDefined()
  })
  it('defaultTime and clear value', async () => {
    const wrapper = _mount(
      `<el-date-picker
        v-model="value"
        :default-time="new Date(2011,1,1,12,0,1)"
      />`,
      () => ({ value: '' })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    ;(document.querySelector('td.available') as HTMLElement).click()
    await nextTick()
    // selected date must carry the time-of-day from default-time (12:00:01)
    const vm = wrapper.vm as any
    expect(vm.value).toBeDefined()
    expect(vm.value.getHours()).toBe(12)
    expect(vm.value.getMinutes()).toBe(0)
    expect(vm.value.getSeconds()).toBe(1)
    // force the clear icon visible, click it, and expect a null model
    const picker = wrapper.findComponent(CommonPicker)
    ;(picker.vm as any).showClose = true
    await nextTick()
    ;(document.querySelector('.el-icon-circle-close') as HTMLElement).click()
    expect(vm.value).toBeNull()
  })
  it('event change, focus, blur', async () => {
    const changeHandler = jest.fn()
    const focusHandler = jest.fn()
    const blurHandler = jest.fn()
    let onChangeValue
    const wrapper = _mount(
      `<el-date-picker
        v-model="value"
        @change="onChange"
        @focus="onFocus"
        @blur="onBlur"
      />`,
      () => ({ value: new Date(2016, 9, 10, 18, 40) }),
      {
        methods: {
          onChange(e) {
            onChangeValue = e
            return changeHandler(e)
          },
          onFocus(e) {
            return focusHandler(e)
          },
          onBlur(e) {
            return blurHandler(e)
          },
        },
      }
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    expect(focusHandler).toHaveBeenCalledTimes(1)
    ;(document.querySelector('td.available') as HTMLElement).click()
    await nextTick()
    await nextTick() // onchange is triggered by props.modelValue update
    expect(changeHandler).toHaveBeenCalledTimes(1)
    expect(blurHandler).toHaveBeenCalledTimes(1)
    // first available cell of Oct 2016 is the 1st
    expect(onChangeValue.getTime()).toBe(new Date(2016, 9, 1).getTime())
  })
  it('shortcuts', async () => {
    const text = 'Yesterday'
    const value = new Date(Date.now() - 86400000)
    value.setHours(0, 0, 0, 0)
    const wrapper = _mount(
      `<el-date-picker
        v-model="value"
        :shortcuts="shortcuts"
      />`,
      () => ({
        value: '',
        shortcuts: [
          {
            text,
            value,
          },
        ],
      })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    // shortcut renders in the sidebar; clicking it writes its value
    const shortcut = document.querySelector('.el-picker-panel__shortcut')
    expect(shortcut.textContent).toBe(text)
    expect(document.querySelector('.el-picker-panel__sidebar')).not.toBeNull()
    ;(shortcut as HTMLElement).click()
    await nextTick()
    const vm = wrapper.vm as any
    expect(vm.value.valueOf()).toBe(value.valueOf())
  })
  it('disabledDate', async () => {
    const wrapper = _mount(
      `<el-date-picker
        v-model="value"
        :disabledDate="disabledDate"
      />`,
      () => ({
        value: '',
        // everything strictly before yesterday is disabled
        disabledDate(time) {
          return time.getTime() < Date.now() - 8.64e7
        },
      })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    expect(document.querySelector('.disabled')).not.toBeNull()
  })
  it('ref focus', async () => {
    // calling focus() through the template ref must open the popper
    _mount(
      `<el-date-picker
        v-model="value"
        ref="input"
      />`,
      () => ({ value: '' }),
      {
        mounted() {
          this.$refs.input.focus()
        },
      }
    )
    await nextTick()
    const popperEl = document.querySelector('.el-picker__popper')
    const attr = popperEl.getAttribute('aria-hidden')
    expect(attr).toEqual('false')
  })
  describe('value-format', () => {
    it('with literal string', async () => {
      const day = dayjs()
      const format = 'YYYY-MM-DD'
      // value-format may contain a [bracketed] literal prefix
      const valueFormat = '[Element-Plus] DD/MM YYYY'
      const value = day.format(valueFormat)
      const wrapper = _mount(
        `
        <el-date-picker
          ref="compo"
          v-model="value"
          type="date"
          format="${format}"
          value-format="${valueFormat}" />
        <button @click="changeValue">click</button>
      `,
        () => {
          return {
            value,
          }
        },
        {
          methods: {
            changeValue() {
              this.value = '[Element-Plus] 31/05 2021'
            },
          },
        }
      )
      const vm = wrapper.vm as any
      const input = wrapper.find('input')
      input.trigger('blur')
      input.trigger('focus')
      await nextTick()
      {
        ;(document.querySelector('td.available') as HTMLElement).click()
      }
      await nextTick()
      // model value is emitted in value-format, not in display format
      expect(vm.value).toBe(
        dayjs(
          `[Element-Plus] 01/${`0${day.month() + 1}`.slice(-2)} ${day.year()}`,
          valueFormat
        ).format(valueFormat)
      )
      // external assignment in value-format is rendered in display format
      wrapper.find('button').trigger('click')
      await nextTick()
      expect(wrapper.findComponent(Input).vm.modelValue).toBe('2021-05-31')
    })
  })
})
// Panel navigation: month/year arrows and the header-label drill-down,
// including clamping when the target month has fewer days.
describe('DatePicker Navigation', () => {
  let prevMonth, prevYear, nextMonth, nextYear, getYearLabel, getMonthLabel
  // Mounts a picker with `value`, opens it, and caches the four nav
  // buttons plus accessors for the year/month header labels.
  const initNavigationTest = async (value) => {
    const wrapper = _mount(
      `<el-date-picker
        v-model="value"
      />`,
      () => ({ value })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    prevMonth = document.querySelector('button.el-icon-arrow-left')
    prevYear = document.querySelector('button.el-icon-d-arrow-left')
    nextMonth = document.querySelector('button.el-icon-arrow-right')
    nextYear = document.querySelector('button.el-icon-d-arrow-right')
    getYearLabel = () =>
      document.querySelectorAll('.el-date-picker__header-label')[0].textContent
    getMonthLabel = () =>
      document.querySelectorAll('.el-date-picker__header-label')[1].textContent
  }
  it('month, year', async () => {
    await initNavigationTest(new Date(2000, 0, 1))
    expect(getYearLabel()).toContain('2000')
    expect(getMonthLabel()).toContain('January')
    // Jan 2000 -> Dec 1999 (month wraps the year)
    prevMonth.click()
    await nextTick()
    expect(getYearLabel()).toContain('1999')
    expect(getMonthLabel()).toContain('December')
    prevYear.click()
    await nextTick()
    expect(getYearLabel()).toContain('1998')
    expect(getMonthLabel()).toContain('December')
    nextMonth.click()
    await nextTick()
    expect(getYearLabel()).toContain('1999')
    expect(getMonthLabel()).toContain('January')
    nextYear.click()
    await nextTick()
    expect(getYearLabel()).toContain('2000')
    expect(getMonthLabel()).toContain('January')
  })
  it('month with fewer dates', async () => {
    // July has 31 days, June has 30
    await initNavigationTest(new Date(2000, 6, 31))
    prevMonth.click()
    await nextTick()
    expect(getYearLabel()).toContain('2000')
    expect(getMonthLabel()).toContain('June')
  })
  it('year with fewer Feburary dates', async () => {
    // Feburary 2008 has 29 days, Feburary 2007 has 28
    await initNavigationTest(new Date(2008, 1, 29))
    prevYear.click()
    await nextTick()
    expect(getYearLabel()).toContain('2007')
    expect(getMonthLabel()).toContain('February')
  })
  it('month label with fewer dates', async () => {
    await initNavigationTest(new Date(2000, 6, 31))
    // drill down: year label -> year table -> pick 2001
    const yearLabel = document.querySelectorAll(
      '.el-date-picker__header-label'
    )[0]
    ;(yearLabel as HTMLElement).click()
    await nextTick()
    const year1999Label = document.querySelectorAll('.el-year-table td a')[1]
    ;(year1999Label as HTMLElement).click()
    await nextTick()
    const juneLabel = document.querySelectorAll('.el-month-table td a')[5]
    ;(juneLabel as HTMLElement).click()
    await nextTick()
    expect(getYearLabel()).toContain('2001')
    expect(getMonthLabel()).toContain('June')
    // month label -> month table -> pick January of the same year
    const monthLabel = document.querySelectorAll(
      '.el-date-picker__header-label'
    )[1]
    ;(monthLabel as HTMLElement).click()
    await nextTick()
    const janLabel = document.querySelectorAll('.el-month-table td a')[0]
    ;(janLabel as HTMLElement).click()
    await nextTick()
    expect(getYearLabel()).toContain('2001')
    expect(getMonthLabel()).toContain('January')
  })
})
// type="month": panel shows the month table only, and value-format applies.
describe('MonthPicker', () => {
  it('basic', async () => {
    const wrapper = _mount(
      `<el-date-picker
        type='month'
        v-model="value"
      />`,
      () => ({ value: new Date(2020, 7, 1) })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    // month table visible, year table absent in month mode
    expect(
      (document.querySelector('.el-month-table') as HTMLElement).style.display
    ).toBe('')
    expect(document.querySelector('.el-year-table')).toBeNull()
    // first cell is January
    ;(document.querySelector('.el-month-table a.cell') as HTMLElement).click()
    await nextTick()
    const vm = wrapper.vm as any
    expect(vm.value.getMonth()).toBe(0)
  })
  it('value-format', async () => {
    const valueFormat = '[Element-Plus] YYYY.MM'
    const wrapper = _mount(
      `
      <el-date-picker
        type="month"
        v-model="value"
        value-format="${valueFormat}"
      ></el-date-picker>
    `,
      () => ({ value: dayjs(new Date(2020, 7, 1)).format(valueFormat) })
    )
    await nextTick()
    // display format defaults to YYYY-MM regardless of value-format
    expect(wrapper.findComponent(Input).vm.modelValue).toBe('2020-08')
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    {
      ;(document.querySelector('.el-month-table a.cell') as HTMLElement).click()
    }
    await nextTick()
    expect(wrapper.findComponent(Input).vm.modelValue).toBe('2020-01')
    // model receives the formatted string, not a Date
    expect((wrapper.vm as any).value).toBe(
      dayjs(new Date(2020, 0, 1)).format(valueFormat)
    )
  })
})
// type="year": panel shows the year table only; d-arrow buttons page by
// decade, and value-format applies.
describe('YearPicker', () => {
  it('basic', async () => {
    const wrapper = _mount(
      `<el-date-picker
        type='year'
        v-model="value"
      />`,
      () => ({ value: new Date(2020, 7, 1) })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    expect(
      (document.querySelector('.el-year-table') as HTMLElement).style.display
    ).toBe('')
    expect(document.querySelector('.el-month-table')).toBeNull()
    const leftBtn = document.querySelector(
      '.el-icon-d-arrow-left'
    ) as HTMLElement
    const rightBtn = document.querySelector(
      '.el-icon-d-arrow-right'
    ) as HTMLElement
    // 1 click back + 2 clicks forward = net one decade forward
    let count = 2
    while (--count) {
      leftBtn.click()
    }
    count = 3
    while (--count) {
      rightBtn.click()
    }
    await nextTick()
    // first cell of the 2030s decade page
    ;(document.querySelector('.el-year-table a.cell') as HTMLElement).click()
    await nextTick()
    const vm = wrapper.vm as any
    expect(vm.value.getFullYear()).toBe(2030)
  })
  it('value-format', async () => {
    const valueFormat = '[Element-Plus] YYYY'
    const wrapper = _mount(
      `
      <el-date-picker
        type="year"
        v-model="value"
        value-format="${valueFormat}"
      ></el-date-picker>
    `,
      () => ({ value: dayjs(new Date(2005, 7, 1)).format(valueFormat) })
    )
    await nextTick()
    expect(wrapper.findComponent(Input).vm.modelValue).toBe('2005')
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    // pick whichever year the first cell shows and expect it formatted back
    const cell = document.querySelector('.el-year-table a.cell') as HTMLElement
    cell.click()
    await nextTick()
    expect((wrapper.vm as any).value).toBe(
      dayjs(new Date(Number.parseInt(cell.innerHTML.trim()), 0, 1)).format(
        valueFormat
      )
    )
  })
})
// type="week": week-mode highlighting and locale-dependent first weekday.
describe('WeekPicker', () => {
  it('create', async () => {
    const wrapper = _mount(
      `<el-date-picker
        type='week'
        v-model="value"
      />`,
      () => ({ value: new Date(2020, 7, 15) })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    expect(document.querySelector('.is-week-mode')).not.toBeNull()
    // select month still is in week-mode
    ;(
      document.querySelectorAll(
        '.el-date-picker__header-label'
      )[1] as HTMLElement
    ).click()
    await nextTick()
    ;(
      document.querySelectorAll('.el-month-table .cell')[7] as HTMLElement
    ).click()
    await nextTick()
    expect(document.querySelector('.is-week-mode')).not.toBeNull()
    const numberOfHighlightRows = () =>
      document.querySelectorAll('.el-date-table__row.current').length
    // click a day in the second row; the whole row becomes the selection
    ;(
      document.querySelector(
        '.el-date-table__row ~ .el-date-table__row td.available'
      ) as HTMLElement
    ).click()
    await nextTick()
    const vm = wrapper.vm as any
    expect(vm.value).not.toBeNull()
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    expect(numberOfHighlightRows()).toBe(1)
    // test: next month should not have highlight
    ;(document.querySelector('.el-icon-arrow-right') as HTMLElement).click()
    await nextTick()
    expect(numberOfHighlightRows()).toBe(0)
    // test: next year should not have highlight
    ;(document.querySelector('.el-icon-arrow-left') as HTMLElement).click()
    await nextTick()
    ;(document.querySelector('.el-icon-d-arrow-right') as HTMLElement).click()
    await nextTick()
    expect(numberOfHighlightRows()).toBe(0)
  })
  // zh-cn weeks start Monday (1), en weeks start Sunday (0); the emitted
  // value must be the start of the week in the active locale.
  ;[
    { locale: zhCn, name: 'Monday', value: 1 },
    { locale: enUs, name: 'Sunday', value: 0 },
  ].forEach((loObj) => {
    it(`emit first day of the week, ${loObj.locale.name} locale, ${loObj.name}`, async () => {
      const wrapper = mount(
        {
          components: {
            'el-date-picker': DatePicker,
            'el-config-provider': ConfigProvider,
          },
          template: `
          <el-config-provider :locale="locale">
            <el-date-picker
              type='week'
              v-model="value"
            />
          </el-config-provider>
        `,
          data() {
            return {
              locale: loObj.locale,
              value: '',
            }
          },
        },
        {
          attachTo: 'body',
        }
      )
      const input = wrapper.find('input')
      input.trigger('blur')
      input.trigger('focus')
      await nextTick()
      // click Wednesday
      ;(
        document.querySelectorAll(
          '.el-date-table__row ~ .el-date-table__row td'
        )[3] as HTMLElement
      ).click()
      await nextTick()
      const vm = wrapper.vm as any
      expect(vm.value).not.toBeNull()
      expect(+dayjs(vm.value).locale(loObj.locale.name)).toBe(
        +dayjs(vm.value).locale(loObj.locale.name).startOf('week')
      )
      expect(dayjs(vm.value).locale(loObj.locale.name).day()).toBe(loObj.value) // Sunday or Monday
    })
  })
})
// type="dates": multiple-date selection — clicking toggles dates in/out of
// the array model.
describe('DatePicker dates', () => {
  it('create', async () => {
    const wrapper = _mount(
      `<el-date-picker
        type='dates'
        v-model="value"
      />`,
      () => ({ value: '' })
    )
    const input = wrapper.find('input')
    input.trigger('blur')
    input.trigger('focus')
    await nextTick()
    const td = document.querySelectorAll(
      '.el-date-table__row .available'
    ) as NodeListOf<HTMLElement>
    const vm = wrapper.vm as any
    // selecting two dates grows the array to 2 and marks 2 cells selected
    td[0].click()
    await nextTick()
    expect(vm.value.length).toBe(1)
    td[1].click()
    await nextTick()
    expect(vm.value.length).toBe(2)
    expect(
      document.querySelectorAll('.el-date-table__row .selected').length
    ).toBe(2)
    // clicking the same cells again deselects them
    td[0].click()
    await nextTick()
    expect(vm.value.length).toBe(1)
    td[1].click()
    await nextTick()
    expect(vm.value.length).toBe(0)
  })
})
// type="daterange": two-panel range selection, ordering of reversed picks,
// reset, highlight classes, DST boundaries, and value-format.
describe('DateRangePicker', () => {
  it('create', async () => {
    let calendarChangeValue = null
    const changeHandler = jest.fn()
    const wrapper = _mount(
      `<el-date-picker
        type='daterange'
        v-model="value"
        @CalendarChange="onCalendarChange"
      />`,
      () => ({ value: '' }),
      {
        methods: {
          onCalendarChange(e) {
            calendarChangeValue = e
            changeHandler(e)
          },
        },
      }
    )
    const inputs = wrapper.findAll('input')
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    // two month panels; pick start on the left, end on the right
    const panels = document.querySelectorAll('.el-date-range-picker__content')
    expect(panels.length).toBe(2)
    ;(panels[0].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    ;(panels[1].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    // correct highlight
    const startDate = document.querySelectorAll('.start-date')
    const endDate = document.querySelectorAll('.end-date')
    const inRangeDate = document.querySelectorAll('.in-range')
    expect(startDate.length).toBe(1)
    expect(endDate.length).toBe(1)
    expect(inRangeDate.length).toBeGreaterThan(28)
    // value is array
    const vm = wrapper.vm as any
    expect(Array.isArray(vm.value)).toBeTruthy()
    // input text is something like date string
    expect(inputs[0].element.value.length).toBe(10)
    expect(inputs[1].element.value.length).toBe(10)
    // calendar-change event
    expect(changeHandler).toHaveBeenCalledTimes(2)
    expect(calendarChangeValue.length).toBe(2)
    expect(calendarChangeValue[0]).toBeInstanceOf(Date)
    expect(calendarChangeValue[1]).toBeInstanceOf(Date)
  })
  it('reverse selection', async () => {
    const wrapper = _mount(
      `<el-date-picker
        type='daterange'
        v-model="value"
      />`,
      () => ({ value: '' })
    )
    const inputs = wrapper.findAll('input')
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    // click end panel first, then start panel — picker must reorder
    const panels = document.querySelectorAll('.el-date-range-picker__content')
    ;(panels[1].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    ;(panels[0].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    // correct highlight
    const startDate = document.querySelectorAll('.start-date')
    const endDate = document.querySelectorAll('.end-date')
    const inRangeDate = document.querySelectorAll('.in-range')
    expect(startDate.length).toBe(1)
    expect(endDate.length).toBe(1)
    expect(inRangeDate.length).toBeGreaterThan(28)
    const vm = wrapper.vm as any
    expect(vm.value[0].getTime() < vm.value[1].getTime()).toBeTruthy()
  })
  it('reset selection', async () => {
    const wrapper = _mount(
      `<el-date-picker
        type='daterange'
        v-model="value"
      />`,
      () => ({ value: '' })
    )
    const inputs = wrapper.findAll('input')
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    const panels = document.querySelectorAll('.el-date-range-picker__content')
    ;(panels[1].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    ;(panels[0].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    // clearing the model must clear the panel highlight on reopen
    ;(wrapper.vm as any).value = ''
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    const inRangeDate = document.querySelectorAll('.in-range')
    expect(inRangeDate.length).toBe(0)
  })
  it('range, start-date and end-date', async () => {
    _mount(
      `<el-date-picker
        type='daterange'
        v-model="value"
      />`,
      () => ({ value: '' })
    )
    const table = document.querySelector('.el-date-table')
    const availableTds = (table as HTMLTableElement).querySelectorAll(
      'td.available'
    )
    ;(availableTds[0] as HTMLElement).click()
    await nextTick()
    ;(availableTds[1] as HTMLElement).click()
    await nextTick()
    expect(availableTds[0].classList.contains('in-range')).toBeTruthy()
    expect(availableTds[0].classList.contains('start-date')).toBeTruthy()
    expect(availableTds[1].classList.contains('in-range')).toBeTruthy()
    expect(availableTds[1].classList.contains('end-date')).toBeTruthy()
    // picking in reverse order yields the same highlight classes
    ;(availableTds[1] as HTMLElement).click()
    await nextTick()
    ;(availableTds[0] as HTMLElement).click()
    await nextTick()
    expect(availableTds[0].classList.contains('in-range')).toBeTruthy()
    expect(availableTds[0].classList.contains('start-date')).toBeTruthy()
    expect(availableTds[1].classList.contains('in-range')).toBeTruthy()
    expect(availableTds[1].classList.contains('end-date')).toBeTruthy()
    const startDate = document.querySelectorAll('.start-date')
    const endDate = document.querySelectorAll('.end-date')
    const inRangeDate = document.querySelectorAll('.in-range')
    expect(startDate.length).toBe(1)
    expect(endDate.length).toBe(1)
    expect(inRangeDate.length).toBe(2)
  })
  it('unlink:true', async () => {
    const wrapper = _mount(
      `<el-date-picker
        type='daterange'
        v-model="value"
        unlink-panels
      />`,
      () => ({ value: [new Date(2000, 9, 1), new Date(2000, 11, 2)] })
    )
    const inputs = wrapper.findAll('input')
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    const panels = document.querySelectorAll('.el-date-range-picker__content')
    const left = panels[0].querySelector('.el-date-range-picker__header')
    const right = panels[1].querySelector(
      '.is-right .el-date-range-picker__header'
    )
    expect(left.textContent).toBe('2000 October')
    expect(right.textContent).toBe('2000 December')
    // with unlink-panels only the right panel moves (+1 year, +1 month)
    ;(panels[1].querySelector('.el-icon-d-arrow-right') as HTMLElement).click()
    await nextTick()
    ;(panels[1].querySelector('.el-icon-arrow-right') as HTMLElement).click()
    await nextTick()
    expect(left.textContent).toBe('2000 October')
    expect(right.textContent).toBe('2002 January')
  })
  it('daylight saving time highlight', async () => {
    // Run test with environment variable TZ=Australia/Sydney
    // The following test uses Australian Eastern Daylight Time (AEDT)
    // AEST -> AEDT shift happened on 2016-10-02 02:00:00
    const wrapper = _mount(
      `<el-date-picker
        type='daterange'
        v-model="value"
        unlink-panels
      />`,
      () => ({ value: [new Date(2016, 9, 1), new Date(2016, 9, 3)] })
    )
    const inputs = wrapper.findAll('input')
    inputs[0].trigger('blur')
    inputs[0].trigger('focus')
    await nextTick()
    const startDate = document.querySelectorAll('.start-date')
    const endDate = document.querySelectorAll('.end-date')
    expect(startDate.length).toBe(1)
    expect(endDate.length).toBe(1)
  })
  it('value-format', async () => {
    const valueFormat = 'DD/MM YYYY'
    const wrapper = _mount(
      `
      <el-date-picker
        v-model="value"
        type="daterange"
        format="YYYY-MM-DD"
        value-format="${valueFormat}"
      />`,
      () => ({
        value: [
          dayjs(new Date(2021, 4, 2)).format(valueFormat),
          dayjs(new Date(2021, 4, 12)).format(valueFormat),
        ],
      })
    )
    await nextTick()
    // inputs display in `format`, while the model stays in `value-format`
    const [startInput, endInput] = wrapper.findAll('input')
    expect(startInput.element.value).toBe('2021-05-02')
    expect(endInput.element.value).toBe('2021-05-12')
    startInput.trigger('blur')
    startInput.trigger('focus')
    await nextTick()
    const panels = document.querySelectorAll('.el-date-range-picker__content')
    expect(panels.length).toBe(2)
    ;(panels[0].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    ;(panels[1].querySelector('td.available') as HTMLElement).click()
    await nextTick()
    expect((wrapper.vm as any).value.toString()).toBe(
      ['01/05 2021', '01/06 2021'].toString()
    )
  })
})
describe('MonthRange', () => {
it('works', async () => {
const wrapper = _mount(
`<el-date-picker
type='monthrange'
v-model="value"
/>`,
() => ({ value: '' })
)
const inputs = wrapper.findAll('input')
inputs[0].trigger('blur')
inputs[0].trigger('focus')
await nextTick()
const panels = document.querySelectorAll('.el-date-range-picker__content')
expect(panels.length).toBe(2)
const p0 = <HTMLElement>panels[0].querySelector('td:not(.disabled)')
p0.click()
await nextTick()
const p1 = <HTMLElement>panels[1].querySelector('td:not(.disabled)')
p1.click()
await nextTick()
inputs[0].trigger('blur')
inputs[0].trigger('focus')
// correct highlight
const startDate = document.querySelectorAll('.start-date')
const endDate = document.querySelectorAll('.end-date')
const inRangeDate = document.querySelectorAll('.in-range')
expect(startDate.length).toBe(1)
expect(endDate.length).toBe(1)
expect(inRangeDate.length).toBeGreaterThan(0)
// value is array
const vm = wrapper.vm as any
expect(Array.isArray(vm.value)).toBeTruthy()
// input text is something like date string
expect(inputs[0].element.value.length).toBe(7)
expect(inputs[1].element.value.length).toBe(7)
// reverse selection
p1.click()
await nextTick()
p0.click()
await nextTick()
expect(vm.value[0].getTime() < vm.value[1].getTime()).toBeTruthy()
})
// Verifies range highlighting classes for a monthrange picker, including
// re-selection in reverse click order on the same (linked) panel.
it('range, start-date and end-date', async () => {
_mount(
`<el-date-picker
type='monthrange'
v-model="value"
/>`,
() => ({ value: '' })
)
// First click marks the range start, second click the range end.
const table = document.querySelector('.el-month-table')
const tds = (table as HTMLTableElement).querySelectorAll('td')
;(tds[0] as HTMLElement).click()
await nextTick()
;(tds[1] as HTMLElement).click()
await nextTick()
expect(tds[0].classList.contains('in-range')).toBeTruthy()
expect(tds[0].classList.contains('start-date')).toBeTruthy()
expect(tds[1].classList.contains('in-range')).toBeTruthy()
expect(tds[1].classList.contains('end-date')).toBeTruthy()
// Re-select in reverse order: later cell first, earlier cell second.
// The picker should normalize so tds[0] is still the start.
;(tds[1] as HTMLElement).click()
await nextTick()
;(tds[0] as HTMLElement).click()
await nextTick()
expect(tds[0].classList.contains('in-range')).toBeTruthy()
expect(tds[0].classList.contains('start-date')).toBeTruthy()
expect(tds[1].classList.contains('in-range')).toBeTruthy()
expect(tds[1].classList.contains('end-date')).toBeTruthy()
// Exactly one start, one end, and a two-cell range overall.
const startDate = document.querySelectorAll('.start-date')
const endDate = document.querySelectorAll('.end-date')
const inRangeDate = document.querySelectorAll('.in-range')
expect(startDate.length).toBe(1)
expect(endDate.length).toBe(1)
expect(inRangeDate.length).toBe(2)
})
// With unlink-panels, the left and right panels show independent years and
// only the clicked panel's arrows move that panel.
it('type:monthrange unlink:true', async () => {
const wrapper = _mount(
`<el-date-picker
type='monthrange'
v-model="value"
unlink-panels
/>`,
() => ({ value: [new Date(2000, 9), new Date(2002, 11)] })
)
const inputs = wrapper.findAll('input')
inputs[0].trigger('blur')
inputs[0].trigger('focus')
await nextTick()
const panels = document.querySelectorAll('.el-date-range-picker__content')
const left = panels[0].querySelector('.el-date-range-picker__header')
const right = panels[1].querySelector(
'.is-right .el-date-range-picker__header'
)
expect(left.textContent).toContain(2000)
expect(right.textContent).toContain(2002)
// Advance only the right panel by one year; the left must stay put.
;(panels[1].querySelector('.el-icon-d-arrow-right') as HTMLElement).click()
await nextTick()
expect(left.textContent).toContain(2000)
expect(right.textContent).toContain(2003)
})
// Regression check: a range spanning a DST transition (Jul–Dec 2016) must
// still produce exactly one start-date and one end-date highlight.
it('daylight saving time highlight', async () => {
const wrapper = _mount(
`<el-date-picker
type='monthrange'
v-model="value"
unlink-panels
/>`,
// Note: month index 12 rolls over to January of the following year.
() => ({ value: [new Date(2016, 6), new Date(2016, 12)] })
)
const inputs = wrapper.findAll('input')
inputs[0].trigger('blur')
inputs[0].trigger('focus')
await nextTick()
const startDate = document.querySelectorAll('.start-date')
const endDate = document.querySelectorAll('.end-date')
expect(startDate.length).toBe(1)
expect(endDate.length).toBe(1)
})
// Ensures popper-options supplied to el-date-picker are forwarded untouched
// to the underlying CommonPicker component.
it('should accept popper options and pass down', async () => {
const ElPopperOptions = {
strategy: 'fixed',
}
const wrapper = _mount(
`<el-date-picker
type='monthrange'
v-model="value"
:popper-options="options"
unlink-panels
/>`,
() => ({
value: [new Date(2016, 6), new Date(2016, 12)],
options: ElPopperOptions,
}),
// Provide the same options via injection, as the component expects.
{
provide() {
return {
ElPopperOptions,
}
},
}
)
await nextTick()
expect(
(wrapper.findComponent(CommonPicker).vm as any).elPopperOptions
).toEqual(ElPopperOptions)
})
}) | the_stack |
import { grpc } from '@improbable-eng/grpc-web'
import { UnaryOutput } from '@improbable-eng/grpc-web/dist/typings/unary'
import { Context, ContextInterface, defaultHost } from '@textile/context'
import { Identity } from '@textile/crypto'
import { WebsocketTransport } from '@textile/grpc-transport'
import {
bytesFromAddr,
bytesToOptions,
bytesToTuples,
stringFromBytes,
} from '@textile/multiaddr'
import { KeyInfo, ThreadKey, UserAuth } from '@textile/security'
import * as pb from '@textile/threads-client-grpc/threads_pb'
import {
API,
APIGetToken,
APIListen,
} from '@textile/threads-client-grpc/threads_pb_service'
import { ThreadID } from '@textile/threads-id'
import 'fastestsmallesttextencoderdecoder'
import toJsonSchema, { JSONSchema3or4 } from 'to-json-schema'
import {
ComparisonJSON,
CriterionJSON,
Event,
Filter,
Patch,
PatchType,
Query,
QueryJSON,
ReadTransaction,
SortJSON,
ValueJSON,
Where,
WriteTransaction,
} from './models'
export {
Filter,
Query,
Where,
WriteTransaction,
ReadTransaction,
QueryJSON,
ValueJSON,
ComparisonJSON,
CriterionJSON,
SortJSON,
JSONSchema3or4,
Event,
Patch,
PatchType,
}
/**
 * Returns true when `obj` is a plain object with no own enumerable keys.
 * Nullish values are also treated as empty so callers can safely fall back
 * to a default (the original threw a TypeError on null). Arrays and class
 * instances are never considered "empty" here.
 */
function isEmpty(obj: any): boolean {
  if (obj == null) return true
  return Object.keys(obj).length === 0 && obj.constructor === Object
}
/**
 * Extracts the body of a function — the text between the outermost braces —
 * from either a function value or its source string.
 * Comments are stripped first so a `{` or `}` inside a comment cannot throw
 * off the brace search.
 */
export function getFunctionBody(
  fn: ((...args: any[]) => any) | string,
): string {
  // https://stackoverflow.com/a/25229488/1256988
  const stripComments = (src: string): string =>
    src.replace(/(?:\/\*(?:[\s\S]*?)\*\/)|(?:([\s;])+\/\/(?:.*)$)/gm, '$1')
  const source = stripComments(fn.toString())
  const open = source.indexOf('{')
  const close = source.lastIndexOf('}')
  return source.substring(open + 1, close)
}
/**
 * CollectionConfig is the configuration options for creating and updating a Collection.
 * It supports the following configuration options:
 * - Name: The name of the collection, e.g, "Animals" (must be unique per DB).
 * - Schema: A JSON Schema), which is used for instance validation.
 * - Indexes: An optional list of index configurations, which define how instances are indexed.
 * - WriteValidator: An optional JavaScript (ECMAScript 5.1) function that is used to validate
 * instances on write.
 * - ReadFilter: An optional JavaScript (ECMAScript 5.1) function that is used to filter
 * instances on read.
 *
 * The `writeValidator` function receives three arguments:
 * - writer: The multibase-encoded public key identity of the writer.
 * - event: An object describing the update event (see core.Event).
 * - instance: The current instance as a JavaScript object before the update event is applied.
 *
 * A falsy return value indicates a failed validation. Note that the function arguments must
 * be named as documented here (writer, event, instance). These functions run in a secure sandbox
 * where these argument names are specified.
 *
 * Having access to writer, event, and instance opens the door to a variety of app-specific logic.
 * Textile Buckets file-level access roles are implemented in part with a write validator.
 *
 * The `readFilter` function receives two arguments:
 * - reader: The multibase-encoded public key identity of the reader.
 * - instance: The current instance as a JavaScript object.
 *
 * The function must return a JavaScript object. Most implementations will modify and return the
 * current instance. Note that the function arguments must be named as documented here (reader,
 * instance). These functions run in a secure sandbox where these argument names are specified.
 * Like write validation, read filtering opens the door to a variety of app-specific logic.
 * Textile Buckets file-level access roles are implemented in part with a read filter.
 */
export interface CollectionConfig<W = any, R = W> {
name: string
schema?: JSONSchema3or4 | any // JSONSchema is preferred but any works
indexes?: pb.Index.AsObject[]
// Validators/filters may be supplied as functions or as pre-extracted source strings.
writeValidator?:
| ((writer: string, event: Event, instance: W) => boolean)
| string
readFilter?: ((reader: string, instance: R) => R) | string
}
// Shared UTF-8 codecs for (de)serializing schemas and instances over gRPC.
const encoder = new TextEncoder()
const decoder = new TextDecoder()
/**
 * Heuristically decides whether a host looks local/private and is therefore
 * unlikely to be dialable from outside. Covers localhost forms plus the
 * RFC 1918 private IPv4 ranges (10/8, 172.16/12, 192.168/16) and `.local`
 * mDNS names. Previously only `10.0.*` of the 10/8 range (and none of
 * 172.16/12) was recognized; this covers the full private ranges.
 * Truthy return means "maybe local"; the match-array variants are truthy.
 */
export function maybeLocalAddr(ip: string): boolean | RegExpMatchArray {
  return (
    ['localhost', '', '::1'].includes(ip) ||
    // Loopback: 127.0.0.0/8 with octet range validation
    ip.match(/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/) ||
    // Private 172.16.0.0/12 (172.16.* through 172.31.*)
    ip.match(/^172\.(?:1[6-9]|2[0-9]|3[01])\./) ||
    ip.startsWith('192.168.') ||
    // Private 10.0.0.0/8
    ip.startsWith('10.') ||
    ip.endsWith('.local')
  )
}
/** The kind of mutation that an {@link Update} event describes. */
export enum Action {
CREATE = 0,
SAVE,
DELETE,
}
/** A single change event for an instance in a collection. */
export interface Update<T = unknown> {
// The instance payload; may be undefined — NOTE(review): presumably for
// DELETE events, confirm against the listen API.
instance: T | undefined
collectionName: string
instanceID: string
action: Action
}
/**
 * DBInfo contains joining/sharing information for a Thread/DB.
 */
export interface DBInfo {
/**
 * The Thread Key, encoded as a base32 string.
 * @see {@link ThreadKey} for details.
 */
key: string
/**
 * The Multiaddrs for a peer hosting the given Thread/DB.
 */
addrs: string[]
}
/**
* Client is a web-gRPC wrapper client for communicating with a webgRPC-enabled Threads server.
* This client library can be used to interact with a local or remote Textile gRPC-service
* It is a wrapper around Textile Thread's 'DB' API, which is defined here:
* https://github.com/textileio/go-threads/blob/master/api/pb/api.proto.
*
* @example
* ```typescript
* import {Client, Identity, UserAuth} from '@textile/threads'
*
* async function setupDB(auth: UserAuth, identity: Identity) {
* // Initialize the client
* const client = Client.withUserAuth(auth)
*
* // Connect the user to your API
* const userToken = await client.getToken(identity)
*
* // Create a new DB
* const threadID = await client.newDB(undefined, 'nasa')
*
* // Create a new Collection from an Object
* const buzz = {
* name: 'Buzz',
* missions: 2,
* _id: '',
* }
* await client.newCollectionFromObject(threadID, buzz, { name: 'astronauts' })
*
* // Store the buzz object in the new collection
* await client.create(threadID, 'astronauts', [buzz])
*
* return threadID
* }
* ```
*/
export class Client {
// The remote gRPC host this client targets (copied from the context at construction).
public serviceHost: string
// Transport + debug options applied to every gRPC call made by this client.
public rpcOptions: grpc.RpcOptions
/**
 * Creates a new gRPC client instance for accessing the Textile Threads API.
 * @param context The context to use for interacting with the APIs. Can be modified later.
 * @param debug Should we run in debug mode. Defaults to false.
 */
constructor(public context: ContextInterface = new Context(), debug = false) {
this.serviceHost = context.host
this.rpcOptions = {
transport: WebsocketTransport(),
debug,
}
}
/**
* Create a new gRPC client instance from a supplied user auth object.
* Assumes all default gRPC settlings. For customization options, use a context object directly.
* The callback method will automatically refresh expiring credentials.
* @param auth The user auth object or an async callback that returns a user auth object.
* @example
* ```typescript
* import {UserAuth, Client} from '@textile/threads'
*
* function create (auth: UserAuth) {
* return Client.withUserAuth(auth)
* }
* ```
* @example
* ```typescript
* import {UserAuth, Client} from '@textile/threads'
*
* function setCallback (callback: () => Promise<UserAuth>) {
* return Client.withUserAuth(callback)
* }
* ```
*/
static withUserAuth(
auth: UserAuth | (() => Promise<UserAuth>),
host = defaultHost,
debug = false,
): Client {
const context =
typeof auth === 'object'
? Context.fromUserAuth(auth, host)
: Context.fromUserAuthCallback(auth, host)
return new Client(context, debug)
}
/**
* Create a new gRPC client instance from a supplied key and secret
* @param key The KeyInfo object containing {key: string, secret: string, type: 0}. 0 === User Group Key, 1 === Account Key
* @param host The remote gRPC host to connect with. Should be left as default.
* @param debug Whether to run in debug mode. Defaults to false.
* @example
* ```typescript
* import {KeyInfo, Client} from '@textile/threads'
*
* async function create (keyInfo: KeyInfo) {
* return await Client.withKeyInfo(keyInfo)
* }
* ```
*/
static async withKeyInfo(
key: KeyInfo,
host = defaultHost,
debug = false,
): Promise<Client> {
return new Client(await new Context(host).withKeyInfo(key), debug)
}
/**
* Obtain a token per user (identity) for interacting with the remote API.
* @param identity A user identity to use for creating records in the database. A random identity
* can be created with `Client.randomIdentity(), however, it is not easy/possible to migrate
* identities after the fact. Please store or otherwise persist any identity information if
* you wish to retrieve user data later, or use an external identity provider.
* @param ctx Context object containing web-gRPC headers and settings.
* @example
* ```typescript
* import {Client, Identity} from '@textile/threads'
*
* async function newToken (client: Client, user: Identity) {
* // Token is added to the client connection at the same time
* const token = await client.getToken(user)
* return token
* }
* ```
*/
getToken(identity: Identity, ctx?: ContextInterface): Promise<string> {
return this.getTokenChallenge(
identity.public.toString(),
async (challenge: Uint8Array) => {
return identity.sign(challenge)
},
ctx,
)
}
/**
 * Obtain a token per user (identity) for interacting with the remote API.
 * @param publicKey The public key of a user identity to use for creating records in the database.
 * A random identity can be created with `Client.randomIdentity(), however, it is not
 * easy/possible to migrate identities after the fact. Please store or otherwise persist any
 * identity information if you wish to retrieve user data later, or use an external identity
 * provider.
 * @param callback A callback function that takes a `challenge` argument and returns a signed
 * message using the input challenge and the private key associated with `publicKey`.
 * @param ctx Context object containing web-gRPC headers and settings.
 * @remarks `publicKey` must be the corresponding public key of the private key used in `callback`.
 */
getTokenChallenge(
publicKey: string,
callback: (challenge: Uint8Array) => Uint8Array | Promise<Uint8Array>,
ctx?: ContextInterface,
): Promise<string> {
// Bidirectional streaming call: we send the key, the server replies with a
// challenge, we send back the signature, and the server replies with a token.
const client = grpc.client<
pb.GetTokenRequest,
pb.GetTokenReply,
APIGetToken
>(API.GetToken, {
host: this.serviceHost,
transport: this.rpcOptions.transport,
debug: this.rpcOptions.debug,
})
return new Promise<string>((resolve, reject) => {
let token = ''
client.onMessage(async (message: pb.GetTokenReply) => {
if (message.hasChallenge()) {
// Server issued a challenge: sign it and send the signature back,
// then close our side of the stream.
const challenge = message.getChallenge_asU8()
const signature = await callback(challenge)
const req = new pb.GetTokenRequest()
req.setSignature(signature)
client.send(req)
client.finishSend()
} else if (message.hasToken()) {
// Final server message carries the token; resolution happens in onEnd.
token = message.getToken()
}
})
client.onEnd((
code: grpc.Code,
message: string /** trailers: grpc.Metadata */,
) => {
client.close()
if (code === grpc.Code.OK) {
// Side effect: attach the fresh token to this client's context.
this.context.withToken(token)
resolve(token)
} else {
reject(new Error(message))
}
})
// Kick off the exchange by sending the public key.
const req = new pb.GetTokenRequest()
req.setKey(publicKey)
this.context.toMetadata(ctx).then((metadata) => {
client.start(metadata)
client.send(req)
})
})
}
/**
* newDB creates a new store on the remote node.
* @param threadID the ID of the database
* @param name The human-readable name for the database
* @example
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* async function createDB (client: Client) {
* const threadID: ThreadID = await client.newDB()
* return threadID
* }
* ```
*/
public newDB(threadID?: ThreadID, name?: string): Promise<ThreadID> {
const dbID = threadID ?? ThreadID.fromRandom()
const req = new pb.NewDBRequest()
req.setDbid(dbID.toBytes())
if (name !== undefined) {
this.context.withThreadName(name)
req.setName(name)
}
return this.unary(API.NewDB, req, () => {
// Update our context with out new thread id
this.context.withThread(dbID.toString())
return dbID
})
}
/**
* open creates and enters a new store on the remote node.
* @param threadID the ID of the database
* @param name The human-readable name for the database
* @example
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* async function openDB (client: Client, threadID: ThreadID) {
* await client.open(threadID)
* }
* ```
*/
public async open(threadID: ThreadID, name?: string): Promise<void> {
const req = new pb.ListDBsRequest()
// Check if we already have this thread on the client...
const found = await this.unary(API.ListDBs, req, (res: pb.ListDBsReply) => {
for (const db of res.getDbsList()) {
const id = ThreadID.fromBytes(db.getDbid_asU8())
if (id === threadID) {
this.context.withThread(threadID.toString())
return true
}
}
return false
})
// If yes, use that one...
if (found) return
// Otherwise, try to create a new one
await this.newDB(threadID, name)
this.context.withThread(threadID.toString())
}
/**
* Deletes an entire DB.
* @param threadID the ID of the database.
* @example
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* async function deleteDB (client: Client, threadID: ThreadID) {
* await client.deleteDB(threadID)
* return
* }
* ```
*/
public deleteDB(threadID: ThreadID): Promise<void> {
const req = new pb.DeleteDBRequest()
req.setDbid(threadID.toBytes())
return this.unary(API.DeleteDB, req)
}
/**
* Lists all known DBs.
*/
public listDBs(): Promise<Array<{ id: string; name?: string }>> {
const req = new pb.ListDBsRequest()
return this.unary(API.ListDBs, req, (res: pb.ListDBsReply) => {
const dbs: Array<{ id: string; name?: string }> = []
for (const db of res.getDbsList()) {
const id = ThreadID.fromBytes(db.getDbid_asU8()).toString()
dbs.push({
id,
name: db.getInfo()?.getName(),
})
}
return dbs
})
}
/**
* Lists the collections in a thread
* @param thread the ID of the database
*/
public listCollections(
thread: ThreadID,
): Promise<Array<pb.GetCollectionInfoReply.AsObject>> {
const req = new pb.ListCollectionsRequest()
req.setDbid(thread.toBytes())
return this.unary(
API.ListCollections,
req,
(res: pb.ListCollectionsReply) => res.toObject().collectionsList,
)
}
/**
* newCollection registers a new collection schema under the given name.
* The schema must be a valid json-schema.org schema, and can be a JSON string or object.
* @param threadID the ID of the database
* @param config A configuration object for the collection. See {@link CollectionConfig}. Note
* that the validator and filter functions can also be provided as strings.
*
* @example
* Create a new astronauts collection
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* const astronauts = {
* title: "Astronauts",
* type: "object",
* required: ["_id"],
* properties: {
* _id: {
* type: "string",
* description: "The instance's id.",
* },
* name: {
* type: "string",
* description: "The astronauts name.",
* },
* missions: {
* description: "The number of missions.",
* type: "integer",
* minimum: 0,
* },
* },
* }
*
* async function newCollection (client: Client, threadID: ThreadID) {
* return await client.updateCollection(threadID, { name: 'astronauts', schema: astronauts })
* }
* ```
* @example
* Create a collection with writeValidator and readFilter functions
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* const schema = {
* title: "Person",
* type: "object",
* required: ["_id"],
* properties: {
* _id: { type: "string" },
* name: { type: "string" },
* age: { type: "integer" },
* },
* }
*
* // We'll create a helper interface for type-safety
* interface Person {
* _id: string
* age: number
* name: string
* }
*
* const writeValidator = (writer: string, event: any, instance: Person) => {
* var type = event.patch.type
* var patch = event.patch.json_patch
* switch (type) {
* case "delete":
* if (writer != "the_boss") {
* return false // Not the boss? No deletes for you.
* }
* default:
* return true
* }
* }
*
* const readFilter = (reader: string, instance: Person) => {
* if (instance.age > 50) {
* delete instance.age // Let's just hide their age for them ;)
* }
* return instance
* }
*
* async function newCollection (client: Client, threadID: ThreadID) {
* return await client.updateCollection(threadID, {
* name: 'Person', schema, writeValidator, readFilter
* })
* }
* ```
*/
public newCollection(
threadID: ThreadID,
config: CollectionConfig,
): Promise<void> {
const req = new pb.NewCollectionRequest()
const conf = new pb.CollectionConfig()
conf.setName(config.name)
if (config.schema === undefined || isEmpty(config.schema)) {
// We'll use our default schema
config.schema = { properties: { _id: { type: 'string' } } }
}
conf.setSchema(encoder.encode(JSON.stringify(config.schema)))
if (config.writeValidator) {
conf.setWritevalidator(getFunctionBody(config.writeValidator))
}
if (config.readFilter) {
conf.setReadfilter(getFunctionBody(config.readFilter))
}
const idx: pb.Index[] = []
for (const item of config.indexes ?? []) {
const index = new pb.Index()
index.setPath(item.path)
index.setUnique(item.unique)
idx.push(index)
}
conf.setIndexesList(idx)
req.setDbid(threadID.toBytes())
req.setConfig(conf)
return this.unary(API.NewCollection, req)
}
/**
* newCollectionFromObject creates and registers a new collection under the given name.
* The input object must be serializable to JSON, and contain only json-schema.org types.
* @param threadID the ID of the database
* @param obj The actual object to attempt to extract a schema from.
* @param config A configuration object for the collection. See {@link CollectionConfig}.
*
* @example
* Change a new astronauts collection based of Buzz
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* async function newCollection (client: Client, threadID: ThreadID) {
* const buzz = {
* name: 'Buzz',
* missions: 2,
* _id: '',
* }
* return await client.newCollectionFromObject(threadID, buzz, { name: 'astronauts' })
* }
* ```
*/
public newCollectionFromObject(
threadID: ThreadID,
obj: Record<string, any>,
config: Omit<CollectionConfig, 'schema'>,
): Promise<void> {
const schema: JSONSchema3or4 = toJsonSchema(obj)
return this.newCollection(threadID, { ...config, schema })
}
/**
* updateCollection updates an existing collection.
* Currently, updates can include name and schema.
* @todo Allow update of indexing information.
* @param threadID the ID of the database
* @param config A configuration object for the collection. See {@link CollectionConfig}.
*
* @example
* Change the name of our astronauts collection
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* const astronauts = {
* title: "Astronauts",
* type: "object",
* required: ["_id"],
* properties: {
* _id: {
* type: "string",
* description: "The instance's id.",
* },
* name: {
* type: "string",
* description: "The astronauts name.",
* },
* missions: {
* description: "The number of missions.",
* type: "integer",
* minimum: 0,
* },
* },
* }
*
* async function changeName (client: Client, threadID: ThreadID) {
* return await client.updateCollection(threadID, { name: 'toy-story-characters', schema: astronauts })
* }
* ```
*/
public updateCollection(
threadID: ThreadID,
// Everything except "name" is optional here
config: CollectionConfig,
): Promise<void> {
const req = new pb.UpdateCollectionRequest()
const conf = new pb.CollectionConfig()
conf.setName(config.name)
if (config.schema === undefined || isEmpty(config.schema)) {
// We'll use our default schema
config.schema = { properties: { _id: { type: 'string' } } }
}
conf.setSchema(encoder.encode(JSON.stringify(config.schema)))
if (config.writeValidator) {
conf.setWritevalidator(getFunctionBody(config.writeValidator))
}
if (config.readFilter) {
conf.setReadfilter(getFunctionBody(config.readFilter))
}
const idx: pb.Index[] = []
for (const item of config.indexes ?? []) {
const index = new pb.Index()
index.setPath(item.path)
index.setUnique(item.unique)
idx.push(index)
}
conf.setIndexesList(idx)
req.setDbid(threadID.toBytes())
req.setConfig(conf)
return this.unary(API.UpdateCollection, req)
}
/**
* Deletes an existing collection.
* @param threadID the ID of the database.
* @param name The human-readable name for the collection.
* @param schema The actual json-schema.org compatible schema object.
* @example
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* async function deleteAstronauts (client: Client, thread: ThreadID) {
* await client.deleteCollection(thread, 'astronauts')
* return
* }
* ```
*/
public deleteCollection(threadID: ThreadID, name: string): Promise<void> {
const req = new pb.DeleteCollectionRequest()
req.setDbid(threadID.toBytes())
req.setName(name)
return this.unary(API.DeleteCollection, req)
}
/**
* Returns an existing indexes for a collection.
* @param threadID the ID of the database.
* @param name The human-readable name for the collection.
*
* @example
* Return a set of indexes for our astronauts collection
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* async function getIndexes (client: Client, threadID: ThreadID) {
* return await client.getCollectionIndexes(threadID, 'astronauts')
* }
* ```
*/
public getCollectionIndexes(
threadID: ThreadID,
name: string,
): Promise<pb.Index.AsObject[]> {
const req = new pb.GetCollectionIndexesRequest()
req.setDbid(threadID.toBytes())
req.setName(name)
return this.unary(
API.GetCollectionIndexes,
req,
(res: pb.GetCollectionIndexesReply) => res.toObject().indexesList,
)
}
public getCollectionInfo(
threadID: ThreadID,
name: string,
): Promise<CollectionConfig> {
const req = new pb.GetCollectionInfoRequest()
req.setDbid(threadID.toBytes())
req.setName(name)
return this.unary(
API.GetCollectionInfo,
req,
(res: pb.GetCollectionInfoReply) => {
const result: CollectionConfig = {
schema: JSON.parse(decoder.decode(res.getSchema_asU8())),
name: res.getName(),
indexes: res.getIndexesList().map((index) => index.toObject()),
// We'll always return strings in this case for safety reasons
writeValidator: res.getWritevalidator(),
readFilter: res.getReadfilter(),
}
return result
},
)
}
/**
* newDBFromAddr initializes the client with the given store, connecting to the given
* thread address (database). It should be called before any operation on the store, and is an
* alternative to start, which creates a local store. newDBFromAddr should also include the
* read/follow key, which should be a Uint8Array or base32-encoded string.
* @remarks
* See getDBInfo for a possible source of the address and keys. See {@link ThreadKey} for
* information about thread keys.
* @param address The address for the thread with which to connect.
* Should be of the form /ip4/<url/ip-address>/tcp/<port>/p2p/<peer-id>/thread/<thread-id>
* @param key The set of keys to use to connect to the database
* @param collections Array of CollectionConfig objects for seeding the DB with collections.
*/
public newDBFromAddr(
address: string,
key: string | Uint8Array,
collections?: Array<CollectionConfig>,
): Promise<ThreadID> {
const req = new pb.NewDBFromAddrRequest()
const addr = bytesFromAddr(address)
req.setAddr(addr)
// Should always be encoded string, but might already be bytes
req.setKey(
typeof key === 'string' ? ThreadKey.fromString(key).toBytes() : key,
)
if (collections !== undefined) {
req.setCollectionsList(
collections.map((c) => {
const config = new pb.CollectionConfig()
config.setName(c.name)
config.setSchema(encoder.encode(JSON.stringify(c.schema)))
const { indexes } = c
if (indexes !== undefined) {
const idxs = indexes.map((idx) => {
const index = new pb.Index()
index.setPath(idx.path)
index.setUnique(idx.unique)
return index
})
config.setIndexesList(idxs)
}
return config
}),
)
}
return this.unary(API.NewDBFromAddr, req, (/** res: pb.NewDBReply */) => {
// Hacky way to extract threadid from addr that succeeded
// TODO: Return this directly from the gRPC API on the go side?
const result = bytesToTuples(req.getAddr_asU8()).filter(
([key]) => key === 406,
)
return ThreadID.fromString(result[0][1] as string)
})
}
/**
* Connect client to an existing database using information in the DBInfo object
* This should be called before any operation on the store, and is an alternative
* to open, which re-opens a database already opened by the user.
* @remarks This is a helper method around newDBFromAddr, which takes the 'raw' output
* from getDBInfo. See getDBInfo for a possible source of the address and keys.
* @param info The output from a call to getDBInfo on a separate peer.
* @param includeLocal Whether to try dialing addresses that appear to be on the local host.
* Defaults to false, preferring to add from public ip addresses.
* @param collections Array of `name` and JSON schema pairs for seeding the DB with collections.
*
* @example
* Get DB info and use DB info to join an existing remote thread (e.g. invited)
* ```typescript
* import {Client, DBInfo, ThreadID} from '@textile/threads'
*
* async function getInfo (client: Client, threadID: ThreadID): Promise<DBInfo> {
* return await client.getDBInfo(threadID)
* }
*
* async function joinFromInfo (client: Client, info: DBInfo) {
* return await client.joinFromInfo(info)
* }
* ```
*/
public joinFromInfo(
info: DBInfo,
includeLocal = false,
collections?: Array<CollectionConfig>,
): Promise<ThreadID> {
const req = new pb.NewDBFromAddrRequest()
const filtered = info.addrs
.map(bytesFromAddr)
.filter(
(addr) => includeLocal || !maybeLocalAddr(bytesToOptions(addr).host),
)
for (const addr of filtered) {
req.setAddr(addr)
// Should always be encoded string, but might already be bytes
req.setKey(
typeof info.key === 'string'
? ThreadKey.fromString(info.key).toBytes()
: info.key,
)
if (collections !== undefined) {
req.setCollectionsList(
collections.map((c) => {
const config = new pb.CollectionConfig()
config.setName(c.name)
config.setSchema(encoder.encode(JSON.stringify(c.schema)))
const { indexes } = c
if (indexes !== undefined) {
const idxs = indexes.map((idx) => {
const index = new pb.Index()
index.setPath(idx.path)
index.setUnique(idx.unique)
return index
})
config.setIndexesList(idxs)
}
return config
}),
)
}
// Try to add addrs one at a time, if one succeeds, we are done.
return this.unary(API.NewDBFromAddr, req, () => {
// Hacky way to extract threadid from addr that succeeded
// @todo: Return this directly from the gRPC API?
const result = bytesToTuples(req.getAddr_asU8()).filter(
([key]) => key === 406,
)
return ThreadID.fromString(result[0][1] as string)
})
}
throw new Error('No viable addresses for dialing')
}
/**
* Returns a DBInfo objection containing metadata required to invite other peers to join a given thread.
* @param threadID the ID of the database
* @returns An object with an encoded thread key, and a list of multiaddrs.
*
* @example
* Get DB info and use DB info to join an existing remote thread (e.g. invited)
* ```typescript
* import {Client, DBInfo, ThreadID} from '@textile/threads'
*
* async function getInfo (client: Client, threadID: ThreadID): Promise<DBInfo> {
* return await client.getDBInfo(threadID)
* }
*
* async function joinFromInfo (client: Client, info: DBInfo) {
* return await client.joinFromInfo(info)
* }
* ```
*/
public getDBInfo(threadID: ThreadID): Promise<DBInfo> {
const req = new pb.GetDBInfoRequest()
req.setDbid(threadID.toBytes())
return this.unary(API.GetDBInfo, req, (res: pb.GetDBInfoReply) => {
const key = ThreadKey.fromBytes(res.getKey_asU8())
const addrs: string[] = []
for (const addr of res.getAddrsList()) {
const address = stringFromBytes(addr as Uint8Array)
addrs.push(address)
}
return { key: key.toString(), addrs }
})
}
/**
* Creates a new model instance in the given store.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
* @param values An array of model instances as JSON/JS objects.
*
* @example
* Create a new entry in our collection
* ```typescript
* import {Client, ThreadID, Where} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
*
* async function createBuzz (client: Client, threadID: ThreadID) {
* const buzz: Astronaut = {
* name: 'Buzz',
* missions: 2,
* _id: '',
* }
*
* await client.create(threadID, 'astronauts', [buzz])
* }
* ```
*/
public create(
threadID: ThreadID,
collectionName: string,
values: any[],
): Promise<string[]> {
const req = new pb.CreateRequest()
req.setDbid(threadID.toBytes())
req.setCollectionname(collectionName)
const list: any[] = []
values.forEach((v) => {
list.push(encoder.encode(JSON.stringify(v)))
})
req.setInstancesList(list)
return this.unary(
API.Create,
req,
(res: pb.CreateReply) => res.toObject().instanceidsList,
)
}
/**
* Saves changes to an existing model instance in the given store.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
* @param values An array of model instances as JSON/JS objects.
* Each model instance must have a valid existing `_id` property.
*
* @example
* Update an existing instance
* ```typescript
* import {Client, ThreadID, Where} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
*
* async function updateBuzz (client: Client, threadID: ThreadID) {
* const query = new Where('name').eq('Buzz')
* const result = await client.find<Astronaut>(threadID, 'astronauts', query)
*
* if (result.length < 1) return
*
* const buzz = result[0]
* buzz.missions += 1
*
* return await client.save(threadID, 'astronauts', [buzz])
* }
* ```
*/
public save(
threadID: ThreadID,
collectionName: string,
values: any[],
): Promise<void> {
const req = new pb.SaveRequest()
req.setDbid(threadID.toBytes())
req.setCollectionname(collectionName)
const list: any[] = []
values.forEach((v) => {
if (!v.hasOwnProperty('_id')) {
v['_id'] = '' // The server will add an _id if empty.
}
list.push(encoder.encode(JSON.stringify(v)))
})
req.setInstancesList(list)
return this.unary(API.Save, req)
}
/**
* Deletes an existing model instance from the given store.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
* @param IDs An array of instance ids to delete.
*
* @example
* Delete any instances that return from a query
* ```typescript
* import {Client, ThreadID, Where} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
*
* async function deleteBuzz (client: Client, threadID: ThreadID) {
* const query = new Where('name').eq('Buzz')
* const result = await client.find<Astronaut>(threadID, 'astronauts', query)
*
* if (result.length < 1) return
*
* const ids = await result.map((instance) => instance._id)
* await client.delete(threadID, 'astronauts', ids)
* }
* ```
*/
public delete(
threadID: ThreadID,
collectionName: string,
IDs: string[],
): Promise<void> {
const req = new pb.DeleteRequest()
req.setDbid(threadID.toBytes())
req.setCollectionname(collectionName)
req.setInstanceidsList(IDs)
return this.unary(API.Delete, req)
}
/**
* Check if a given instance exists in the collection.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
* @param IDs An array of instance ids to check for.
*
* @example
* Check if an instance exists
* ```typescript
* import {Client, ThreadID, Where} from '@textile/threads'
*
* async function instanceExists (client: Client, threadID: ThreadID, id: string) {
* return await client.has(threadID, 'astronauts', [id])
* }
* ```
*/
public has(
threadID: ThreadID,
collectionName: string,
IDs: string[],
): Promise<boolean> {
const req = new pb.HasRequest()
req.setDbid(threadID.toBytes())
req.setCollectionname(collectionName)
req.setInstanceidsList(IDs)
return this.unary(API.Has, req, (res: pb.HasReply) => res.getExists())
}
/**
* Queries a collection for entities matching the given query parameters.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
   * @param query The object that describes the query. Use the Query class or the primitive QueryJSON type.
*
* @example
* Query with return type
* ```typescript
* import {Client, ThreadID, Where} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
*
* async function getAstronautByName (client: Client, threadID: ThreadID, name: string) {
* const query = new Where('name').eq(name)
* const astronaut = await client.find<Astronaut>(threadID, 'astronauts', query)
* return astronaut
* }
* ```
*/
public find<T = unknown>(
threadID: ThreadID,
collectionName: string,
query: QueryJSON,
): Promise<T[]> {
const req = new pb.FindRequest()
req.setDbid(threadID.toBytes())
req.setCollectionname(collectionName)
// @todo: Find a more isomorphic way to do this base64 round-trip
req.setQueryjson(encoder.encode(JSON.stringify(query)))
return this.unary(API.Find, req, (res: pb.FindReply) => {
return res
.getInstancesList_asU8()
.map((instance) => JSON.parse(decoder.decode(instance)))
})
}
/**
* Queries the collection by a known instance ID.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
* @param ID The id of the instance to search for.
*
* @example
* Find and cast a known model by instance ID.
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
*
* async function getAstronaut (client: Client, threadID: ThreadID, id: string) {
* const astronaut = await client.findByID<Astronaut>(threadID, 'astronauts', id)
* return astronaut
* }
* ```
*
* @example
* Simple find and return any instance
* ```typescript
* import {Client, ThreadID} from '@textile/threads'
*
* async function getInstance (client: Client, threadID: ThreadID, id: string) {
* return await client.findByID(threadID, 'astronauts', id)
* }
* ```
*/
public findByID<T = unknown>(
threadID: ThreadID,
collectionName: string,
ID: string,
): Promise<T> {
const req = new pb.FindByIDRequest()
req.setDbid(threadID.toBytes())
req.setCollectionname(collectionName)
req.setInstanceid(ID)
return this.unary(API.FindByID, req, (res: pb.FindByIDReply) =>
JSON.parse(decoder.decode(res.getInstance_asU8())),
)
}
/**
* Verify checks existing instance changes.
* Each model instance must have a valid existing `_id` property.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
* @param values An array of model instances as JSON/JS objects.
*
* @example
* Update an existing instance
* ```typescript
* import {Client, ThreadID, Where} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
*
* async function verifyBuzz (client: Client, threadID: ThreadID) {
* const query = new Where('name').eq('Buzz')
* const result = await client.find<Astronaut>(threadID, 'astronauts', query)
*
* if (result.length < 1) return
*
* const buzz = result[0]
* buzz.missions += 1
*
* // Is this going to be a valid update?
* return await client.verify(threadID, 'astronauts', [buzz])
* }
* ```
*/
public verify(
threadID: ThreadID,
collectionName: string,
values: any[],
): Promise<void> {
const req = new pb.VerifyRequest()
req.setDbid(threadID.toBytes())
req.setCollectionname(collectionName)
const list: any[] = values.map((v) => encoder.encode(JSON.stringify(v)))
req.setInstancesList(list)
return this.unary(API.Verify, req)
}
/**
* readTransaction creates a new read-only transaction object. See ReadTransaction for details.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
*/
public readTransaction(
threadID: ThreadID,
collectionName: string,
): ReadTransaction {
// TODO: We can do this setup much cleaner!
const client: grpc.Client<
pb.ReadTransactionRequest,
pb.ReadTransactionReply
> = grpc.client(API.ReadTransaction, {
host: this.serviceHost,
transport: this.rpcOptions.transport,
debug: this.rpcOptions.debug,
})
return new ReadTransaction(this.context, client, threadID, collectionName)
}
/**
* writeTransaction creates a new writeable transaction object. See WriteTransaction for details.
* @param threadID the ID of the database
* @param collectionName The human-readable name of the model to use.
*/
public writeTransaction(
threadID: ThreadID,
collectionName: string,
): WriteTransaction {
const client: grpc.Client<
pb.WriteTransactionRequest,
pb.WriteTransactionReply
> = grpc.client(API.WriteTransaction, {
host: this.serviceHost,
transport: this.rpcOptions.transport,
debug: this.rpcOptions.debug,
})
return new WriteTransaction(this.context, client, threadID, collectionName)
}
/**
* listen opens a long-lived connection with a remote node, running the given callback on each new update to the given instance.
* The return value is a `close` function, which cleanly closes the connection with the remote node.
* @param threadID the ID of the database
* @param filters contains an array of Filters
* @param callback The callback to call on each update to the given instance.
*
* @example
* ```typescript
* import {Client, ThreadID, Update} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
* function setupListener (client: Client, threadID: ThreadID) {
* const callback = (update?: Update<Astronaut>) => {
* // Not safe if more than the Astronauts collection existed in the same DB
* if (!update || !update.instance) return
* console.log('New update:', update.instance.name, update.instance.missions)
* }
* const closer = client.listen(threadID, [], callback)
* return closer
* }
* ```
*
* @example
* Listen to only CREATE events on a specific Collection.
* ```typescript
* import {Client, ThreadID, Update} from '@textile/threads'
*
* interface Astronaut {
* name: string
* missions: number
* _id: string
* }
* function setupListener (client: Client, threadID: ThreadID) {
* const callback = (update?: Update<Astronaut>) => {
* if (!update || !update.instance) return
* console.log('New update:', update.instance.name, update.instance.missions)
* }
* const filters = [
* {collectionName: 'Astronauts'},
* {actionTypes: ['CREATE']}
* ]
* const closer = client.listen(threadID, filters, callback)
* return closer
* }
* ```
*/
  public listen<T = any>(
    threadID: ThreadID,
    filters: Filter[],
    callback: (reply?: Update<T>, err?: Error) => void,
  ): grpc.Request {
    const req = new pb.ListenRequest()
    req.setDbid(threadID.toBytes())
    // Translate each high-level Filter into a protobuf ListenRequest.Filter.
    for (const filter of filters) {
      const requestFilter = new pb.ListenRequest.Filter()
      // An instance id filter takes precedence over a collection name filter.
      if (filter.instanceID) {
        requestFilter.setInstanceid(filter.instanceID)
      } else if (filter.collectionName) {
        requestFilter.setCollectionname(filter.collectionName)
      }
      if (filter.actionTypes) {
        for (const at of filter.actionTypes) {
          switch (at) {
            case 'ALL': {
              requestFilter.setAction(pb.ListenRequest.Filter.Action.ALL)
              break
            }
            case 'CREATE': {
              requestFilter.setAction(pb.ListenRequest.Filter.Action.CREATE)
              break
            }
            case 'SAVE': {
              requestFilter.setAction(pb.ListenRequest.Filter.Action.SAVE)
              break
            }
            case 'DELETE': {
              requestFilter.setAction(pb.ListenRequest.Filter.Action.DELETE)
              break
            }
          }
        }
      } else {
        // NOTE(review): the raw 0 is presumably Action.ALL — confirm against
        // the generated protobuf enum and prefer the named constant.
        requestFilter.setAction(0)
      }
      req.addFilters(requestFilter)
    }
    const decoder = new TextDecoder()
    const client = grpc.client<pb.ListenRequest, pb.ListenReply, APIListen>(
      API.Listen,
      {
        host: this.serviceHost,
        transport: this.rpcOptions.transport,
        debug: this.rpcOptions.debug,
      },
    )
    // Each streamed reply is unpacked into a plain Update<T> record.
    client.onMessage((message: pb.ListenReply) => {
      // Pull it apart explicitly
      const instanceString = decoder.decode(message.getInstance_asU8())
      const actionInt = message.getAction()
      const action = (Action[actionInt] as unknown) as Action
      const collectionName = message.getCollectionname()
      const instanceID = message.getInstanceid()
      const ret: Update<T> = {
        collectionName,
        instanceID,
        action,
        instance: undefined,
      }
      // An empty payload leaves `instance` undefined (presumably the case
      // for delete events — verify against server behavior).
      if (instanceString !== '') {
        ret.instance = JSON.parse(instanceString)
      }
      callback(ret)
    })
    client.onEnd((
      status: grpc.Code,
      message: string /** trailers: grpc.Metadata */,
    ) => {
      if (status !== grpc.Code.OK) {
        callback(undefined, new Error(message))
      }
      // Stream completion is always signaled with an empty callback call,
      // even after an error callback above.
      callback()
    })
    // Attach auth metadata asynchronously, then start the stream and flush
    // the single listen request.
    this.context.toMetadata().then((metadata) => {
      client.start(metadata)
      client.send(req)
      client.finishSend()
    })
    return { close: () => client.close() }
  }
private async unary<
TResponse extends grpc.ProtobufMessage,
TRequest extends grpc.ProtobufMessage,
M extends grpc.UnaryMethodDefinition<TRequest, TResponse>,
O = undefined // Only thing we can't know ahead of time
>(
methodDescriptor: M,
req: TRequest,
mapper: (resp: TResponse) => O | undefined = () => undefined,
) {
const metadata = await this.context.toMetadata()
return new Promise<O>((resolve, reject) => {
grpc.unary(methodDescriptor, {
transport: this.rpcOptions.transport,
debug: this.rpcOptions.debug,
request: req,
host: this.serviceHost,
metadata,
onEnd: (res: UnaryOutput<TResponse>) => {
const { status, statusMessage, message } = res
if (status === grpc.Code.OK) {
resolve(mapper(message as any) as O)
} else {
reject(new Error(statusMessage))
}
},
})
})
}
}
export default Client
import { LoaderOptions, Library, Instance } from './seal'
import { Exception, SealError } from './exception'
import { MemoryPoolHandle } from './memory-pool-handle'
import { CipherText, CipherTextConstructorOptions } from './cipher-text'
import { Context } from './context'
import { Serializable, SerializableConstructorOptions } from './serializable'
import { PublicKey } from './public-key'
import { SecretKey } from './secret-key'
import { PlainText } from './plain-text'
/**
 * Companion components injected into the Encryptor factory by the seal
 * loader (error normalizer, memory pool, and result constructors).
 */
export type EncryptorDependencyOptions = {
  readonly Exception: Exception
  readonly MemoryPoolHandle: MemoryPoolHandle
  readonly CipherText: CipherTextConstructorOptions
  readonly Serializable: SerializableConstructorOptions
}
/**
 * Factory stage that accepts the dependency record and yields the
 * user-facing Encryptor constructor.
 */
export type EncryptorDependencies = {
  ({
    Exception,
    MemoryPoolHandle,
    CipherText,
    Serializable
  }: EncryptorDependencyOptions): EncryptorConstructorOptions
}
/**
 * Constructs an Encryptor bound to a Context and PublicKey. Supplying the
 * optional SecretKey additionally enables the symmetric-key methods.
 */
export type EncryptorConstructorOptions = {
  (context: Context, publicKey: PublicKey, secretKey?: SecretKey): Encryptor
}
/**
 * Public surface of an Encryptor wrapping a native SEAL instance.
 * Methods that accept an optional CipherText write into it and return
 * void; otherwise they allocate and return a fresh CipherText.
 */
export type Encryptor = {
  readonly instance: Instance
  readonly unsafeInject: (instance: Instance) => void
  readonly delete: () => void
  readonly encrypt: (
    plainText: PlainText,
    cipherText?: CipherText,
    pool?: MemoryPoolHandle
  ) => CipherText | void
  readonly encryptSerializable: (
    plainText: PlainText,
    pool?: MemoryPoolHandle
  ) => Serializable
  readonly encryptSymmetric: (
    plainText: PlainText,
    cipherText?: CipherText,
    pool?: MemoryPoolHandle
  ) => CipherText | void
  readonly encryptSymmetricSerializable: (
    plainText: PlainText,
    pool?: MemoryPoolHandle
  ) => Serializable
  readonly encryptZero: (
    cipherText?: CipherText,
    pool?: MemoryPoolHandle
  ) => CipherText | void
  readonly encryptZeroSerializable: (pool?: MemoryPoolHandle) => Serializable
}
/**
 * Builds the Encryptor factory chain for a loaded SEAL WASM library.
 * Curried in three stages: bind the WASM `library`, inject companion
 * dependencies, then construct an Encryptor for a Context/PublicKey pair
 * (an optional SecretKey enables the symmetric-key methods).
 */
const EncryptorConstructor =
  (library: Library): EncryptorDependencies =>
  ({
    Exception,
    MemoryPoolHandle,
    CipherText,
    Serializable
  }: EncryptorDependencyOptions): EncryptorConstructorOptions =>
  (context, publicKey, secretKey): Encryptor => {
    const Constructor = library.Encryptor
    // The live native WASM instance. Replaced by `unsafeInject`, cleared by
    // `delete()`; every wrapper method below delegates to it.
    let _instance = constructInstance(context, publicKey, secretKey)
    // Instantiates the native SEAL Encryptor. The three-argument overload
    // (with a secret key) unlocks the symmetric APIs. Native errors are
    // normalized through Exception.safe.
    function constructInstance(
      context: Context,
      publicKey: PublicKey,
      secretKey?: SecretKey
    ) {
      try {
        if (secretKey) {
          return new Constructor(
            context.instance,
            publicKey.instance,
            secretKey.instance
          )
        }
        return new Constructor(context.instance, publicKey.instance)
      } catch (e) {
        throw Exception.safe(e as SealError)
      }
    }
    /**
     * @implements Encryptor
     */
    /**
     * @interface Encryptor
     */
    return {
      /**
       * Get the underlying WASM instance
       *
       * @private
       * @readonly
       * @name Encryptor#instance
       * @type {Instance}
       */
      get instance() {
        return _instance
      },
      /**
       * Inject this object with a raw WASM instance. No type checking is performed.
       *
       * The previous instance (if any) is deleted first to free WASM memory.
       *
       * @private
       * @function
       * @name Encryptor#unsafeInject
       * @param {Instance} instance WASM instance
       */
      unsafeInject(instance: Instance) {
        if (_instance) {
          _instance.delete()
          _instance = undefined
        }
        _instance = instance
      },
      /**
       * Delete the underlying WASM instance.
       *
       * Should be called before dereferencing this object to prevent the
       * WASM heap from growing indefinitely.
       * @function
       * @name Encryptor#delete
       */
      delete() {
        if (_instance) {
          _instance.delete()
          _instance = undefined
        }
      },
      /**
       * Encrypts a PlainText and stores the result in the destination parameter.
       * Dynamic memory allocations in the process are allocated from the memory
       * pool pointed to by the given MemoryPoolHandle.
       *
       * @function
       * @name Encryptor#encrypt
       * @param {PlainText} plainText PlainText to encrypt
       * @param {CipherText} [cipherText] CipherText destination to store the encrypted result
       * @param {MemoryPoolHandle} [pool={@link MemoryPoolHandle.global}] MemoryPool to use
       * @returns {CipherText|void} Returns undefined if a CipherText was specified. Otherwise returns a
       * CipherText containing the encrypted result
       */
      encrypt(
        plainText: PlainText,
        cipherText?: CipherText,
        pool: MemoryPoolHandle = MemoryPoolHandle.global
      ): CipherText | void {
        try {
          // Write into the caller-supplied destination when provided.
          if (cipherText) {
            _instance.encrypt(plainText.instance, cipherText.instance, pool)
            return
          }
          const cipher = CipherText()
          _instance.encrypt(plainText.instance, cipher.instance, pool)
          return cipher
        } catch (e) {
          throw Exception.safe(e as SealError)
        }
      },
      /**
       * Encrypts a PlainText and returns a CipherText as a Serializable object.
       * Dynamic memory allocations in the process are allocated from the memory
       * pool pointed to by the given MemoryPoolHandle.
       *
       * @function
       * @name Encryptor#encryptSerializable
       * @param {PlainText} plainText PlainText to encrypt
       * @param {MemoryPoolHandle} [pool={@link MemoryPoolHandle.global}] MemoryPool to use
       * @returns {Serializable<CipherText>} A Serializable containing the encrypted result
       */
      encryptSerializable(
        plainText: PlainText,
        pool: MemoryPoolHandle = MemoryPoolHandle.global
      ): Serializable {
        try {
          const temp = Serializable()
          const instance = _instance.encryptSerializable(
            plainText.instance,
            pool
          )
          temp.unsafeInject(instance)
          return temp
        } catch (e) {
          throw Exception.safe(e as SealError)
        }
      },
      /**
       * Encrypts a PlainText with the secret key and stores the result in
       * destination.
       *
       * The encryption parameters for the resulting CipherText
       * correspond to:
       * 1) in BFV, the highest (data) level in the modulus switching chain,
       * 2) in CKKS, the encryption parameters of the plaintext.
       * Dynamic memory allocations in the process are allocated from the memory
       * pool pointed to by the given MemoryPoolHandle.
       *
       * @function
       * @name Encryptor#encryptSymmetric
       * @param {PlainText} plainText PlainText to encrypt
       * @param {CipherText} [cipherText] CipherText destination to store the encrypted result.
       * @param {MemoryPoolHandle} [pool={@link MemoryPoolHandle.global}] MemoryPool to use
       * @returns {CipherText|void} Returns undefined if a CipherText was specified. Otherwise returns a
       * CipherText containing the encrypted result
       */
      encryptSymmetric(
        plainText: PlainText,
        cipherText?: CipherText,
        pool: MemoryPoolHandle = MemoryPoolHandle.global
      ): CipherText | void {
        try {
          // Write into the caller-supplied destination when provided.
          if (cipherText) {
            _instance.encryptSymmetric(
              plainText.instance,
              cipherText.instance,
              pool
            )
            return
          }
          const cipher = CipherText()
          _instance.encryptSymmetric(plainText.instance, cipher.instance, pool)
          return cipher
        } catch (e) {
          throw Exception.safe(e as SealError)
        }
      },
      /**
       * Encrypts a plaintext with the secret key and returns the ciphertext as
       * a serializable object.
       *
       * The encryption parameters for the resulting CipherText
       * correspond to:
       * 1) in BFV, the highest (data) level in the modulus switching chain,
       * 2) in CKKS, the encryption parameters of the plaintext.
       * Dynamic memory allocations in the process are allocated from the memory
       * pool pointed to by the given MemoryPoolHandle.
       *
       * Half of the ciphertext data is pseudo-randomly generated from a seed to
       * reduce the object size. The resulting serializable object cannot be used
       * directly and is meant to be serialized for the size reduction to have an
       * impact.
       *
       * @function
       * @name Encryptor#encryptSymmetricSerializable
       * @param {PlainText} plainText PlainText to encrypt
       * @param {MemoryPoolHandle} [pool={@link MemoryPoolHandle.global}] MemoryPool to use
       * @returns {Serializable<CipherText>} Returns a Serializable containing the encrypted result
       */
      encryptSymmetricSerializable(
        plainText: PlainText,
        pool: MemoryPoolHandle = MemoryPoolHandle.global
      ): Serializable {
        try {
          const serialized = Serializable()
          const instance = _instance.encryptSymmetricSerializable(
            plainText.instance,
            pool
          )
          serialized.unsafeInject(instance)
          return serialized
        } catch (e) {
          throw Exception.safe(e as SealError)
        }
      },
      /**
       *
       * Encrypts a zero plaintext with the public key and returns the ciphertext
       * as a serializable object.
       *
       * The encryption parameters for the resulting ciphertext correspond to the
       * highest (data) level in the modulus switching chain. Dynamic memory
       * allocations in the process are allocated from the memory pool pointed to
       * by the given MemoryPoolHandle.
       *
       * @function
       * @name Encryptor#encryptZero
       * @param {CipherText} [cipherText] A CipherText to overwrite.
       * @param {MemoryPoolHandle} [pool={@link MemoryPoolHandle.global}] MemoryPool to use
       * @returns {CipherText|void} Returns undefined if a CipherText was specified. Otherwise returns a
       * CipherText containing the encrypted result
       */
      encryptZero(
        cipherText?: CipherText,
        pool: MemoryPoolHandle = MemoryPoolHandle.global
      ): CipherText | void {
        try {
          // Write into the caller-supplied destination when provided.
          if (cipherText) {
            _instance.encryptZero(cipherText.instance, pool)
            return
          }
          const cipher = CipherText()
          _instance.encryptZero(cipher.instance, pool)
          return cipher
        } catch (e) {
          throw Exception.safe(e as SealError)
        }
      },
      /**
       *
       * Encrypts a zero plaintext with the public key and stores the result in
       * destination.
       *
       * The encryption parameters for the resulting ciphertext correspond to the
       * highest (data) level in the modulus switching chain. Dynamic memory
       * allocations in the process are allocated from the memory pool pointed to
       * by the given MemoryPoolHandle.
       *
       * @function
       * @name Encryptor#encryptZeroSerializable
       * @param {MemoryPoolHandle} [pool={@link MemoryPoolHandle.global}] MemoryPool to use
       * @returns {Serializable<CipherText>} A CipherText as a serialized object containing the encrypted result
       */
      encryptZeroSerializable(
        pool: MemoryPoolHandle = MemoryPoolHandle.global
      ): Serializable {
        try {
          const serialized = Serializable()
          const instance = _instance.encryptZeroSerializable(pool)
          serialized.unsafeInject(instance)
          return serialized
        } catch (e) {
          throw Exception.safe(e as SealError)
        }
      }
    }
  }
export const EncryptorInit = ({
loader
}: LoaderOptions): EncryptorDependencies => {
const library: Library = loader.library
return EncryptorConstructor(library)
} | the_stack |
import debounce from 'just-debounce-it';
import { ReactiveController, ReactiveElement } from 'lit';
import {
ContextConsumerController,
ContextProviderController,
createContext,
isDerviedContext,
provideContextRecord
} from '../../base/context';
import { DisposalBin, listen, vdsEvent } from '../../base/events';
import {
FullscreenChangeEvent,
FullscreenErrorEvent
} from '../../base/fullscreen';
import {
Logger,
LogLevel,
LogLevelName,
LogLevelNameMap
} from '../../base/logger';
import { RequestQueue } from '../../base/queue';
import { DEV_MODE } from '../../global/env';
import { keysOf } from '../../utils/object';
import { isArray, isNil } from '../../utils/unit';
import {
mediaContext,
MediaContextProviderRecord,
MediaContextRecordValues
} from '../context';
import {
PauseEvent,
PlayErrorEvent,
PlayEvent,
PlayingEvent,
ReplayEvent,
SeekedEvent,
SeekingEvent,
VolumeChangeEvent,
WaitingEvent
} from '../events';
import {
MediaProviderConnectEvent,
MediaProviderElement
} from '../provider/MediaProviderElement';
import {
EnterFullscreenRequestEvent,
ExitFullscreenRequestEvent,
MuteRequestEvent,
PauseRequestEvent,
PendingMediaRequests,
PlayRequestEvent,
SeekingRequestEvent,
SeekRequestEvent,
UnmuteRequestEvent,
VolumeChangeRequestEvent
} from '../request.events';
/**
 * The element hosting the media controller: any Lit `ReactiveElement`,
 * optionally exposing an `exitFullscreen()` method.
 */
export type MediaControllerHost = ReactiveElement & {
  exitFullscreen?(): Promise<void>;
};
/* c8 ignore next */
// Module-level context carrying the current log level (default: silent).
const _logLevel = createContext(LogLevel.Silent);
/**
* The media controller acts as a message bus between the media provider and all other
* components, such as UI components and plugins. The main responsibilities are:
*
* - Provide the media context that is used to pass media state down to components (this
* context is injected into and managed by the media provider).
*
* - Listen for media request events and fulfill them by calling the appropriate props/methods on
* the current media provider.
*
* 💡 The base `MediaPlayer` acts as both a media controller and provider.
*/
export class MediaController implements ReactiveController {
  // Disposal bin emptied when the host disconnects from the DOM.
  protected readonly _disconnectDisposal: DisposalBin;
  /* c8 ignore start */
  // Dev-mode only logger; undefined in production builds.
  protected readonly _logger?: Logger;
  protected readonly _logLevelProvider: ContextProviderController<LogLevel>;
  /* c8 ignore stop */
  // Requests queued until a media provider connects.
  protected readonly _mediaProviderConnectedQueue: RequestQueue;
  // Disposal bin emptied when the current media provider disconnects.
  protected readonly _mediaProviderDisconnectedDisposal: DisposalBin;
  /**
   * Wires logging, the media context record, and the disposal/queue helpers
   * to the host element, then registers itself as a reactive controller.
   *
   * @param _host The element this controller is attached to.
   */
  constructor(protected readonly _host: MediaControllerHost) {
    /* c8 ignore next */
    this._logLevelProvider = _logLevel.provide(_host);
    /* c8 ignore start */
    if (DEV_MODE && !Logger._consumeLogLevel) {
      // Inject log level context into `Logger` to avoid dep cycle.
      Logger._consumeLogLevel = _logLevel.consume;
    }
    /* c8 ignore stop */
    this.mediaCtx = provideContextRecord(_host, mediaContext);
    /* c8 ignore next */
    this._logger = DEV_MODE && new Logger(_host, { owner: this });
    this._disconnectDisposal = new DisposalBin(
      _host,
      /* c8 ignore next */
      DEV_MODE && { name: 'disconnectDisposal', owner: this }
    );
    this._mediaProviderConnectedQueue = new RequestQueue(
      _host,
      /* c8 ignore next */
      DEV_MODE && {
        name: 'mediaProviderConnectedQueue',
        owner: this
      }
    );
    this._mediaProviderDisconnectedDisposal = new DisposalBin(
      _host,
      /* c8 ignore next */
      DEV_MODE && { name: 'mediaProviderDisconnectDisposal', owner: this }
    );
    _host.addController(this);
  }
// -------------------------------------------------------------------------------------------
// Lifecycle
// -------------------------------------------------------------------------------------------
  // Sets up dev-mode logging and all media event listeners when the host
  // connects to the DOM.
  hostConnected() {
    /* c8 ignore start */
    if (DEV_MODE) {
      this._logEvents();
      this._logErrors();
    }
    /* c8 ignore stop */
    this._addEventListeners();
  }
  // Tears down all pending state and listeners when the host disconnects:
  // pending requests are dropped, queues destroyed, and disposal bins emptied.
  hostDisconnected() {
    this._clearPendingMediaRequests();
    this._mediaProviderConnectedQueue.destroy();
    this._mediaProviderDisconnectedDisposal.empty();
    this._disconnectDisposal.empty();
  }
protected _addEventListeners() {
const eventListeners = {
//
'vds-media-provider-connect': this._handleMediaProviderConnect,
'vds-mute-request': this._handleMuteRequest,
'vds-unmute-request': this._handleUnmuteRequest,
'vds-play-request': this._handlePlayRequest,
'vds-pause-request': this._handlePauseRequest,
'vds-seeking-request': this._handleSeekingRequest,
'vds-seek-request': this._handleSeekRequest,
'vds-volume-change-request': this._handleVolumeChangeRequest,
'vds-enter-fullscreen-request': this._handleEnterFullscreenRequest,
'vds-exit-fullscreen-request': this._handleExitFullscreenRequest,
'vds-fullscreen-change': this._handleFullscreenChange,
'vds-fullscreen-error': this._handleFullscreenError,
//
'vds-play': [this._handlePlay, { capture: true }],
'vds-play-error': [this._handlePlayError, { capture: true }],
'vds-playing': [this._handlePlaying, { capture: true }],
'vds-pause': [this._handlePause, { capture: true }],
'vds-volume-change': [this._handleVolumeChange, { capture: true }],
'vds-replay': [this._handleReplay, { capture: true }],
'vds-seeking': [this._handleSeeking, { capture: true }],
'vds-seeked': [this._handleSeeked, { capture: true }],
'vds-waiting': [this._handleWaiting],
//
seeked: [this._handleSeeked, { capture: true }]
};
keysOf(eventListeners).forEach((eventType) => {
const eventListener = eventListeners[eventType];
const listener = isArray(eventListener)
? eventListener[0]
: eventListener;
const options = isArray(eventListener) ? eventListener[1] : undefined;
const dispose = listen(
this._host,
eventType,
(listener as () => void).bind(this),
options as EventListenerOptions
);
this._disconnectDisposal.add(dispose);
});
}
// -------------------------------------------------------------------------------------------
// Logging
// -------------------------------------------------------------------------------------------
get logLevel(): LogLevelName {
/* c8 ignore next */
return DEV_MODE ? LogLevelNameMap[this._logLevelProvider.value] : 'silent';
}
set logLevel(newLevel: LogLevelName) {
/* c8 ignore next */
const numericLevel = DEV_MODE
? Object.values(LogLevelNameMap).findIndex((l) => l === newLevel)
: 0;
this._logLevelProvider.value = numericLevel >= 0 ? numericLevel : 0;
}
protected _logEvents() {
/* c8 ignore start */
if (DEV_MODE) {
const loggedEvents: (keyof GlobalEventHandlersEventMap)[] = [
'vds-controls-change',
'vds-fullscreen-change'
];
loggedEvents.forEach((eventType) => {
const dispose = listen(this._host, eventType, (event) => {
this._logger!.infoGroup(`📡 dispatching \`${eventType}\``)
.appendWithLabel('Event', event)
.appendWithLabel('Provider', this.mediaProvider)
.end();
});
this._disconnectDisposal.add(dispose);
});
}
/* c8 ignore stop */
}
protected _logErrors() {
/* c8 ignore start */
if (DEV_MODE) {
const dispose = listen(this._host, 'vds-error', (event) => {
this._logger!.errorGroup(event.type)
.appendWithLabel('Context', this.mediaCtx)
.appendWithLabel('Event', event)
.appendWithLabel('Provider', this.mediaProvider)
.end();
});
this._disconnectDisposal.add(dispose);
}
/* c8 ignore stop */
}
// -------------------------------------------------------------------------------------------
// Media Provider
// -------------------------------------------------------------------------------------------
  // The currently connected media provider, if any.
  protected _mediaProvider: MediaProviderElement | undefined;
  /** The currently connected media provider element, if any. */
  get mediaProvider() {
    return this._mediaProvider;
  }
  /** @internal */
  setMediaProvider(mediaProvider?: MediaProviderElement) {
    this._mediaProvider = mediaProvider;
  }
  /**
   * Adopts a newly connected media provider: disconnects any previous one,
   * attaches the controller's media context record, flushes queued requests,
   * and registers the provider's disconnect callback.
   */
  protected _handleMediaProviderConnect(event: MediaProviderConnectEvent) {
    event.stopPropagation();
    const { element, onDisconnect } = event.detail;
    // Ignore repeat connect events from the already-adopted provider.
    if (this.mediaProvider === element) return;
    /* c8 ignore start */
    if (DEV_MODE) {
      this._logger!.infoGroup('media provider connected')
        .appendWithLabel('Provider', element)
        .end();
    }
    /* c8 ignore stop */
    // Tear down the previous provider before adopting the new one.
    this._handleMediaProviderDisconnect();
    this._mediaProvider = element;
    this._attachMediaContextRecordToProvider();
    this._flushMediaProviderConnectedQueue();
    onDisconnect(this._handleMediaProviderDisconnect.bind(this));
  }
  /**
   * Releases the current media provider: destroys the connected-request
   * queue, empties the provider-scoped disposal bin (which also restores
   * the provider's own context record), and clears the reference.
   */
  protected _handleMediaProviderDisconnect() {
    if (isNil(this.mediaProvider)) return;
    /* c8 ignore start */
    if (DEV_MODE) {
      this._logger!.infoGroup('media provider disconnected')
        .appendWithLabel('Provider', this.mediaProvider)
        .end();
    }
    /* c8 ignore stop */
    this._mediaProviderConnectedQueue.destroy();
    this._mediaProviderDisconnectedDisposal.empty();
    this._mediaProvider = undefined;
  }
protected _flushMediaProviderConnectedQueue() {
this._mediaProviderConnectedQueue.flush();
this._mediaProviderConnectedQueue.serveImmediately = true;
this._mediaProviderDisconnectedDisposal.add(() => {
this._mediaProviderConnectedQueue.serveImmediately = false;
this._mediaProviderConnectedQueue.reset();
});
}
// -------------------------------------------------------------------------------------------
// Media Context
// -------------------------------------------------------------------------------------------
  /**
   * Media context provider record which is injected by the media controller into the media
   * provider, so it can be managed and updated by it.
   *
   * @internal
   */
  readonly mediaCtx: MediaContextProviderRecord;
  /**
   * A snapshot of the current media state (shallow copy of the context
   * record; nested objects are shared with the live record).
   */
  get mediaState(): Readonly<MediaContextRecordValues> {
    return Object.assign({}, this.mediaCtx);
  }
  /**
   * Swaps the provider's own context record for the controller's shared one
   * so the controller observes all media state updates. On disconnect, the
   * provider is given back a fresh record seeded with the current values.
   */
  protected _attachMediaContextRecordToProvider() {
    if (isNil(this.mediaProvider)) return;
    // Copy over context values before setting on provider.
    Object.keys(this.mediaProvider.ctx).forEach((prop) => {
      this.mediaCtx[prop] = this.mediaProvider!.ctx[prop];
    });
    // Destroy the provider's old record before replacing it.
    this.mediaProvider.ctx.__destroy();
    // @ts-expect-error - Override readonly
    this.mediaProvider.ctx = this.mediaCtx;
    /* c8 ignore start */
    if (DEV_MODE) {
      this._logger!.infoGroup('attached context record')
        .appendWithLabel('Provider', this.mediaProvider)
        .appendWithLabel('Context', this.mediaCtx)
        .end();
    }
    /* c8 ignore stop */
    this._mediaProviderDisconnectedDisposal.add(() => {
      const ctx = provideContextRecord(this.mediaProvider!, mediaContext);
      // Copy over context values before setting on provider.
      Object.keys(this.mediaCtx).forEach((prop) => {
        ctx[prop] = this.mediaCtx[prop];
      });
      // @ts-expect-error - Override readonly
      this.mediaProvider!.ctx = ctx;
    });
  }
// -------------------------------------------------------------------------------------------
// Media Request Events
// -------------------------------------------------------------------------------------------
  /**
   * Media requests that have been made but are waiting to be satisfied. Key represents the media
   * event type the request is waiting for to be considered "satisfied".
   */
  protected _pendingMediaRequests: PendingMediaRequests = {
    play: [],
    pause: [],
    volume: [],
    fullscreen: [],
    seeked: [],
    seeking: []
  };
protected _clearPendingMediaRequests(): void {
keysOf(this._pendingMediaRequests).forEach((key) => {
this._pendingMediaRequests[key] = [];
});
}
  /**
   * Marks the oldest pending request of the given type as satisfied by
   * attaching it to the media event that fulfilled it (FIFO order).
   */
  protected satisfyMediaRequest<T extends keyof PendingMediaRequests>(
    type: T,
    event: Event & { requestEvent?: Event }
  ): void {
    event.requestEvent = this._pendingMediaRequests[type].shift();
  }
  /**
   * Gateway every media request event passes through: stops propagation and
   * (in dev mode) logs the request. Returns whether handling may proceed.
   *
   * Override this to allow media events to bubble up the DOM.
   *
   * @param event
   */
  protected _mediaRequestEventGateway(event: Event) {
    event.stopPropagation();
    /* c8 ignore start */
    if (DEV_MODE) {
      this._logger!.infoGroup(`📬 received \`${event.type}\``)
        .appendWithLabel('Request', event)
        .end();
    }
    /* c8 ignore stop */
    return true;
  }
/** Handles a mute request; no-op when the provider is already muted. */
protected _handleMuteRequest(event: MuteRequestEvent): void {
  if (!this._mediaRequestEventGateway(event)) return;
  this._mediaProviderConnectedQueue.queue('muted', () => {
    const alreadyMuted = this._mediaProvider?.muted === true;
    if (alreadyMuted) return;
    this._pendingMediaRequests.volume.push(event);
    this.mediaProvider!.muted = true;
  });
}
/** Handles an unmute request; no-op when the provider is already unmuted. */
protected _handleUnmuteRequest(event: UnmuteRequestEvent): void {
  if (!this._mediaRequestEventGateway(event)) return;
  this._mediaProviderConnectedQueue.queue('muted', () => {
    const alreadyUnmuted = this._mediaProvider?.muted === false;
    if (alreadyUnmuted) return;
    this._pendingMediaRequests.volume.push(event);
    this.mediaProvider!.muted = false;
  });
}
/** Handles a play request; no-op when the provider is already playing. */
protected _handlePlayRequest(event: PlayRequestEvent): void {
  if (!this._mediaRequestEventGateway(event)) return;
  this._mediaProviderConnectedQueue.queue('paused', () => {
    const alreadyPlaying = this._mediaProvider?.paused === false;
    if (alreadyPlaying) return;
    this._pendingMediaRequests.play.push(event);
    this.mediaProvider!.paused = false;
  });
}
/** Handles a pause request; no-op when the provider is already paused. */
protected _handlePauseRequest(event: PauseRequestEvent): void {
  if (!this._mediaRequestEventGateway(event)) return;
  this._mediaProviderConnectedQueue.queue('paused', () => {
    const alreadyPaused = this._mediaProvider?.paused === true;
    if (alreadyPaused) return;
    this._pendingMediaRequests.pause.push(event);
    this.mediaProvider!.paused = true;
  });
}
// Whether a seeking (scrub) request is in-flight: set here, cleared by `_handleSeekRequest`.
protected _isSeekingRequestPending = false;
protected _handleSeekingRequest(event: SeekingRequestEvent): void {
  if (!this._mediaRequestEventGateway(event)) return;
  this._mediaProviderConnectedQueue.queue('seeking', () => {
    this._pendingMediaRequests.seeking.push(event);
    this._isSeekingRequestPending = true;
    // Move the playhead and suppress any debounced waiting indicator while scrubbing.
    this.mediaProvider!.currentTime = event.detail;
    this._fireWaiting.cancel();
  });
}
/**
 * Finalizes a seek by setting the provider's current time to the requested position,
 * snapping to the end of the media when the target lands within 0.25s of the duration.
 */
protected _handleSeekRequest(event: SeekRequestEvent): void {
  if (!this._mediaRequestEventGateway(event)) return;
  this._mediaProviderConnectedQueue.queue('seeking', () => {
    this._pendingMediaRequests.seeked.push(event);
    this._isSeekingRequestPending = false;
    const duration = this.mediaProvider!.duration;
    const requestedTime = event.detail;
    // Snap to end if close enough.
    const shouldSnapToEnd = duration - requestedTime < 0.25;
    this.mediaProvider!.currentTime = shouldSnapToEnd ? duration : requestedTime;
  });
}
/** Applies a requested volume level, skipping the update when nothing would change. */
protected _handleVolumeChangeRequest(event: VolumeChangeRequestEvent): void {
  if (!this._mediaRequestEventGateway(event)) return;
  this._mediaProviderConnectedQueue.queue('volume', () => {
    const requestedVolume = event.detail;
    if (this._mediaProvider?.volume === requestedVolume) return;
    this._pendingMediaRequests.volume.push(event);
    this.mediaProvider!.volume = requestedVolume;
  });
}
protected async _handleEnterFullscreenRequest(
event: EnterFullscreenRequestEvent
): Promise<void> {
if (
!this._mediaRequestEventGateway(event) ||
this._mediaProvider?.fullscreen
) {
return;
}
this._pendingMediaRequests.fullscreen.push(event);
await this._host.requestFullscreen();
}
protected async _handleExitFullscreenRequest(
event: ExitFullscreenRequestEvent
): Promise<void> {
if (
!this._mediaRequestEventGateway(event) ||
!this._mediaProvider?.fullscreen
) {
return;
}
this._pendingMediaRequests.fullscreen.push(event);
await this._host.exitFullscreen?.();
}
/** Syncs the fullscreen context with the provider and settles the pending fullscreen request. */
protected _handleFullscreenChange(event: FullscreenChangeEvent): void {
  this.mediaCtx.fullscreen = event.detail;
  this.satisfyMediaRequest('fullscreen', event);
}
/** A fullscreen error also settles the pending fullscreen request. */
protected _handleFullscreenError(event: FullscreenErrorEvent): void {
  this.satisfyMediaRequest('fullscreen', event);
}
// -------------------------------------------------------------------------------------------
// Media Events
// -------------------------------------------------------------------------------------------
/** Settles the pending play request once playback actually starts. */
protected _handlePlay(event: PlayEvent): void {
  this.satisfyMediaRequest('play', event);
}
/** A play error also settles the pending play request. */
protected _handlePlayError(event: PlayErrorEvent): void {
  this.satisfyMediaRequest('play', event);
}
protected _handlePlaying(event: PlayingEvent): void {
  this._fireWaiting.cancel();
  // Suppress `playing` while a seek request is still in-flight so the context keeps
  // reporting the seeking state.
  if (this._isSeekingRequestPending) {
    event.stopImmediatePropagation();
    this.mediaCtx.seeking = true;
  }
}
protected _handlePause(event: PauseEvent): void {
  this.satisfyMediaRequest('pause', event);
  this._fireWaiting.cancel();
}
protected _handleVolumeChange(event: VolumeChangeEvent): void {
  this.satisfyMediaRequest('volume', event);
}
protected _handleReplay(event: ReplayEvent): void {
  // NOTE(review): peeks (index 0) rather than shifts, so the request stays queued —
  // presumably it is later satisfied by the accompanying play event; confirm intended.
  event.requestEvent = this._pendingMediaRequests.play[0];
}
protected _handleSeeking(event: SeekingEvent): void {
  this.satisfyMediaRequest('seeking', event);
  // Re-fire any waiting indicator that was deferred while the seek started.
  if (this._lastWaitingEvent) this._fireWaiting();
}
protected _handleSeeked(event: SeekedEvent): void {
  // We don't want `seeked` events firing while seeking is updating media playback position.
  if (this._isSeekingRequestPending) {
    event.stopImmediatePropagation();
    this.mediaCtx.seeking = true;
  } else if (event.type === 'vds-seeked') {
    this._fireWaiting.cancel();
    this.satisfyMediaRequest('seeked', event);
  }
}
// True only while the debounced `vds-waiting` event is being dispatched; guards
// `_handleWaiting` against re-processing the event it is itself firing.
protected _firingWaiting = false;
// Most recent raw waiting event, stashed until the debounced dispatch consumes it.
protected _lastWaitingEvent?: WaitingEvent;
// Debounced (300ms) dispatcher for `vds-waiting`: skipped when playback has resumed,
// a seek request is pending, or the stashed waiting event was already consumed.
protected _fireWaiting = debounce(() => {
  if (
    this.mediaCtx.playing ||
    this._isSeekingRequestPending ||
    !this._lastWaitingEvent
  ) {
    return;
  }
  this.mediaCtx.waiting = true;
  this._firingWaiting = true;
  const event = vdsEvent('vds-waiting', {
    originalEvent: this._lastWaitingEvent
  });
  this._host.dispatchEvent(event);
  this._firingWaiting = false;
  this._lastWaitingEvent = undefined;
}, 300);
protected _handleWaiting(event: WaitingEvent): void {
  if (this._firingWaiting) return;
  // Swallow the raw event; a debounced `vds-waiting` is re-dispatched by `_fireWaiting`.
  event.stopImmediatePropagation();
  this.mediaCtx.waiting = false;
  this._lastWaitingEvent = event;
  this._fireWaiting();
}
}
import { throws } from 'assert';
import { Template } from '@aws-cdk/assertions';
import * as ec2 from '@aws-cdk/aws-ec2';
import * as ecs from '@aws-cdk/aws-ecs';
import * as iam from '@aws-cdk/aws-iam';
import * as cdk from '@aws-cdk/core';
import * as batch from '../lib';
describe('Batch Compute Environment', () => {
let expectedManagedDefaultComputeProps: any;
let defaultServiceRole: any;
let stack: cdk.Stack;
let vpc: ec2.Vpc;
beforeEach(() => {
  // GIVEN
  stack = new cdk.Stack();
  vpc = new ec2.Vpc(stack, 'test-vpc');
  // Expected `ServiceRole` template fragment shared by several assertions below.
  defaultServiceRole = {
    ServiceRole: {
      'Fn::GetAtt': [
        'testcomputeenvResourceServiceInstanceRole105069A5',
        'Arn',
      ],
    },
  };
  // Builds the expected `ComputeResources` template fragment for a managed
  // environment, with `overrides` merged over the defaults.
  expectedManagedDefaultComputeProps = (overrides: any) => {
    return {
      ComputeResources: {
        AllocationStrategy: batch.AllocationStrategy.BEST_FIT,
        InstanceRole: {
          'Fn::GetAtt': [
            'testcomputeenvInstanceProfileCBD87EAB',
            'Arn',
          ],
        },
        InstanceTypes: [
          'optimal',
        ],
        MaxvCpus: 256,
        MinvCpus: 0,
        Subnets: [
          {
            Ref: 'testvpcPrivateSubnet1Subnet865FB50A',
          },
          {
            Ref: 'testvpcPrivateSubnet2Subnet23D3396F',
          },
        ],
        Type: batch.ComputeResourceType.ON_DEMAND,
        ...overrides,
      },
    };
  };
});
describe('when validating props', () => {
  test('should deny setting compute resources when using type unmanaged', () => {
    // WHEN / THEN — an unmanaged environment must not define compute resources.
    throws(() =>
      new batch.ComputeEnvironment(stack, 'test-compute-env', {
        managed: false,
        computeResources: { vpc },
      })
    );
  });
  test('should deny if creating a managed environment with no provided compute resource props', () => {
    // WHEN / THEN — a managed environment requires compute resources.
    throws(() =>
      new batch.ComputeEnvironment(stack, 'test-compute-env', {
        managed: true,
      })
    );
  });
});
describe('using fargate resources', () => {
test('should deny setting bid percentage', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
bidPercentage: -1,
},
});
});
});
test('should deny setting allocation strategy', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
allocationStrategy: batch.AllocationStrategy.BEST_FIT,
},
});
});
});
test('should deny setting desired vCPUs', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
desiredvCpus: 1,
},
});
});
});
test('should deny setting min vCPUs', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
minvCpus: 1,
},
});
});
});
test('should deny setting image', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
image: ec2.MachineImage.latestAmazonLinux(),
},
});
});
});
test('should deny setting instance types', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
instanceTypes: [],
},
});
});
});
test('should deny setting EC2 key pair', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
ec2KeyPair: 'test',
},
});
});
});
test('should deny setting instance role', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
instanceRole: 'test',
},
});
});
});
test('should deny setting launch template', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
launchTemplate: {
launchTemplateName: 'test-template',
},
},
});
});
});
test('should deny setting placement group', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
placementGroup: 'test',
},
});
});
});
test('should deny setting spot fleet role', () => {
// THEN
throws(() => {
// WHEN
new batch.ComputeEnvironment(stack, 'test-compute-env', {
managed: true,
computeResources: {
vpc,
type: batch.ComputeResourceType.FARGATE,
spotFleetRole: iam.Role.fromRoleArn(stack, 'test-role-arn', 'test-role'),
},
});
});
});
});
describe('using spot resources', () => {
  test('should provide a spot fleet role if one is not given and allocationStrategy is BEST_FIT', () => {
    // WHEN
    new batch.ComputeEnvironment(stack, 'test-compute-env', {
      managed: true,
      computeResources: {
        type: batch.ComputeResourceType.SPOT,
        allocationStrategy: batch.AllocationStrategy.BEST_FIT,
        vpc,
      },
    });
    // THEN
    Template.fromStack(stack).hasResourceProperties('AWS::Batch::ComputeEnvironment', {
      Type: 'MANAGED',
      ...expectedManagedDefaultComputeProps({
        Type: batch.ComputeResourceType.SPOT,
        // The AWS service-linked Spot Fleet role ARN, synthesized per partition/account.
        SpotIamFleetRole: {
          'Fn::Join': [
            '',
            [
              'arn:',
              {
                Ref: 'AWS::Partition',
              },
              ':iam::',
              {
                Ref: 'AWS::AccountId',
              },
              ':role/aws-service-role/spotfleet.amazonaws.com/AWSServiceRoleForEC2SpotFleet',
            ],
          ],
        },
      }),
    });
  });
  describe('with a bid percentage', () => {
    test('should deny my bid if set below 0', () => {
      // THEN
      throws(() => {
        // WHEN
        new batch.ComputeEnvironment(stack, 'test-compute-env', {
          managed: true,
          computeResources: {
            vpc,
            type: batch.ComputeResourceType.SPOT,
            bidPercentage: -1,
          },
        });
      });
    });
    test('should deny my bid if above 100', () => {
      // THEN
      throws(() => {
        // WHEN
        new batch.ComputeEnvironment(stack, 'test-compute-env', {
          managed: true,
          computeResources: {
            vpc,
            type: batch.ComputeResourceType.SPOT,
            bidPercentage: 101,
          },
        });
      });
    });
  });
});
describe('with properties specified', () => {
test('renders the correct cloudformation properties', () => {
  // WHEN
  const props = {
    computeEnvironmentName: 'my-test-compute-env',
    computeResources: {
      allocationStrategy: batch.AllocationStrategy.BEST_FIT,
      vpc,
      computeResourcesTags: {
        'Name': 'AWS Batch Instance - C4OnDemand',
        'Tag Other': 'Has other value',
      },
      desiredvCpus: 1,
      ec2KeyPair: 'my-key-pair',
      image: ecs.EcsOptimizedImage.amazonLinux2(ecs.AmiHardwareType.STANDARD),
      // Instance profile ARN handed to Batch; the profile wraps an ECS instance role.
      instanceRole: new iam.CfnInstanceProfile(stack, 'Instance-Profile', {
        roles: [new iam.Role(stack, 'Ecs-Instance-Role', {
          assumedBy: new iam.ServicePrincipal('ec2.amazonaws.com'),
          managedPolicies: [
            iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AmazonEC2ContainerServiceforEC2Role'),
          ],
        }).roleName],
      }).attrArn,
      instanceTypes: [
        ec2.InstanceType.of(ec2.InstanceClass.T2, ec2.InstanceSize.MICRO),
      ],
      maxvCpus: 4,
      minvCpus: 1,
      placementGroup: 'example-cluster-group',
      securityGroups: [
        new ec2.SecurityGroup(stack, 'test-sg', {
          vpc,
          allowAllOutbound: true,
        }),
      ],
      type: batch.ComputeResourceType.ON_DEMAND,
      vpcSubnets: {
        subnetType: ec2.SubnetType.PRIVATE_WITH_NAT,
      },
    } as batch.ComputeResources,
    enabled: false,
    managed: true,
  };
  new batch.ComputeEnvironment(stack, 'test-compute-env', props);
  // THEN
  Template.fromStack(stack).hasResourceProperties('AWS::Batch::ComputeEnvironment', {
    ComputeEnvironmentName: 'my-test-compute-env',
    Type: 'MANAGED',
    // `enabled: false` maps to State DISABLED.
    State: 'DISABLED',
    ServiceRole: {
      'Fn::GetAtt': [
        'testcomputeenvResourceServiceInstanceRole105069A5',
        'Arn',
      ],
    },
    ComputeResources: {
      AllocationStrategy: batch.AllocationStrategy.BEST_FIT,
      DesiredvCpus: props.computeResources.desiredvCpus,
      Ec2KeyPair: props.computeResources.ec2KeyPair,
      ImageId: {
        Ref: 'SsmParameterValueawsserviceecsoptimizedamiamazonlinux2recommendedimageidC96584B6F00A464EAD1953AFF4B05118Parameter',
      },
      InstanceRole: {
        'Fn::GetAtt': [
          // NOTE(review): the ternary only guards against an undefined instanceRole;
          // the resolved logical ID is effectively hard-coded to 'InstanceProfile'.
          props.computeResources.instanceRole ? 'InstanceProfile' : '',
          'Arn',
        ],
      },
      InstanceTypes: [
        props.computeResources.instanceTypes ? props.computeResources.instanceTypes[0].toString() : '',
      ],
      MaxvCpus: props.computeResources.maxvCpus,
      MinvCpus: props.computeResources.minvCpus,
      PlacementGroup: props.computeResources.placementGroup,
      SecurityGroupIds: [
        {
          'Fn::GetAtt': [
            'testsg872EB48A',
            'GroupId',
          ],
        },
      ],
      Subnets: [
        {
          // Subnet logical IDs are derived from the VPC construct's unique ID.
          Ref: `${cdk.Names.uniqueId(vpc)}PrivateSubnet1Subnet865FB50A`,
        },
        {
          Ref: `${cdk.Names.uniqueId(vpc)}PrivateSubnet2Subnet23D3396F`,
        },
      ],
      Tags: {
        'Name': 'AWS Batch Instance - C4OnDemand',
        'Tag Other': 'Has other value',
      },
      Type: 'EC2',
    },
  });
});
describe('with no allocation strategy specified', () => {
  test('should default to a best_fit strategy', () => {
    // WHEN
    new batch.ComputeEnvironment(stack, 'test-compute-env', {
      managed: true,
      computeResources: {
        vpc,
      },
    });
    // THEN
    // NOTE(review): only MANAGED + ServiceRole are asserted here; the BEST_FIT default
    // itself is not checked explicitly — confirm whether that was intended.
    Template.fromStack(stack).hasResourceProperties('AWS::Batch::ComputeEnvironment', {
      Type: 'MANAGED',
      ServiceRole: {
        'Fn::GetAtt': [
          'testcomputeenvResourceServiceInstanceRole105069A5',
          'Arn',
        ],
      },
    });
  });
});
describe('with a min vcpu value', () => {
  test('should deny less than 0', () => {
    // WHEN / THEN — a negative minvCpus must be rejected.
    throws(() =>
      new batch.ComputeEnvironment(stack, 'test-compute-env', {
        computeResources: { vpc, minvCpus: -1 },
      })
    );
  });
  test('cannot be greater than the max vcpu value', () => {
    // WHEN / THEN — minvCpus must not exceed maxvCpus.
    throws(() =>
      new batch.ComputeEnvironment(stack, 'test-compute-env', {
        computeResources: { vpc, minvCpus: 2, maxvCpus: 1 },
      })
    );
  });
});
describe('with no min vcpu value provided', () => {
  test('should default to 0', () => {
    // WHEN
    new batch.ComputeEnvironment(stack, 'test-compute-env', {
      managed: true,
      computeResources: {
        vpc,
      },
    });
    // THEN
    Template.fromStack(stack).hasResourceProperties('AWS::Batch::ComputeEnvironment', {
      ...defaultServiceRole,
      ...expectedManagedDefaultComputeProps({
        MinvCpus: 0,
      }),
    });
  });
});
describe('with no max vcpu value provided', () => {
  test('should default to 256', () => {
    // WHEN
    new batch.ComputeEnvironment(stack, 'test-compute-env', {
      managed: true,
      computeResources: {
        vpc,
      },
    });
    // THEN
    Template.fromStack(stack).hasResourceProperties('AWS::Batch::ComputeEnvironment', {
      ...expectedManagedDefaultComputeProps({
        MaxvCpus: 256,
      }),
    });
  });
});
describe('with no instance role specified', () => {
  test('should generate a role for me', () => {
    // WHEN
    new batch.ComputeEnvironment(stack, 'test-compute-env', {
      managed: true,
      computeResources: {
        vpc,
      },
    });
    // THEN
    // NOTE(review): 2 roles expected — presumably the service role plus the
    // generated instance role; confirm against the construct implementation.
    Template.fromStack(stack).resourceCountIs('AWS::Batch::ComputeEnvironment', 1);
    Template.fromStack(stack).resourceCountIs('AWS::IAM::Role', 2);
  });
});
describe('with no instance type defined', () => {
  test('should default to optimal matching', () => {
    // WHEN
    new batch.ComputeEnvironment(stack, 'test-compute-env', {
      managed: true,
      computeResources: {
        vpc,
      },
    });
    // THEN
    Template.fromStack(stack).hasResourceProperties('AWS::Batch::ComputeEnvironment', {
      ...expectedManagedDefaultComputeProps({
        InstanceTypes: ['optimal'],
      }),
    });
  });
});
describe('with no type specified', () => {
  test('should default to EC2', () => {
    // WHEN
    new batch.ComputeEnvironment(stack, 'test-compute-env', {
      managed: true,
      computeResources: {
        vpc,
      },
    });
    // THEN
    Template.fromStack(stack).hasResourceProperties('AWS::Batch::ComputeEnvironment', {
      ...expectedManagedDefaultComputeProps({
        Type: batch.ComputeResourceType.ON_DEMAND,
      }),
    });
  });
});
});
});
interface ErrorConstructor {
  /** Create .stack property on a target object */
  captureStackTrace(targetObject: object, constructorOpt?: Function): void;
  /**
   * Optional override for formatting stack traces
   *
   * @see https://v8.dev/docs/stack-trace-api#customizing-stack-traces
   */
  prepareStackTrace?: (err: Error, stackTraces: NodeJS.CallSite[]) => any;
  /** Maximum number of stack frames captured in `Error.stack`. */
  stackTraceLimit: number;
}
// Node.js ESNEXT support
interface String {
  /** Removes whitespace from the left end of a string. */
  // NOTE(review): legacy alias of `trimStart` per MDN — prefer `trimStart`.
  trimLeft(): string;
  /** Removes whitespace from the right end of a string. */
  // NOTE(review): legacy alias of `trimEnd` per MDN — prefer `trimEnd`.
  trimRight(): string;
  /** Returns a copy with leading whitespace removed. */
  trimStart(): string;
  /** Returns a copy with trailing whitespace removed. */
  trimEnd(): string;
}
/** Metadata available on `import.meta` inside ES modules. */
interface ImportMeta {
  url: string;
}
/*-----------------------------------------------*
* *
* GLOBAL *
* *
------------------------------------------------*/
// For backwards compatibility
interface NodeRequire extends NodeJS.Require {}
interface RequireResolve extends NodeJS.RequireResolve {}
interface NodeModule extends NodeJS.Module {}
declare var process: NodeJS.Process;
declare var console: Console;
declare var __filename: string;
declare var __dirname: string;
declare function setTimeout(callback: (...args: any[]) => void, ms?: number, ...args: any[]): NodeJS.Timeout;
// Namespace merge enabling `util.promisify(setTimeout)`.
declare namespace setTimeout {
  function __promisify__(ms: number): Promise<void>;
  function __promisify__<T>(ms: number, value: T): Promise<T>;
}
declare function clearTimeout(timeoutId: NodeJS.Timeout): void;
declare function setInterval(callback: (...args: any[]) => void, ms?: number, ...args: any[]): NodeJS.Timeout;
declare function clearInterval(intervalId: NodeJS.Timeout): void;
declare function setImmediate(callback: (...args: any[]) => void, ...args: any[]): NodeJS.Immediate;
// Namespace merge enabling `util.promisify(setImmediate)`.
declare namespace setImmediate {
  function __promisify__(): Promise<void>;
  function __promisify__<T>(value: T): Promise<T>;
}
declare function clearImmediate(immediateId: NodeJS.Immediate): void;
declare function queueMicrotask(callback: () => void): void;
declare var require: NodeRequire;
declare var module: NodeModule;
// Same as module.exports
declare var exports: any;
// Buffer class
/** String encodings accepted by Buffer conversions. */
type BufferEncoding = "ascii" | "utf8" | "utf-8" | "utf16le" | "ucs2" | "ucs-2" | "base64" | "base64url" | "latin1" | "binary" | "hex";
/** Either `T` itself, or an object whose `valueOf()` yields `T`. */
type WithImplicitCoercion<T> = T | { valueOf(): T };
/**
 * Raw data is stored in instances of the Buffer class.
 * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized.
 * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
 */
declare class Buffer extends Uint8Array {
  /**
   * Allocates a new buffer containing the given {str}.
   *
   * @param str String to store in buffer.
   * @param encoding encoding to use, optional. Default is 'utf8'
   * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead.
   */
  constructor(str: string, encoding?: BufferEncoding);
  /**
   * Allocates a new buffer of {size} octets.
   *
   * @param size count of octets to allocate.
   * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`).
   */
  constructor(size: number);
  /**
   * Allocates a new buffer containing the given {array} of octets.
   *
   * @param array The octets to store.
   * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
   */
  constructor(array: Uint8Array);
  /**
   * Produces a Buffer backed by the same allocated memory as
   * the given {ArrayBuffer}/{SharedArrayBuffer}.
   *
   *
   * @param arrayBuffer The ArrayBuffer with which to share memory.
   * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead.
   */
  constructor(arrayBuffer: ArrayBuffer | SharedArrayBuffer);
  /**
   * Allocates a new buffer containing the given {array} of octets.
   *
   * @param array The octets to store.
   * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
   */
  constructor(array: ReadonlyArray<any>);
  /**
   * Copies the passed {buffer} data onto a new {Buffer} instance.
   *
   * @param buffer The buffer to copy.
   * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead.
   */
  constructor(buffer: Buffer);
  /**
   * When passed a reference to the .buffer property of a TypedArray instance,
   * the newly created Buffer will share the same allocated memory as the TypedArray.
   * The optional {byteOffset} and {length} arguments specify a memory range
   * within the {arrayBuffer} that will be shared by the Buffer.
   *
   * @param arrayBuffer The .buffer property of any TypedArray or a new ArrayBuffer()
   */
  static from(arrayBuffer: WithImplicitCoercion<ArrayBuffer | SharedArrayBuffer>, byteOffset?: number, length?: number): Buffer;
  /**
   * Creates a new Buffer using the passed {data}
   * @param data data to create a new Buffer
   */
  static from(data: Uint8Array | ReadonlyArray<number>): Buffer;
  static from(data: WithImplicitCoercion<Uint8Array | ReadonlyArray<number> | string>): Buffer;
  /**
   * Creates a new Buffer containing the given JavaScript string {str}.
   * If provided, the {encoding} parameter identifies the character encoding.
   * If not provided, {encoding} defaults to 'utf8'.
   */
  static from(str: WithImplicitCoercion<string> | { [Symbol.toPrimitive](hint: 'string'): string }, encoding?: BufferEncoding): Buffer;
  /**
   * Creates a new Buffer using the passed {data}
   * @param values to create a new Buffer
   */
  static of(...items: number[]): Buffer;
  /**
   * Returns true if {obj} is a Buffer
   *
   * @param obj object to test.
   */
  static isBuffer(obj: any): obj is Buffer;
  /**
   * Returns true if {encoding} is a valid encoding argument.
   * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
   *
   * @param encoding string to test.
   */
  static isEncoding(encoding: string): encoding is BufferEncoding;
  /**
   * Gives the actual byte length of a string. encoding defaults to 'utf8'.
   * This is not the same as String.prototype.length since that returns the number of characters in a string.
   *
   * @param string string to test.
   * @param encoding encoding used to evaluate (defaults to 'utf8')
   */
  static byteLength(
    string: string | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
    encoding?: BufferEncoding
  ): number;
  /**
   * Returns a buffer which is the result of concatenating all the buffers in the list together.
   *
   * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
   * If the list has exactly one item, then the first item of the list is returned.
   * If the list has more than one item, then a new Buffer is created.
   *
   * @param list An array of Buffer objects to concatenate
   * @param totalLength Total length of the buffers when concatenated.
   * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
   */
  static concat(list: ReadonlyArray<Uint8Array>, totalLength?: number): Buffer;
  /**
   * The same as buf1.compare(buf2).
   */
  static compare(buf1: Uint8Array, buf2: Uint8Array): number;
  /**
   * Allocates a new buffer of {size} octets.
   *
   * @param size count of octets to allocate.
   * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
   * If parameter is omitted, buffer will be filled with zeros.
   * @param encoding encoding used for call to buf.fill while initalizing
   */
  static alloc(size: number, fill?: string | Buffer | number, encoding?: BufferEncoding): Buffer;
  /**
   * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
   * of the newly created Buffer are unknown and may contain sensitive data.
   *
   * @param size count of octets to allocate
   */
  static allocUnsafe(size: number): Buffer;
  /**
   * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
   * of the newly created Buffer are unknown and may contain sensitive data.
   *
   * @param size count of octets to allocate
   */
  static allocUnsafeSlow(size: number): Buffer;
  /**
   * This is the number of bytes used to determine the size of pre-allocated, internal Buffer instances used for pooling. This value may be modified.
   */
  static poolSize: number;
  /** Writes `string` into the buffer at `offset` (default 0); returns the number of bytes written. */
  write(string: string, encoding?: BufferEncoding): number;
  write(string: string, offset: number, encoding?: BufferEncoding): number;
  write(string: string, offset: number, length: number, encoding?: BufferEncoding): number;
  /** Decodes the buffer (from `start` up to, but not including, `end`) into a string. */
  toString(encoding?: BufferEncoding, start?: number, end?: number): string;
  /** Returns a JSON representation; used implicitly by `JSON.stringify()`. */
  toJSON(): { type: 'Buffer'; data: number[] };
  /** Returns `true` if this buffer and `otherBuffer` contain exactly the same bytes. */
  equals(otherBuffer: Uint8Array): boolean;
  /** Lexicographic byte comparison; returns -1, 0, or 1 (suitable for `Array#sort`). */
  compare(
    otherBuffer: Uint8Array,
    targetStart?: number,
    targetEnd?: number,
    sourceStart?: number,
    sourceEnd?: number
  ): number;
  /** Copies a region of this buffer into `targetBuffer`; returns the number of bytes copied. */
  copy(targetBuffer: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
  /**
   * Returns a new `Buffer` that references **the same memory as the original**, but offset and cropped by the start and end indices.
   *
   * This method is incompatible with `Uint8Array#slice()`, which returns a copy of the original memory.
   *
   * @param begin Where the new `Buffer` will start. Default: `0`.
   * @param end Where the new `Buffer` will end (not inclusive). Default: `buf.length`.
   */
  slice(begin?: number, end?: number): Buffer;
  /**
   * Returns a new `Buffer` that references **the same memory as the original**, but offset and cropped by the start and end indices.
   *
   * This method is compatible with `Uint8Array#subarray()`.
   *
   * @param begin Where the new `Buffer` will start. Default: `0`.
   * @param end Where the new `Buffer` will end (not inclusive). Default: `buf.length`.
   */
  subarray(begin?: number, end?: number): Buffer;
  // Numeric write accessors: LE/BE suffix selects little/big-endian byte order;
  // `offset` defaults to 0 and the return value is the offset after the written bytes.
  writeBigInt64BE(value: bigint, offset?: number): number;
  writeBigInt64LE(value: bigint, offset?: number): number;
  writeBigUInt64BE(value: bigint, offset?: number): number;
  writeBigUInt64LE(value: bigint, offset?: number): number;
  writeUIntLE(value: number, offset: number, byteLength: number): number;
  writeUIntBE(value: number, offset: number, byteLength: number): number;
  writeIntLE(value: number, offset: number, byteLength: number): number;
  writeIntBE(value: number, offset: number, byteLength: number): number;
  // Numeric read accessors: mirror the write variants above.
  readBigUInt64BE(offset?: number): bigint;
  readBigUInt64LE(offset?: number): bigint;
  readBigInt64BE(offset?: number): bigint;
  readBigInt64LE(offset?: number): bigint;
  readUIntLE(offset: number, byteLength: number): number;
  readUIntBE(offset: number, byteLength: number): number;
  readIntLE(offset: number, byteLength: number): number;
  readIntBE(offset: number, byteLength: number): number;
  readUInt8(offset?: number): number;
  readUInt16LE(offset?: number): number;
  readUInt16BE(offset?: number): number;
  readUInt32LE(offset?: number): number;
  readUInt32BE(offset?: number): number;
  readInt8(offset?: number): number;
  readInt16LE(offset?: number): number;
  readInt16BE(offset?: number): number;
  readInt32LE(offset?: number): number;
  readInt32BE(offset?: number): number;
  readFloatLE(offset?: number): number;
  readFloatBE(offset?: number): number;
  readDoubleLE(offset?: number): number;
  readDoubleBE(offset?: number): number;
  /** Reverses the buffer in place and returns a reference to itself. */
  reverse(): this;
  // swap16/32/64 interpret the buffer as 16-/32-/64-bit words and swap their byte order in place.
  swap16(): Buffer;
  swap32(): Buffer;
  swap64(): Buffer;
  writeUInt8(value: number, offset?: number): number;
  writeUInt16LE(value: number, offset?: number): number;
  writeUInt16BE(value: number, offset?: number): number;
  writeUInt32LE(value: number, offset?: number): number;
  writeUInt32BE(value: number, offset?: number): number;
  writeInt8(value: number, offset?: number): number;
  writeInt16LE(value: number, offset?: number): number;
  writeInt16BE(value: number, offset?: number): number;
  writeInt32LE(value: number, offset?: number): number;
  writeInt32BE(value: number, offset?: number): number;
  writeFloatLE(value: number, offset?: number): number;
  writeFloatBE(value: number, offset?: number): number;
  writeDoubleLE(value: number, offset?: number): number;
  writeDoubleBE(value: number, offset?: number): number;
  /** Fills the buffer with `value` (repeated as needed) and returns a reference to itself. */
  fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this;
  /** Index of the first occurrence of `value` at or after `byteOffset`, or -1 if absent. */
  indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number;
  /** Index of the last occurrence of `value`, searching backwards from `byteOffset`, or -1. */
  lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number;
  /** Iterator of `[index, byte]` pairs. */
  entries(): IterableIterator<[number, number]>;
  /** Equivalent to `indexOf(value, byteOffset, encoding) !== -1`. */
  includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean;
  /** Iterator of byte indices. */
  keys(): IterableIterator<number>;
  /** Iterator of byte values. */
  values(): IterableIterator<number>;
}
//#region borrowed
// from https://github.com/microsoft/TypeScript/blob/38da7c600c83e7b31193a62495239a0fe478cb67/lib/lib.webworker.d.ts#L633 until moved to separate lib
/** A controller object that allows you to abort one or more DOM requests as and when desired. */
interface AbortController {
  /**
   * Returns the AbortSignal object associated with this object.
   */
  readonly signal: AbortSignal;
  /**
   * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted.
   */
  abort(): void;
}
/** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */
interface AbortSignal {
  /**
   * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise.
   */
  readonly aborted: boolean;
}
// Value-side declarations so `new AbortController()` / `new AbortSignal()` type-check.
declare var AbortController: {
  prototype: AbortController;
  new(): AbortController;
};
declare var AbortSignal: {
  prototype: AbortSignal;
  new(): AbortSignal;
};
//#endregion borrowed
/*----------------------------------------------*
* *
* GLOBAL INTERFACES *
* *
*-----------------------------------------------*/
declare namespace NodeJS {
/** Options accepted by `util.inspect` (and `console` formatting). */
interface InspectOptions {
  /**
   * If set to `true`, getters are going to be
   * inspected as well. If set to `'get'` only getters without setter are going
   * to be inspected. If set to `'set'` only getters having a corresponding
   * setter are going to be inspected. This might cause side effects depending on
   * the getter function.
   * @default `false`
   */
  getters?: 'get' | 'set' | boolean;
  /** If `true`, non-enumerable symbols and properties are included in the result. */
  showHidden?: boolean;
  /**
   * Number of times to recurse while formatting nested objects.
   * @default 2
   */
  depth?: number | null;
  /** If `true`, the output is styled with ANSI color codes. */
  colors?: boolean;
  /** If `false`, custom `[util.inspect.custom]` functions are not invoked. */
  customInspect?: boolean;
  /** If `true`, `Proxy` inspection includes the target and handler objects. */
  showProxy?: boolean;
  /** Maximum number of array/TypedArray elements to include; `null`/`Infinity` for all. */
  maxArrayLength?: number | null;
  /**
   * Specifies the maximum number of characters to
   * include when formatting. Set to `null` or `Infinity` to show all elements.
   * Set to `0` or negative to show no characters.
   * @default 10000
   */
  maxStringLength?: number | null;
  /** Length at which values are split across multiple lines. */
  breakLength?: number;
  /**
   * Setting this to `false` causes each object key
   * to be displayed on a new line. It will also add new lines to text that is
   * longer than `breakLength`. If set to a number, the most `n` inner elements
   * are united on a single line as long as all properties fit into
   * `breakLength`. Short array elements are also grouped together. Note that no
   * text will be reduced below 16 characters, no matter the `breakLength` size.
   * For more information, see the example below.
   * @default `true`
   */
  compact?: boolean | number;
  /** If `true` or a comparator function, object keys are sorted in the output. */
  sorted?: boolean | ((a: string, b: string) => number);
}
/**
 * A single stack frame as exposed by the V8 stack trace API
 * (see `ErrorConstructor.prepareStackTrace`).
 */
interface CallSite {
  /**
   * Value of "this"
   */
  getThis(): any;
  /**
   * Type of "this" as a string.
   * This is the name of the function stored in the constructor field of
   * "this", if available. Otherwise the object's [[Class]] internal
   * property.
   */
  getTypeName(): string | null;
  /**
   * Current function
   */
  getFunction(): Function | undefined;
  /**
   * Name of the current function, typically its name property.
   * If a name property is not available an attempt will be made to try
   * to infer a name from the function's context.
   */
  getFunctionName(): string | null;
  /**
   * Name of the property [of "this" or one of its prototypes] that holds
   * the current function
   */
  getMethodName(): string | null;
  /**
   * Name of the script [if this function was defined in a script]
   */
  getFileName(): string | null;
  /**
   * Current line number [if this function was defined in a script]
   */
  getLineNumber(): number | null;
  /**
   * Current column number [if this function was defined in a script]
   */
  getColumnNumber(): number | null;
  /**
   * A call site object representing the location where eval was called
   * [if this function was created using a call to eval]
   */
  getEvalOrigin(): string | undefined;
  /**
   * Is this a toplevel invocation, that is, is "this" the global object?
   */
  isToplevel(): boolean;
  /**
   * Does this call take place in code defined by a call to eval?
   */
  isEval(): boolean;
  /**
   * Is this call in native V8 code?
   */
  isNative(): boolean;
  /**
   * Is this a constructor call?
   */
  isConstructor(): boolean;
}
/**
 * An `Error` augmented with the fields Node.js attaches to failed
 * system calls (see the Node.js "Errors" documentation). All fields are
 * optional because not every error originates from a syscall.
 */
interface ErrnoException extends Error {
  /** Numeric (libuv) error number, when available. */
  errno?: number;
  /** String error code such as 'ENOENT' or 'ECONNREFUSED'. */
  code?: string;
  /** Filesystem path involved in the failed operation, if any. */
  path?: string;
  /** Name of the system call that failed, e.g. 'open'. */
  syscall?: string;
  stack?: string;
}
/**
 * Minimal readable-stream contract (the event-emitter side plus the
 * flow-control methods). Matches the shape of `stream.Readable`.
 */
interface ReadableStream extends EventEmitter {
  readable: boolean;
  read(size?: number): string | Buffer;
  setEncoding(encoding: BufferEncoding): this;
  pause(): this;
  resume(): this;
  isPaused(): boolean;
  /** Pipe all data into `destination`; returns the destination for chaining. */
  pipe<T extends WritableStream>(destination: T, options?: { end?: boolean; }): T;
  unpipe(destination?: WritableStream): this;
  /** Push a chunk back onto the internal buffer (read it again later). */
  unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void;
  wrap(oldStream: ReadableStream): this;
  [Symbol.asyncIterator](): AsyncIterableIterator<string | Buffer>;
}
/**
 * Minimal writable-stream contract. `write` returns false when the
 * internal buffer is full (caller should wait for 'drain').
 */
interface WritableStream extends EventEmitter {
  writable: boolean;
  write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean;
  write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean;
  end(cb?: () => void): void;
  end(data: string | Uint8Array, cb?: () => void): void;
  end(str: string, encoding?: BufferEncoding, cb?: () => void): void;
}
/** A duplex stream: both readable and writable (e.g. a socket). */
interface ReadWriteStream extends ReadableStream, WritableStream { }
/**
 * Shape of the Node.js `global` object: the standard ECMAScript globals
 * plus Node-specific additions (Buffer, timers, `global` self-reference,
 * optional `gc`/`v8debug` exposed under certain flags).
 */
interface Global {
  AbortController: typeof AbortController;
  AbortSignal: typeof AbortSignal;
  Array: typeof Array;
  ArrayBuffer: typeof ArrayBuffer;
  Boolean: typeof Boolean;
  Buffer: typeof Buffer;
  DataView: typeof DataView;
  Date: typeof Date;
  Error: typeof Error;
  EvalError: typeof EvalError;
  Float32Array: typeof Float32Array;
  Float64Array: typeof Float64Array;
  Function: typeof Function;
  Infinity: typeof Infinity;
  Int16Array: typeof Int16Array;
  Int32Array: typeof Int32Array;
  Int8Array: typeof Int8Array;
  Intl: typeof Intl;
  JSON: typeof JSON;
  Map: MapConstructor;
  Math: typeof Math;
  NaN: typeof NaN;
  Number: typeof Number;
  Object: typeof Object;
  Promise: typeof Promise;
  RangeError: typeof RangeError;
  ReferenceError: typeof ReferenceError;
  RegExp: typeof RegExp;
  Set: SetConstructor;
  String: typeof String;
  Symbol: Function;
  SyntaxError: typeof SyntaxError;
  TypeError: typeof TypeError;
  URIError: typeof URIError;
  Uint16Array: typeof Uint16Array;
  Uint32Array: typeof Uint32Array;
  Uint8Array: typeof Uint8Array;
  Uint8ClampedArray: typeof Uint8ClampedArray;
  WeakMap: WeakMapConstructor;
  WeakSet: WeakSetConstructor;
  // Node timer functions return/accept object handles, not plain numbers
  // as in the browser.
  clearImmediate: (immediateId: Immediate) => void;
  clearInterval: (intervalId: Timeout) => void;
  clearTimeout: (timeoutId: Timeout) => void;
  decodeURI: typeof decodeURI;
  decodeURIComponent: typeof decodeURIComponent;
  encodeURI: typeof encodeURI;
  encodeURIComponent: typeof encodeURIComponent;
  escape: (str: string) => string;
  eval: typeof eval;
  /** Self-reference: `global.global === global`. */
  global: Global;
  isFinite: typeof isFinite;
  isNaN: typeof isNaN;
  parseFloat: typeof parseFloat;
  parseInt: typeof parseInt;
  setImmediate: (callback: (...args: any[]) => void, ...args: any[]) => Immediate;
  setInterval: (callback: (...args: any[]) => void, ms?: number, ...args: any[]) => Timeout;
  setTimeout: (callback: (...args: any[]) => void, ms?: number, ...args: any[]) => Timeout;
  queueMicrotask: typeof queueMicrotask;
  undefined: typeof undefined;
  unescape: (str: string) => string;
  /** Only present when Node is started with --expose-gc. */
  gc: () => void;
  v8debug?: any;
}
/**
 * An object that can opt in/out of keeping the Node event loop alive.
 * `ref()` keeps the process running while the handle is active; `unref()`
 * lets the process exit even if the handle is still pending.
 */
interface RefCounted {
  ref(): this;
  unref(): this;
}
/**
 * Legacy timer handle shape, kept for compatibility with older typings.
 * `Symbol.toPrimitive` allows a handle to be coerced to the numeric id
 * expected by browser-style clearTimeout/clearInterval calls.
 */
interface Timer extends RefCounted {
  hasRef(): boolean;
  refresh(): this;
  [Symbol.toPrimitive](): number;
}
/** Handle returned by `setImmediate`. */
interface Immediate extends RefCounted {
  hasRef(): boolean;
  _onImmediate: Function; // to distinguish it from the Timeout class
}
/**
 * Handle returned by `setTimeout`/`setInterval`.
 * NOTE: the members below are already declared on `Timer`; they are
 * re-declared here redundantly (harmless, inherited signatures match).
 */
interface Timeout extends Timer {
  hasRef(): boolean;
  refresh(): this;
  [Symbol.toPrimitive](): number;
}
/** Union of all concrete typed-array views over an ArrayBuffer. */
type TypedArray =
  | Uint8Array
  | Uint8ClampedArray
  | Uint16Array
  | Uint32Array
  | Int8Array
  | Int16Array
  | Int32Array
  | BigUint64Array
  | BigInt64Array
  | Float32Array
  | Float64Array;
/** Anything that views an ArrayBuffer: a typed array or a DataView. */
type ArrayBufferView = TypedArray | DataView;
/** Shape of the CommonJS `require` function and its attached properties. */
interface Require {
  (id: string): any;
  resolve: RequireResolve;
  /** Module cache, keyed by resolved filename. */
  cache: Dict<NodeModule>;
  /**
   * @deprecated
   */
  extensions: RequireExtensions;
  /** Entry-point module, or undefined (e.g. in the REPL). */
  main: Module | undefined;
}
/** `require.resolve`: maps a specifier to an absolute filename without loading it. */
interface RequireResolve {
  (id: string, options?: { paths?: string[]; }): string;
  /** Lookup paths used for `request`, or null for core modules. */
  paths(request: string): string[] | null;
}
/** Deprecated per-extension loader table (`require.extensions`). */
interface RequireExtensions extends Dict<(m: Module, filename: string) => any> {
  '.js': (m: Module, filename: string) => any;
  '.json': (m: Module, filename: string) => any;
  '.node': (m: Module, filename: string) => any;
}
/** A loaded CommonJS module, as seen via `module` inside a file. */
interface Module {
  /**
   * `true` if the module is running during the Node.js preload
   */
  isPreloading: boolean;
  exports: any;
  require: Require;
  /** Usually the resolved filename; '.' for the entry point. */
  id: string;
  filename: string;
  /** `true` once the module has finished executing. */
  loaded: boolean;
  /** @deprecated since 14.6.0 Please use `require.main` and `module.children` instead. */
  parent: Module | null | undefined;
  children: Module[];
  /**
   * @since 11.14.0
   *
   * The directory name of the module. This is usually the same as the path.dirname() of the module.id.
   */
  path: string;
  /** Resolution search paths for this module's requires. */
  paths: string[];
}
/**
 * String-keyed map. Values are `T | undefined` so missing-key lookups are
 * typed honestly (mirrors `noUncheckedIndexedAccess` behavior).
 */
interface Dict<T> {
  [key: string]: T | undefined;
}
/** Read-only variant of {@link Dict}. */
interface ReadOnlyDict<T> {
  readonly [key: string]: T | undefined;
}
} | the_stack |
import { Unlocable } from "../utils";
import { Production } from "../production";
import { WorldInterface } from "./worldInterface";
import { Unit } from "../units/unit";
import { GameModel } from "../gameModel";
import {
BuyAction,
BuyAndUnlockAction,
UpAction,
UpHire,
UpSpecial,
Research
} from "../units/action";
import { Cost } from "../cost";
import { TypeList } from "../typeList";
import { BaseWorld } from "./baseWorld";
import { Base } from "../units/base";
/**
 * Declares and wires up the entire research (science) tree for a world.
 *
 * Each entry is a `Research` constructed with what reads as
 * (id, name, description, costs, unlockedOnBuy, game, onBuy?).
 * NOTE(review): the exact constructor contract is defined in
 * ../units/action — confirm there; the comments below assume that reading.
 *
 * Construction order is load-bearing: a research must be constructed
 * before it can appear in a parent's unlock list, so `initStuff` builds
 * the tree leaf-first and the roots (e.g. `rDirt`, `up1`) come last.
 * Do not reorder statements in `initStuff`.
 */
export class Researchs implements WorldInterface {
  up1: Research;
  rDirt: Research;
  upCombined: Research;
  specialResearch: Research;
  prestigeResearch: Research;
  engineerRes: Research;
  machineryRes: Research;
  departmentRes: Research;
  experimentResearch: Research;
  composterResearch: Research;
  refineryResearch: Research;
  laserResearch: Research;
  hydroResearch: Research;
  planterResearch: Research;
  scientificMethod: Research;
  universityRes: Research;
  publicLesson: Research;
  advancedLesson: Research;
  depEduRes: Research;
  hereAndNow: Research;
  hereAndNow2: Research;
  adaptation: Research;
  evolution: Research;
  escape: Research;
  timeWarp: Research;
  r2: Research;
  r4: Research;
  bi: Research;
  constructor(public game: GameModel) {}
  // WorldInterface lifecycle hook; this feature has nothing to declare
  // before initStuff runs.
  public declareStuff() {}
  public initStuff() {
    // Business Intelligence: UI unlock showing producers/consumers.
    this.bi = new Research(
      "biResea",
      "Business Intelligence",
      "See who produces or consumes your resources.",
      [new Cost(this.game.baseWorld.science, new Decimal(2e3))],
      [],
      this.game
    );
    // Evolution: on buy, makes every future travel 5x more expensive but
    // 3x more rewarding (permanently scales the current world's values).
    this.evolution = new Research(
      "evolution",
      "Evolution",
      "Increase the resources need to travel to a new world (x5) and also increase the experience you will gain (x3).",
      [new Cost(this.game.baseWorld.science, new Decimal(1e10))],
      [],
      this.game,
      () => {
        this.game.world.toUnlock.forEach(
          t => (t.basePrice = t.basePrice.times(5))
        );
        this.game.world.experience = this.game.world.experience.times(3);
      }
    );
    // Escapism: halves travel costs on buy.
    this.escape = new Research(
      "escapism",
      "Escapism",
      "Reduce the resources need to travel to a new world by 50%.",
      [new Cost(this.game.baseWorld.science, new Decimal(5e10))],
      [],
      this.game,
      () => {
        this.game.world.toUnlock.forEach(
          t => (t.basePrice = t.basePrice.div(2))
        );
        // this.game.world.toUnlockMax.forEach(t => t.basePrice = t.basePrice.times(4))
      }
    );
    // Adaptation: same 50% travel discount; buying it reveals Escapism.
    this.adaptation = new Research(
      "adaptation",
      "Adaptation",
      "Reduce the resources need to travel to a new world by 50%.",
      [new Cost(this.game.baseWorld.science, new Decimal(5e8))],
      [this.escape],
      this.game,
      () => {
        this.game.world.toUnlock.forEach(
          t => (t.basePrice = t.basePrice.div(2))
        );
        // this.game.world.toUnlockMax.forEach(t => t.basePrice = t.basePrice.times(4))
      }
    );
    // Time Warp: instantly simulates 4 hours of production.
    // NOTE(review): 3600 * 4000 = 14,400,000, i.e. 4h in milliseconds —
    // confirm longUpdate's time unit is ms.
    this.timeWarp = new Research(
      "timeWarp",
      "Time warp",
      "4 hour of update.",
      [new Cost(this.game.baseWorld.science, new Decimal(1))],
      [],
      this.game,
      () => {
        this.game.longUpdate(3600 * 4000, true);
      }
    );
    // Over and Beyond: permanent +50% travel experience.
    this.hereAndNow2 = new Research(
      "han2Res",
      "Over and Beyond",
      "Get 50% more experience when travel.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e12))],
      [],
      this.game,
      () => {
        this.game.world.experience = this.game.world.experience.times(1.5);
      }
    );
    // Here and Now: immediately grants prestige experience and opens the
    // experience tab; unlocks Time Warp and Over and Beyond.
    // NOTE(review): description says "10% world experience" but the code
    // reads world.level / 10 — confirm which is intended.
    this.hereAndNow = new Research(
      "hereAndNow",
      "Here and Now",
      "Get 10% world experience, min 10.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e9))],
      [this.timeWarp, this.hereAndNow2],
      this.game,
      () => {
        const ne = Math.max(this.game.world.level / 10, 10);
        this.game.prestige.experience.quantity = this.game.prestige.experience.quantity.plus(
          ne
        );
        this.game.maxLevel = this.game.maxLevel.plus(ne);
        this.game.expTabAv = true;
      }
    );
    // University tier 4: Department of Education.
    this.depEduRes = new Research(
      "depEduRes",
      "Department of Education",
      "Unlock Department of Education.",
      [new Cost(this.game.baseWorld.science, new Decimal(3e10))],
      [this.game.science.depEdu],
      this.game
    );
    // University tier 3: scientist production.
    this.advancedLesson = new Research(
      "advancedLesson",
      "Advanced Lesson",
      "University also produces scientist.",
      [new Cost(this.game.baseWorld.science, new Decimal(3e6))],
      [this.game.science.scientistProduction, this.depEduRes],
      this.game
    );
    // University tier 2: student production.
    this.publicLesson = new Research(
      "publicLesson",
      "Public Lesson",
      "University also produces students.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e5))],
      [this.game.science.studentProduction, this.advancedLesson],
      this.game
    );
    // University tier 1: the building itself.
    this.universityRes = new Research(
      "University",
      "University",
      "Unlock university.",
      [new Cost(this.game.baseWorld.science, new Decimal(6e4))],
      [this.game.science.university, this.publicLesson],
      this.game
    );
    // Scientific Method: doubles science production (registered as a
    // bonusProduction multiplier just below) and unlocks the university line.
    this.scientificMethod = new Research(
      "scientificMethod",
      "Scientific Method",
      "Science production +100%",
      [new Cost(this.game.baseWorld.science, new Decimal(4e3))],
      [this.universityRes],
      this.game
    );
    this.game.baseWorld.science.bonusProduction.push([
      this.scientificMethod,
      new Decimal(1)
    ]);
    // Departments: unlocks every department in one research.
    const deps: Array<Unlocable> = this.game.engineers.listDep;
    this.departmentRes = new Research(
      "departementsRes",
      "Departments",
      "Departments yield engineers.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e11))],
      deps,
      this.game
    );
    // Engineer: unlocks all engineers plus the Departments research.
    const eng: Array<Unlocable> = this.game.engineers.listEnginer;
    this.engineerRes = new Research(
      "engineerRes",
      "Engineer",
      "Engineer will slowly build machinery.",
      [new Cost(this.game.baseWorld.science, new Decimal(3e6))],
      eng.concat(this.departmentRes),
      this.game
    );
    // Specialized worker ants (all cost 1e4 science each).
    this.planterResearch = new Research(
      "planRes",
      "Planting",
      "Tree planting is the process of transplanting tree seedlings.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e4))],
      [this.game.baseWorld.planterAnt],
      this.game
    );
    this.hydroResearch = new Research(
      "hydroRes",
      "Hydroponics",
      "Hydroponics is the art of growing plants without soil.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e4))],
      [this.game.baseWorld.hydroAnt],
      this.game
    );
    this.laserResearch = new Research(
      "lasRes",
      "Laser",
      "Sand can be fused to crystal.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e4))],
      [this.game.baseWorld.laserAnt],
      this.game
    );
    this.refineryResearch = new Research(
      "refRes",
      "Refinery",
      "Soil can be refined to sand.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e4))],
      [this.game.baseWorld.refineryAnt],
      this.game
    );
    this.composterResearch = new Research(
      "compRes",
      "Compost",
      "Wood can be degraded to fertile soil.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e4))],
      [this.game.baseWorld.composterAnt],
      this.game
    );
    // Experiment: scientist ant; requires Scientific Method in its unlocks.
    this.experimentResearch = new Research(
      "experimentRes",
      "Experiment",
      "Unlocks scientist Ant",
      [new Cost(this.game.baseWorld.science, new Decimal(800))],
      [this.game.science.scientist, this.scientificMethod],
      this.game
    );
    // Travel (prestige): opens the world tab on buy.
    this.prestigeResearch = new Research(
      "prestigeRes",
      "Travel",
      "Allows you to move to new worlds",
      [new Cost(this.game.baseWorld.science, new Decimal(1e7))],
      [this.hereAndNow, this.adaptation, this.evolution],
      this.game,
      () => {
        this.game.worldTabAv = true;
      }
    );
    // Machinery: unlocks every machine plus the Engineer research.
    let listM = new Array<Base>();
    listM = listM.concat(this.game.machines.listMachinery, [this.engineerRes]);
    this.machineryRes = new Research(
      "machiRes",
      "Machinery",
      "Unlocks powerful machinery.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e6))],
      listM,
      this.game
    );
    // Technology: hub research that fans out to all the branches above.
    this.specialResearch = new Research(
      "speRes",
      "Technology",
      "Allows you to research new technologies.",
      [new Cost(this.game.baseWorld.science, new Decimal(3e3))],
      [
        this.composterResearch,
        this.refineryResearch,
        this.laserResearch,
        this.hydroResearch,
        this.planterResearch,
        this.experimentResearch,
        this.machineryRes,
        this.prestigeResearch,
        this.bi
      ],
      this.game
    );
    // Combined bonus: endgame multiplier research.
    this.upCombined = new Research(
      "upComb",
      "Combined bonus",
      "This is the ultimate bonus: multiply unit's bonus per hire bonus.",
      [new Cost(this.game.baseWorld.science, new Decimal(1e10))],
      [],
      this.game
    );
    // Twin: unlocks every unit's hire upgrade plus the combined bonus.
    const allUpH = Array.from(this.game.unitMap.values())
      .filter(u => u.upHire)
      .map(u => u.upHire);
    allUpH.push(this.upCombined);
    this.r4 = new Research(
      "R4",
      "Twin",
      "Allows you to get more units for the same price.",
      [new Cost(this.game.baseWorld.science, new Decimal(7e3))],
      allUpH,
      this.game
    );
    // Teamwork 2: unlocks every unit's production upgrade plus Twin.
    const allUp = Array.from(this.game.unitMap.values())
      .filter(u => u.upAction)
      .map(u => u.upAction);
    allUp.push(this.r4);
    this.r2 = new Research(
      "R2",
      "Teamwork 2",
      "Upgrade your unit's production bonus.",
      [new Cost(this.game.baseWorld.science, new Decimal(500))],
      allUp,
      this.game
    );
    // Teamwork: first production-bonus research.
    this.up1 = new Research(
      "RUp1",
      "Teamwork",
      "Gives a production bonus based on how many times you have bought a unit.",
      [new Cost(this.game.baseWorld.science, new Decimal(50))],
      [this.r2],
      this.game
    );
    // Advanced Hunting (local: only referenced through Hunting's unlocks).
    const hunting2 = new Research(
      "HuntR2",
      "Advanced Hunting",
      "Equip ants with better weapons.",
      [new Cost(this.game.baseWorld.science, new Decimal(4000))],
      [this.game.baseWorld.advancedHunter],
      this.game
    );
    // Hunting: also gates the Technology hub.
    const hunting = new Research(
      "HuntR1",
      "Hunting",
      "Equip ants with weapons to get food.",
      [new Cost(this.game.baseWorld.science, new Decimal(2000))],
      [this.game.baseWorld.hunter, hunting2, this.specialResearch],
      this.game
    );
    // Woodcutting: gates Hunting.
    const woodcutting = new Research(
      "WR1",
      "Woodcutting",
      "Allows you to collect wood for future usage.",
      [new Cost(this.game.baseWorld.science, new Decimal(1000))],
      [this.game.baseWorld.lumberjack, hunting],
      this.game
    );
    // Fungus experiments: unlocks the fungus unit's special upgrade.
    const r3 = new Research(
      "R3",
      "Fungus experiments",
      "Allows you to do experiments to increase fungus's food production.",
      [new Cost(this.game.baseWorld.science, new Decimal(1000))],
      [this.game.baseWorld.fungus.upSpecial],
      this.game
    );
    // Farming: gates fungus experiments and woodcutting.
    const r1 = new Research(
      "R1",
      "Ant–fungus symbiosis",
      "Allows you to cultivate fungus. Fungus is a source of food.",
      [new Cost(this.game.baseWorld.science, new Decimal(100))],
      [this.game.baseWorld.farmer, r3, woodcutting],
      this.game
    );
    // Soil: root of the tree (cheapest, unlocks the first branches).
    this.rDirt = new Research(
      "RDirt",
      "Soil",
      "Allows you to collect soil for future usage.",
      [new Cost(this.game.baseWorld.science, new Decimal(50))],
      [this.game.baseWorld.soil, this.game.baseWorld.carpenter, r1, this.up1],
      this.game
    );
  }
  // WorldInterface lifecycle hook; researches need no per-world additions.
  public addWorld() {}
}
import { expect } from "chai";
import * as Proxyquire from "proxyquire";
import { MockGuild } from "./mocks/guild";
import { MockMember } from "./mocks/member";
import { MockDiscordClient } from "./mocks/discordclient";
import { MockMessage } from "./mocks/message";
import { Util } from "../src/util";
import { AppserviceMock } from "./mocks/appservicemock";
import { MockUser } from "./mocks/user";
import { MockTextChannel } from "./mocks/channel";
// we are a test file and thus need those
/* tslint:disable:no-unused-expression max-file-line-count no-any */
// Appservice mock shared by every test in this file; reset per-test via
// mockBridge.cleanup() where needed.
const mockBridge = new AppserviceMock({});
// Load src/bot with its client factory and the network-touching Util
// helpers replaced, so constructing DiscordBot performs no real I/O.
const modDiscordBot = Proxyquire("../src/bot", {
  "./clientfactory": require("./mocks/discordclientfactory"),
  "./util": {
    Util: {
      AsyncForEach: Util.AsyncForEach,
      DelayedPromise: Util.DelayedPromise,
      // Pretend every download yields a 1000-byte buffer.
      DownloadFile: async () => {
        return {buffer: Buffer.alloc(1000)};
      },
      UploadContentFromUrl: async () => {
        return {mxcUrl: "uploaded"};
      },
    },
  },
});
describe("DiscordBot", () => {
let discordBot;
const config = {
auth: {
botToken: "blah",
},
bridge: {
disablePresence: true,
domain: "localhost",
},
limits: {
discordSendDelay: 50,
},
};
describe("run()", () => {
  it("should resolve when ready.", async () => {
    // Construction plus run() must complete without throwing.
    discordBot = new modDiscordBot.DiscordBot(config, mockBridge, {});
    await discordBot.run();
  });
});
describe("LookupRoom()", () => {
  beforeEach(async () => {
    discordBot = new modDiscordBot.DiscordBot(config, mockBridge, {});
    await discordBot.run();
  });
  // Asserts that LookupRoom rejects for the given ids. The sentinel error
  // distinguishes "resolved when it should not have" from a real rejection.
  const expectLookupToFail = async (guildId: string, chanId: string) => {
    try {
      await discordBot.LookupRoom(guildId, chanId);
      throw new Error("didn't fail");
    } catch (e) {
      expect(e.message).to.not.equal("didn't fail");
    }
  };
  it("should reject a missing guild.", async () => {
    await expectLookupToFail("541", "321");
  });
  it("should reject a missing channel.", async () => {
    await expectLookupToFail("123", "666");
  });
  it("should resolve a guild and channel id.", async () => {
    await discordBot.LookupRoom("123", "321");
  });
});
describe("OnMessage()", () => {
  // Shared fixtures. Note `msg` is reused across tests; the upload tests
  // all overwrite the same attachment key "1234".
  const channel = new MockTextChannel();
  const msg = new MockMessage(channel);
  const author = new MockUser("11111");
  let HANDLE_COMMAND = false;
  // Builds a DiscordBot with every collaborator stubbed so only
  // OnMessage's own logic runs.
  function getDiscordBot() {
    HANDLE_COMMAND = false;
    mockBridge.cleanup();
    const discord = new modDiscordBot.DiscordBot(
      config,
      mockBridge,
      {},
    );
    discord.bot = { user: { id: "654" } }; // the bridge's own discord user id
    discord.userSync = {
      OnUpdateUser: async () => { },
    };
    discord.channelSync = {
      GetRoomIdsFromChannel: async () => ["!asdf:localhost"],
    };
    discord.discordCommandHandler = {
      Process: async () => { HANDLE_COMMAND = true; },
    };
    discord.store = {
      Insert: async (_) => { },
    };
    return discord;
  }
  it("ignores own messages", async () => {
    discordBot = getDiscordBot();
    const guild: any = new MockGuild("123", []);
    // Author id matches discord.bot.user.id ("654"), so the message must
    // be dropped and nothing bridged.
    const ownAuthor = new MockUser("654", "TestUsername");
    guild._mockAddMember(author);
    msg.author = ownAuthor;
    msg.content = "Hi!";
    await discordBot.OnMessage(msg);
    expect(mockBridge.getIntent(author.id).wasCalled("sendEvent", false)).to.equal(0);
  });
  it("Passes on !matrix commands", async () => {
    discordBot = getDiscordBot();
    msg.author = author;
    msg.content = "!matrix test";
    await discordBot.OnMessage(msg);
    expect(HANDLE_COMMAND).to.be.true;
  });
  it("skips empty messages", async () => {
    discordBot = getDiscordBot();
    msg.content = "";
    msg.author = author;
    await discordBot.OnMessage(msg as any);
    expect(mockBridge.getIntent(author.id).wasCalled("sendEvent", false)).to.equal(0);
  });
  it("sends normal messages", async () => {
    discordBot = getDiscordBot();
    msg.author = author;
    msg.content = "Foxies are amazing!";
    await discordBot.OnMessage(msg as any);
    mockBridge.getIntent(author.id).wasCalled("sendEvent");
  });
  it("sends edit messages", async () => {
    discordBot = getDiscordBot();
    msg.author = author;
    msg.content = "Foxies are super amazing!";
    // Passing an event id makes OnMessage emit an m.replace edit.
    await discordBot.OnMessage(msg, "editevent");
    mockBridge.getIntent(author.id).wasCalled("sendEvent", true, "!asdf:localhost", {
      "body": "* Foxies are super amazing!",
      "format": "org.matrix.custom.html",
      "formatted_body": "* Foxies are super amazing!",
      "m.new_content": {
        body: "Foxies are super amazing!",
        format: "org.matrix.custom.html",
        formatted_body: "Foxies are super amazing!",
        msgtype: "m.text",
      },
      "m.relates_to": { event_id: "editevent", rel_type: "m.replace" },
      "msgtype": "m.text",
    });
  });
  // Attachment tests: mimetype and msgtype are derived from the filename.
  it("uploads images", async () => {
    discordBot = getDiscordBot();
    msg.author = author;
    msg.attachments.set("1234", {
      name: "someimage.png",
      size: 42,
      height: 0,
      url: "asdf",
      width: 0,
    });
    await discordBot.OnMessage(msg);
    const intent = mockBridge.getIntent(author.id);
    intent.underlyingClient.wasCalled("uploadContent");
    intent.wasCalled("sendEvent", true, "!asdf:localhost", {
      body: "someimage.png",
      external_url: "asdf",
      info: {
        h: 0,
        mimetype: "image/png",
        size: 42,
        w: 0,
      },
      msgtype: "m.image",
      url: "mxc://someimage.png",
    });
  });
  it("uploads videos", async () => {
    discordBot = getDiscordBot();
    msg.author = author;
    msg.attachments.set("1234", {
      name: "foxes.mov",
      size: 42,
      height: 0,
      url: "asdf",
      width: 0,
    });
    await discordBot.OnMessage(msg);
    const intent = mockBridge.getIntent(author.id);
    intent.underlyingClient.wasCalled("uploadContent");
    intent.wasCalled("sendEvent", true, "!asdf:localhost", {
      body: "foxes.mov",
      external_url: "asdf",
      info: {
        h: 0,
        mimetype: "video/quicktime",
        size: 42,
        w: 0,
      },
      msgtype: "m.video",
      url: "mxc://foxes.mov",
    });
  });
  it("uploads audio", async () => {
    discordBot = getDiscordBot();
    msg.author = author;
    msg.attachments.set("1234", {
      name: "meow.mp3",
      size: 42,
      height: 0,
      url: "asdf",
      width: 0,
    });
    await discordBot.OnMessage(msg);
    const intent = mockBridge.getIntent(author.id);
    intent.underlyingClient.wasCalled("uploadContent");
    // Audio events carry no width/height in info.
    intent.wasCalled("sendEvent", true, "!asdf:localhost", {
      body: "meow.mp3",
      external_url: "asdf",
      info: {
        mimetype: "audio/mpeg",
        size: 42,
      },
      msgtype: "m.audio",
      url: "mxc://meow.mp3",
    });
  });
  it("uploads other files", async () => {
    discordBot = getDiscordBot();
    msg.author = author;
    msg.attachments.set("1234", {
      name: "meow.zip",
      size: 42,
      height: 0,
      url: "asdf",
      width: 0,
    });
    await discordBot.OnMessage(msg);
    const intent = mockBridge.getIntent(author.id);
    intent.underlyingClient.wasCalled("uploadContent");
    // Unknown extensions fall back to a generic m.file event.
    intent.wasCalled("sendEvent", true, "!asdf:localhost", {
      body: "meow.zip",
      external_url: "asdf",
      info: {
        mimetype: "application/zip",
        size: 42,
      },
      msgtype: "m.file",
      url: "mxc://meow.zip",
    });
  });
});
describe("OnMessageUpdate()", () => {
  it("should return on an unchanged message", async () => {
    discordBot = new modDiscordBot.DiscordBot(
      config,
      mockBridge,
      {},
    );
    const guild: any = new MockGuild("123", []);
    guild._mockAddMember(new MockMember("12345", "TestUsername"));
    const channel = new MockTextChannel(guild);
    const oldMsg = new MockMessage(channel) as any;
    const newMsg = new MockMessage(channel) as any;
    oldMsg.embeds = [];
    newMsg.embeds = [];
    // Content updated but not changed
    oldMsg.content = "a";
    newMsg.content = "a";
    // Mock the SendMatrixMessage method to check if it is called
    let checkMsgSent = false;
    discordBot.SendMatrixMessage = (...args) => checkMsgSent = true;
    await discordBot.OnMessageUpdate(oldMsg, newMsg);
    expect(checkMsgSent).to.be.false;
  });
  it("should send a matrix edit on an edited discord message", async () => {
    discordBot = new modDiscordBot.DiscordBot(
      config,
      mockBridge,
      {},
    );
    discordBot.store.Get = (a, b) => null;
    const guild: any = new MockGuild("123", []);
    guild._mockAddMember(new MockMember("12345", "TestUsername"));
    const channel = new MockTextChannel(guild);
    const oldMsg = new MockMessage(channel) as any;
    const newMsg = new MockMessage(channel) as any;
    oldMsg.embeds = [];
    newMsg.embeds = [];
    // Content updated and edited
    oldMsg.content = "a";
    newMsg.content = "b";
    // Store cursor yields exactly one stored mapping, so the update is
    // treated as an edit of event "editedid".
    let storeMockResults = 1;
    discordBot.store = {
      Get: (a, b) => {
        return {
          MatrixId: "editedid",
          Next: () => storeMockResults--,
          Result: true,
        };
      },
    };
    let checkEditEventSent = "";
    discordBot.OnMessage = (str, event) => {
      checkEditEventSent = event;
    };
    await discordBot.OnMessageUpdate(oldMsg, newMsg);
    expect(checkEditEventSent).to.equal("editedid");
  });
  it("should send a new message if no store event found", async () => {
    discordBot = new modDiscordBot.DiscordBot(
      config,
      mockBridge,
      {},
    );
    discordBot.store.Get = (a, b) => { return {
      MatrixId: "$event:localhost;!room:localhost",
      Next: () => true,
      Result: true,
    }; };
    discordBot.lastEventIds["!room:localhost"] = "$event:localhost";
    const guild: any = new MockGuild("123", []);
    guild._mockAddMember(new MockMember("12345", "TestUsername"));
    const channel = new MockTextChannel(guild, {} as any);
    const oldMsg = new MockMessage(channel) as any;
    const newMsg = new MockMessage(channel) as any;
    oldMsg.embeds = [];
    newMsg.embeds = [];
    // Content updated and edited
    oldMsg.content = "a";
    newMsg.content = "b";
    // Store cursor is empty (Next() returns 0 immediately), so the update
    // falls through to a brand-new message: OnMessage called without an
    // edit event id.
    let storeMockResults = 0;
    discordBot.store = {
      Get: (a, b) => {
        return {
          MatrixId: "editedid",
          Next: () => storeMockResults--,
          Result: true,
        };
      },
    };
    let checkEditEventSent = "wrong";
    discordBot.OnMessage = (str, event) => {
      checkEditEventSent = event;
    };
    await discordBot.OnMessageUpdate(oldMsg, newMsg);
    expect(checkEditEventSent).to.be.undefined;
  });
});
describe("event:message", () => {
  it("should delay messages so they arrive in order", async () => {
    discordBot = new modDiscordBot.DiscordBot(
      config,
      mockBridge,
      {},
    );
    let expected = 0;
    discordBot.OnMessage = async (msg: any) => {
      expect(msg.n).to.eq(expected);
      expected++;
    };
    const client: MockDiscordClient = (await discordBot.ClientFactory.getClient()) as MockDiscordClient;
    await discordBot.run();
    const ITERATIONS = 25;
    const CHANID = 123;
    // Send delay of 50ms, 2 seconds / 50ms - 5 for safety.
    for (let n = 0; n < ITERATIONS; n++) {
      // BUG FIX: the payload previously omitted `n`, so the expect()
      // inside OnMessage always rejected and the rejection was silently
      // swallowed by the queue (see the sibling test below, which proves
      // rejections are absorbed). Emit `n` so ordering is really checked.
      await client.emit("message", { n, channel: { guild: { id: CHANID }, id: CHANID} });
    }
    await discordBot.discordMessageQueue[CHANID];
    // Verify every message was actually delivered to OnMessage.
    expect(expected).to.eq(ITERATIONS);
  });
  it("should handle messages that reject in the queue", async () => {
    discordBot = new modDiscordBot.DiscordBot(
      config,
      mockBridge,
      {},
    );
    let expected = 0;
    const THROW_EVERY = 5;
    discordBot.OnMessage = async (msg: any) => {
      expect(msg.n).to.eq(expected);
      expected++;
      if (expected % THROW_EVERY === 0) {
        return Promise.reject("Deliberate throw in test");
      }
      return Promise.resolve();
    };
    const client: MockDiscordClient = (await discordBot.ClientFactory.getClient()) as MockDiscordClient;
    await discordBot.run();
    const ITERATIONS = 25;
    const CHANID = 123;
    // Send delay of 50ms, 2 seconds / 50ms - 5 for safety.
    for (let n = 0; n < ITERATIONS; n++) {
      await client.emit("message", { n, channel: { guild: { id: CHANID }, id: CHANID} });
    }
    await discordBot.discordMessageQueue[CHANID];
    expect(expected).to.eq(ITERATIONS);
  });
});
}); | the_stack |
import { OktaAuth } from '@okta/okta-auth-js';
import tokens from '@okta/test.support/tokens';
import util from '@okta/test.support/util';
import oauthUtil from '@okta/test.support/oauthUtil';
import SdkClock from '../../../lib/clock';
import * as features from '../../../lib/features';
import { TokenService } from '../../../lib/services/TokenService';
const Emitter = require('tiny-emitter');
/**
 * Builds an OktaAuth test client with a mocked SdkClock.
 *
 * FIX: the previous version wrote a default back into the caller's object
 * (`options.tokenManager = options.tokenManager || {}`), mutating the
 * argument. Defaults are now held in a local, so callers' objects are
 * never modified.
 */
function createAuth(options) {
  options = options || {};
  const tokenManagerOptions = options.tokenManager || {};
  jest.spyOn(SdkClock, 'create').mockReturnValue(new SdkClock(options.localClockOffset));
  return new OktaAuth({
    pkce: false,
    issuer: 'https://auth-js-test.okta.com',
    clientId: 'NPSfOkH5eZrTy8PMDlvx',
    redirectUri: 'https://example.com/redirect',
    storageUtil: options.storageUtil,
    tokenManager: {
      expireEarlySeconds: tokenManagerOptions.expireEarlySeconds || 0,
      storage: tokenManagerOptions.storage,
      storageKey: tokenManagerOptions.storageKey,
      autoRenew: tokenManagerOptions.autoRenew || false,
      autoRemove: tokenManagerOptions.autoRemove || false,
      secure: tokenManagerOptions.secure // used by cookie storage
    }
  });
}
/**
 * Returns a storageUtil stub whose every storage type resolves to one
 * shared in-memory provider, exposed as `_storageProvider` so tests can
 * assert on its jest mocks.
 */
function mockStorageUtil() {
  const getItem = jest.fn();
  const setItem = jest.fn();
  const _storageProvider = { getItem, setItem };
  return {
    _storageProvider,
    findStorageType: (type) => type,
    getStorageByType: () => _storageProvider
  };
}
describe('TokenManager', function() {
let client;
// Creates the shared `client` for a test, detaching the SDK's own
// 'added'/'removed' listeners so tests only observe their own handlers.
// Optionally starts the token manager.
function setupSync(options = {}, start = false) {
  client = createAuth(options);
  ['added', 'removed'].forEach((eventName) => client.tokenManager.off(eventName));
  if (start) {
    client.tokenManager.start();
  }
  return client;
}
// Each test builds its own client via setupSync; reset the reference so
// afterEach knows whether cleanup is needed.
beforeEach(function() {
  client = null;
});
afterEach(function() {
  if (client) {
    // Stop timers and drop stored tokens so state never leaks across tests.
    client.tokenManager.stop();
    client.tokenManager.clear();
  }
  jest.useRealTimers();
});
describe('service methods', () => {
  beforeEach(() => {
    setupSync();
  });
  describe('start', () => {
    it('instantiates the token service', () => {
      expect(client.tokenManager.service).not.toBeTruthy();
      client.tokenManager.start();
      expect(client.tokenManager.service).toBeTruthy();
    });
    it('starts the token service', () => {
      jest.spyOn(TokenService.prototype, 'start');
      client.tokenManager.start();
      expect(TokenService.prototype.start).toHaveBeenCalled();
    });
    it('stops existing service', () => {
      // Calling start() twice must stop and replace the previous service.
      const myService = client.tokenManager.service = {
        stop: jest.fn()
      };
      client.tokenManager.start();
      expect(myService.stop).toHaveBeenCalled();
      expect(myService).not.toBe(client.tokenManager.service);
    });
  });
  describe('stop', () => {
    it('stops the token service, if it exists', () => {
      const myService = client.tokenManager.service = {
        stop: jest.fn()
      };
      client.tokenManager.stop();
      expect(myService.stop).toHaveBeenCalled();
    });
    it('sets service instance to null', () => {
      client.tokenManager.service = {
        stop: jest.fn()
      };
      client.tokenManager.stop();
      expect(client.tokenManager.service).toBe(null);
    });
    it('does not error if there is no service instance', () => {
      expect(client.tokenManager.service).toBe(undefined);
      client.tokenManager.stop();
    });
  });
});
describe('Event emitter', function() {
  it('uses emitter from the SDK client', function() {
    // TokenManager must register on the client's own emitter, not a
    // private one, so SDK-level events reach tokenManager listeners.
    jest.spyOn(Emitter.prototype, 'on');
    setupSync();
    var handlerFn = jest.fn();
    client.tokenManager.on('fake', handlerFn);
    var emitter = Emitter.prototype.on.mock.instances[0];
    expect(emitter).toBe(client.emitter);
    emitter.emit('fake');
    expect(handlerFn).toHaveBeenCalled();
  });
  it('Can add event callbacks using on()', function() {
    setupSync();
    var handler = jest.fn();
    client.tokenManager.on('fake', handler);
    var payload = { foo: 'bar' };
    client.emitter.emit('fake', payload);
    expect(handler).toHaveBeenCalledWith(payload);
  });
  it('Event callbacks can have an optional context', function() {
    setupSync();
    var context = jest.fn();
    // The assertion runs inside the handler: `this` must be the context
    // object passed to on().
    var handler = jest.fn().mockImplementation(function() {
      expect(this).toBe(context);
    });
    client.tokenManager.on('fake', handler, context);
    var payload = { foo: 'bar' };
    client.emitter.emit('fake', payload);
    expect(handler).toHaveBeenCalledWith(payload);
  });
  it('Can remove event callbacks using off()', function() {
    setupSync();
    var handler = jest.fn();
    client.tokenManager.on('fake', handler);
    client.tokenManager.off('fake', handler);
    var payload = { foo: 'bar' };
    client.emitter.emit('fake', payload);
    expect(handler).not.toHaveBeenCalled();
  });
});
describe('storageKey', function() {
  it('Uses "okta-token-storage" by default', function() {
    const storageUtil = mockStorageUtil();
    setupSync({
      storageUtil
    });
    client.tokenManager.add('foo', tokens.standardIdTokenParsed);
    // The whole token map is serialized under the default storage key.
    expect(storageUtil._storageProvider.setItem).toHaveBeenCalledWith('okta-token-storage', JSON.stringify({ foo: tokens.standardIdTokenParsed }));
  });
  it('Can use a custom value', function() {
    const storageUtil = mockStorageUtil();
    setupSync({
      storageUtil,
      tokenManager: {
        storageKey: 'custom1'
      }
    });
    client.tokenManager.add('foo', tokens.standardIdTokenParsed);
    expect(storageUtil._storageProvider.setItem).toHaveBeenCalledWith('custom1', JSON.stringify({ foo: tokens.standardIdTokenParsed }));
  });
});
describe('storage', function() {
  it('throws if storage option is unrecognized', function() {
    var fn = createAuth.bind(null, {
      tokenManager: {
        storage: 'unheardof'
      }
    });
    expect(fn).toThrowError('Unrecognized storage option');
  });
  it('has an in memory option', function() {
    // warp to time to ensure tokens aren't expired
    util.warpToUnixTime(tokens.standardIdTokenClaims.exp - 1);
    setupSync({
      tokenManager: {
        storage: 'memory'
      }
    });
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    return client.tokenManager.get('test-idToken')
      .then(function (value) {
        expect(value).toEqual(tokens.standardIdTokenParsed);
      });
  });
  it('accepts a custom provider', function() {
    // Minimal provider backed by a plain object; jest mocks let the test
    // verify both read and write paths were exercised.
    var store = {};
    var provider = {
      getItem: jest.fn().mockImplementation(function(key) {
        return store[key];
      }),
      setItem: jest.fn().mockImplementation(function(key, val) {
        store[key] = val;
      })
    };
    setupSync({
      tokenManager: {
        storage: provider
      }
    });
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    oauthUtil.expectTokenStorageToEqual(provider, {
      'test-idToken': tokens.standardIdTokenParsed
    });
    expect(provider.setItem).toHaveBeenCalled();
    expect(provider.getItem).toHaveBeenCalled();
  });
});
describe('add', function() {
  it('throws an error when attempting to add a non-token', function() {
    setupSync();
    try {
      // An array is not a valid token object; add() must throw.
      client.tokenManager.add('test-idToken', [
        tokens.standardIdTokenParsed,
        tokens.standardIdTokenParsed
      ]);
      // Should never hit this
      expect(true).toEqual(false);
    } catch (e) {
      util.expectErrorToEqual(e, {
        name: 'AuthSdkError',
        message: 'Token must be an Object with scopes, expiresAt, and one of: an idToken, accessToken, or refreshToken property',
        errorCode: 'INTERNAL',
        errorSummary: 'Token must be an Object with scopes, expiresAt, and one of: an idToken, accessToken, or refreshToken property',
        errorLink: 'INTERNAL',
        errorId: 'INTERNAL',
        errorCauses: []
      });
    }
  });
});
describe('renew', function() {
  beforeEach(() => {
    // Treat the test origin as localhost so secure-context checks pass.
    jest.spyOn(features, 'isLocalhost').mockReturnValue(true);
    setupSync();
  });
  it('on success, emits "renewed" event with the new token', function() {
    expect.assertions(3);
    const idTokenKey = 'test-idToken';
    const origIdToken = tokens.standardIdTokenParsed;
    // New object with the same claims — distinct identity from the original.
    const renewedIdToken = Object.assign({}, origIdToken);
    client.tokenManager.add(idTokenKey, origIdToken);
    const accessTokenKey = 'test-accessToken';
    const origAccessToken = tokens.standardAccessTokenParsed;
    const renewedAccessToken = Object.assign({}, origAccessToken);
    client.tokenManager.add(accessTokenKey, origAccessToken);
    // Stub the network renewal; resolves with both renewed tokens.
    jest.spyOn(client.token, 'renewTokens').mockImplementation(function() {
      return Promise.resolve({ idToken: renewedIdToken, accessToken: renewedAccessToken });
    });
    const addedCallback = jest.fn();
    const renewedCallback = jest.fn();
    const removedCallback = jest.fn();
    client.tokenManager.on('added', addedCallback);
    client.tokenManager.on('renewed', renewedCallback);
    client.tokenManager.on('removed', removedCallback);
    return client.tokenManager.renew('test-idToken')
      .then(() => {
        // A renew surfaces as: renewed(new, old), added(new), removed(old).
        expect(renewedCallback).toHaveBeenNthCalledWith(1, idTokenKey, renewedIdToken, origIdToken);
        expect(addedCallback).toHaveBeenNthCalledWith(1, idTokenKey, renewedIdToken);
        expect(removedCallback).toHaveBeenNthCalledWith(1, idTokenKey, origIdToken);
      });
  });
  it('multiple overlapping calls will produce a single request and promise', function() {
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    jest.spyOn(client.token, 'renewTokens').mockImplementation(function() {
      return Promise.resolve({ idToken: tokens.standardIdTokenParsed, accessToken: tokens.standardAccessTokenParsed });
    });
    var p1 = client.tokenManager.renew('test-idToken');
    var p2 = client.tokenManager.renew('test-idToken');
    // Same promise identity => the in-flight renew was shared.
    expect(p1).toBe(p2);
    return Promise.all([p1, p2]);
  });
  it('multiple overlapping calls will produce a single request and promise (failure case)', function() {
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    jest.spyOn(client.token, 'renewTokens').mockImplementation(function() {
      return Promise.reject(new Error('expected'));
    });
    var p1 = client.tokenManager.renew('test-idToken');
    var p2 = client.tokenManager.renew('test-idToken');
    expect(p1).toBe(p2);
    // allSettled: both callers observe the same rejection without aborting the test.
    return Promise.allSettled([p1, p2]).then(function(results) {
      expect(results).toHaveLength(2);
      results.forEach(function(result) {
        expect(result.status).toBe('rejected');
        util.expectErrorToEqual(result.reason, {
          name: 'Error',
          message: 'expected',
        });
      });
    });
  });
  it('sequential calls will produce a unique request and promise', function() {
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    jest.spyOn(client.token, 'renewTokens').mockImplementation(function() {
      return Promise.resolve({ idToken: tokens.standardIdTokenParsed, accessToken: tokens.standardAccessTokenParsed });
    });
    // Second renew starts only after the first settles, so it must be a new promise.
    var p1 = client.tokenManager.renew('test-idToken').then(function() {
      var p2 = client.tokenManager.renew('test-idToken');
      expect(p1).not.toBe(p2);
      return p2;
    });
    return p1;
  });
  it('sequential calls will produce a unique request and promise (failure case)', function() {
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    jest.spyOn(client.token, 'renewTokens').mockImplementation(function() {
      return Promise.reject(new Error('expected'));
    });
    // Chain shape: each renew must reject; the `then` branches are unreachable
    // and fail loudly (expect(false).toBe(true)) if a renew unexpectedly resolves.
    var p1 = client.tokenManager.renew('test-idToken').then(function() {
      expect(false).toBe(true);
    }).catch(function(err) {
      util.expectErrorToEqual(err, {
        name: 'Error',
        message: 'expected',
      });
      var p2 = client.tokenManager.renew('test-idToken');
      expect(p1).not.toBe(p2);
      return p2;
    }).then(function() {
      expect(false).toBe(true);
    }).catch(function(err) {
      util.expectErrorToEqual(err, {
        name: 'Error',
        message: 'expected',
      });
    });
    return p1;
  });
});
describe('autoRenew', function() {
  beforeEach(function() {
    // Fake timers let util.warp* drive expiration deterministically.
    jest.useFakeTimers();
    jest.spyOn(features, 'isLocalhost').mockReturnValue(true);
  });
  afterEach(async () => {
    jest.useRealTimers();
  });
  it('should register listener for "expired" event', function() {
    jest.spyOn(Emitter.prototype, 'on');
    setupSync({}, true);
    client.tokenManager.start();
    expect(Emitter.prototype.on).toHaveBeenCalledWith('expired', expect.any(Function));
  });
  it('emits "expired" on existing tokens even when autoRenew is disabled', function() {
    jest.useFakeTimers();
    setupSync({ tokenManager: { autoRenew: false } }, true);
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    util.warpToUnixTime(tokens.standardIdTokenClaims.iat);
    var callback = jest.fn();
    client.tokenManager.on('expired', callback);
    util.warpByTicksToUnixTime(tokens.standardIdTokenParsed.expiresAt + 1);
    expect(callback).toHaveBeenCalledWith('test-idToken', tokens.standardIdTokenParsed);
  });
  // NOTE(review): this test body is identical to the previous one — presumably
  // the "new tokens" variant was meant to add the token AFTER registering the
  // listener (or after start()). Verify the intended ordering.
  it('emits "expired" on new tokens even when autoRenew is disabled', function() {
    jest.useFakeTimers();
    setupSync({ tokenManager: { autoRenew: false } }, true);
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    util.warpToUnixTime(tokens.standardIdTokenClaims.iat);
    var callback = jest.fn();
    client.tokenManager.on('expired', callback);
    util.warpByTicksToUnixTime(tokens.standardIdTokenParsed.expiresAt + 1);
    expect(callback).toHaveBeenCalledWith('test-idToken', tokens.standardIdTokenParsed);
  });
  it('accounts for local clock offset when emitting "expired"', function() {
    util.warpToUnixTime(tokens.standardIdTokenParsed.expiresAt);
    var localClockOffset = -2000; // local client is 2 seconds fast
    setupSync({
      localClockOffset: localClockOffset
    }, true);
    var callback = jest.fn();
    client.tokenManager.on('expired', callback);
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    // Not expired yet at the (offset-corrected) current time...
    jest.advanceTimersByTime(0);
    expect(callback).not.toHaveBeenCalled();
    // ...but expired once the offset has elapsed.
    jest.advanceTimersByTime(-localClockOffset);
    expect(callback).toHaveBeenCalledWith('test-idToken', tokens.standardIdTokenParsed);
  });
  it('accounts for "expireEarlySeconds" option when emitting "expired"', function() {
    var expireEarlySeconds = 10;
    // Position the clock 1 second before the early-expiration point.
    util.warpToUnixTime(tokens.standardIdTokenParsed.expiresAt - (expireEarlySeconds + 1));
    setupSync({
      tokenManager: {
        expireEarlySeconds: expireEarlySeconds
      }
    }, true);
    var callback = jest.fn();
    client.tokenManager.on('expired', callback);
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    jest.advanceTimersByTime(0);
    expect(callback).not.toHaveBeenCalled();
    jest.advanceTimersByTime(1000);
    expect(callback).toHaveBeenCalledWith('test-idToken', tokens.standardIdTokenParsed);
  });
  describe('too many renew requests', () => {
    it('should emit too many renew error when latest 10 expired event happen in 30 seconds', () => {
      setupSync({
        tokenManager: { autoRenew: true }
      }, true);
      client.tokenManager.renew = jest.fn().mockImplementation(() => Promise.resolve());
      const handler = jest.fn().mockImplementation(err => {
        util.expectErrorToEqual(err, {
          name: 'AuthSdkError',
          message: 'Too many token renew requests',
          errorCode: 'INTERNAL',
          errorSummary: 'Too many token renew requests',
          errorLink: 'INTERNAL',
          errorId: 'INTERNAL',
          errorCauses: []
        });
      });
      client.tokenManager.on('error', handler);
      let startTime = Math.round(Date.now() / 1000);
      // 2 * 10 < 30 => emit error
      for (let i = 0; i < 10; i++) {
        util.warpToUnixTime(startTime);
        client.emitter.emit('expired');
        startTime = startTime + 2;
      }
      // The 10th event trips the limit: error emitted once, renew not called for it.
      expect(handler).toHaveBeenCalledTimes(1);
      expect(client.tokenManager.renew).toHaveBeenCalledTimes(9);
    });
    it('should keep emitting errors if expired events keep emitting in 30s', () => {
      setupSync({
        tokenManager: { autoRenew: true }
      }, true);
      client.tokenManager.renew = jest.fn().mockImplementation(() => Promise.resolve());
      const handler = jest.fn();
      client.tokenManager.on('error', handler);
      let startTime = Math.round(Date.now() / 1000);
      // 2 * 10 < 30 => emit error
      for (let i = 0; i < 20; i++) {
        util.warpToUnixTime(startTime);
        client.emitter.emit('expired');
        startTime = startTime + 2;
      }
      // Events 10..20 each re-trigger the error (11 total); renews stop at 9.
      expect(handler).toHaveBeenCalledTimes(11);
      expect(client.tokenManager.renew).toHaveBeenCalledTimes(9);
    });
    it('should not emit error if time diff for the latest 10 requests are more than 30s', () => {
      setupSync({
        tokenManager: { autoRenew: true }
      }, true);
      const handler = jest.fn();
      client.tokenManager.on('error', handler);
      client.tokenManager.renew = jest.fn().mockImplementation(() => Promise.resolve());
      let startTime = Math.round(Date.now() / 1000);
      // 5 * 10 > 30 => not emit error
      for (let i = 0; i < 20; i++) {
        util.warpToUnixTime(startTime);
        client.emitter.emit('expired');
        startTime = startTime + 5;
      }
      expect(handler).not.toHaveBeenCalled();
      expect(client.tokenManager.renew).toHaveBeenCalledTimes(20);
    });
    it('should resume autoRenew if requests become normal again', () => {
      setupSync({
        tokenManager: { autoRenew: true }
      }, true);
      const handler = jest.fn();
      client.tokenManager.on('error', handler);
      client.tokenManager.renew = jest.fn().mockImplementation(() => Promise.resolve());
      // trigger too many requests error
      // 10 * 2 < 30 => should emit error
      let startTime = Math.round(Date.now() / 1000);
      for (let i = 0; i < 20; i++) {
        util.warpToUnixTime(startTime);
        client.emitter.emit('expired');
        startTime = startTime + 2;
      }
      // resume to normal requests
      // wait 50s, then 10 * 5 > 30 => not emit error
      startTime = startTime + 50;
      util.warpToUnixTime(startTime);
      for (let i = 0; i < 10; i++) {
        util.warpToUnixTime(startTime);
        client.emitter.emit('expired');
        startTime = startTime + 5;
      }
      // 11 errors from the first burst only; 9 renews before the limit + 10 after recovery.
      expect(handler).toHaveBeenCalledTimes(11);
      expect(client.tokenManager.renew).toHaveBeenCalledTimes(19);
    });
  });
});
describe('autoRemove', () => {
  beforeEach(() => {
    jest.useFakeTimers();
  });
  afterEach(() => {
    jest.useRealTimers();
  });

  // Shared setup: add the standard idToken, stub remove(), then advance the
  // fake clock past the token's expiration.
  function expireStandardIdToken(tokenManagerOptions) {
    setupSync({ tokenManager: tokenManagerOptions }, true);
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    client.tokenManager.remove = jest.fn();
    util.warpToUnixTime(tokens.standardIdTokenClaims.iat);
    util.warpByTicksToUnixTime(tokens.standardIdTokenParsed.expiresAt + 1);
  }

  it('should call tokenManager.remove() when autoRenew === false && autoRemove === true', () => {
    expireStandardIdToken({ autoRenew: false, autoRemove: true });
    expect(client.tokenManager.remove).toHaveBeenCalledWith('test-idToken');
  });
  it('should not call tokenManager.remove() when autoRenew === false && autoRemove === false', () => {
    expireStandardIdToken({ autoRenew: false, autoRemove: false });
    expect(client.tokenManager.remove).not.toHaveBeenCalled();
  });
});
// describe('get', function() {
// it('should throw AuthSdkError if autoRenew is turned on and app is in oauth callback state', async () => {
// delete global.window.location;
// global.window.location = {
// protocol: 'https:',
// hostname: 'somesite.local',
// search: '?code=fakecode'
// };
// client = new OktaAuth({
// pkce: true,
// issuer: 'https://auth-js-test.okta.com',
// clientId: 'foo'
// });
// try {
// await client.tokenManager.get();
// } catch (err) {
// expect(err).toBeInstanceOf(AuthSdkError);
// expect(err.message).toBe('The app should not attempt to call authorize API on callback. Authorize flow is already in process. Use parseFromUrl() to receive tokens.');
// }
// });
// });
describe('hasExpired', () => {
  beforeEach(() => {
    jest.spyOn(features, 'isLocalhost').mockReturnValue(true);
  });
  it('returns false for a token that has not expired', async () => {
    util.warpToUnixTime(tokens.standardIdTokenClaims.iat);
    setupSync();
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    const token = await client.tokenManager.get('test-idToken');
    expect(token).toBeTruthy();
    expect(client.tokenManager.hasExpired(token)).toBe(false);
  });
  it('returns false when a token is not expired, accounting for local clock offset', async () => {
    util.warpToUnixTime(tokens.standardIdTokenClaims.iat);
    setupSync({
      localClockOffset: -2000 // local clock is 2 seconds ahead of server
    });
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    // Set local time to server expiration. local clock offset should keep the token valid
    util.warpToUnixTime(tokens.standardIdTokenParsed.expiresAt + 1);
    const token = await client.tokenManager.get('test-idToken');
    expect(token).toBeTruthy();
    expect(client.tokenManager.hasExpired(token)).toBe(false);
  });
  it('returns true for a token that has expired', async () => {
    util.warpToUnixTime(tokens.standardIdTokenClaims.iat);
    setupSync();
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    util.warpToUnixTime(tokens.standardIdTokenParsed.expiresAt + 1);
    const token = await client.tokenManager.get('test-idToken');
    expect(token).toBeTruthy();
    expect(client.tokenManager.hasExpired(token)).toBe(true);
  });
  it('returns true when a token is expired, accounting for local clock offset', async () => {
    util.warpToUnixTime(tokens.standardIdTokenClaims.iat);
    setupSync({
      localClockOffset: 5000 // local clock is 5 seconds behind server
    });
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    // Set local time to server expiration minus 5 seconds
    util.warpToUnixTime(tokens.standardIdTokenParsed.expiresAt - 5);
    const token = await client.tokenManager.get('test-idToken');
    expect(token).toBeTruthy();
    expect(client.tokenManager.hasExpired(token)).toBe(true);
  });
});
describe('getTokens', () => {
  it('should get key agnostic tokens set from storage', async () => {
    expect.assertions(2);
    setupSync();
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    client.tokenManager.add('test-accessToken', tokens.standardAccessTokenParsed);
    const { accessToken, idToken } = await client.tokenManager.getTokens();
    expect(accessToken).toEqual(tokens.standardAccessTokenParsed);
    expect(idToken).toEqual(tokens.standardIdTokenParsed);
  });
  it('should get only idToken from storage', async () => {
    expect.assertions(2);
    setupSync();
    client.tokenManager.add('test-idToken', tokens.standardIdTokenParsed);
    const { accessToken, idToken } = await client.tokenManager.getTokens();
    expect(accessToken).toBeUndefined();
    expect(idToken).toEqual(tokens.standardIdTokenParsed);
  });
  it('should get only accessToken from storage', async () => {
    expect.assertions(2);
    setupSync();
    client.tokenManager.add('test-accessToken', tokens.standardAccessTokenParsed);
    const { accessToken, idToken } = await client.tokenManager.getTokens();
    expect(idToken).toBeUndefined();
    expect(accessToken).toEqual(tokens.standardAccessTokenParsed);
  });
  it('should get empty object if no token in storage', async () => {
    expect.assertions(1);
    setupSync();
    const result = await client.tokenManager.getTokens();
    expect(result).toEqual({});
  });
});
describe('setTokens', () => {
  let setItemMock;
  let storageProvider;
  beforeEach(() => {
    // Fresh mock per test; storage starts out empty unless a test overrides it.
    setItemMock = jest.fn();
    storageProvider = {
      getItem: jest.fn().mockReturnValue(JSON.stringify({})),
      setItem: setItemMock
    };
  });
  it('should add set tokens with provided token object (two tokens in object)', () => {
    setupSync({
      tokenManager: {
        storage: storageProvider
      }
    });
    const handler = jest.fn();
    client.tokenManager.on('added', handler);
    const tokensObj = {
      idToken: tokens.standardIdTokenParsed,
      accessToken: tokens.standardAccessTokenParsed,
    };
    client.tokenManager.setTokens(tokensObj);
    // One storage write for the whole set, but one 'added' event per token.
    expect(setItemMock).toHaveBeenCalledWith('okta-token-storage', JSON.stringify(tokensObj));
    expect(setItemMock).toHaveBeenCalledTimes(1);
    expect(handler).toHaveBeenCalledTimes(2);
  });
  it('should add set tokens with provided token object (one tokens in object)', () => {
    setupSync({
      tokenManager: {
        storage: storageProvider
      }
    });
    const handler = jest.fn();
    client.tokenManager.on('added', handler);
    const tokensObj = {
      idToken: tokens.standardIdTokenParsed
    };
    client.tokenManager.setTokens(tokensObj);
    expect(setItemMock).toHaveBeenCalledWith('okta-token-storage', JSON.stringify(tokensObj));
    expect(setItemMock).toHaveBeenCalledTimes(1);
    expect(handler).toHaveBeenCalledTimes(1);
  });
  it('should remove tokens if no token in tokenObject but tokens exist in storage', () => {
    // Pre-populate storage with both tokens so setTokens({}) must clear them.
    storageProvider = {
      getItem: jest.fn().mockReturnValue(JSON.stringify({
        idToken: tokens.standardIdTokenParsed,
        accessToken: tokens.standardAccessTokenParsed,
      })),
      setItem: setItemMock
    };
    setupSync({
      tokenManager: {
        storage: storageProvider
      }
    });
    const addedHandler = jest.fn();
    client.tokenManager.on('added', addedHandler);
    const renewHandler = jest.fn();
    client.tokenManager.on('renewed', renewHandler);
    const removedHandler = jest.fn();
    client.tokenManager.on('removed', removedHandler);
    const tokensObj = {};
    client.tokenManager.setTokens(tokensObj);
    expect(setItemMock).toHaveBeenCalledTimes(1);
    expect(setItemMock).toHaveBeenCalledWith('okta-token-storage', JSON.stringify(tokensObj));
    // Only 'removed' fires — once per token cleared from storage.
    expect(addedHandler).not.toHaveBeenCalled();
    expect(renewHandler).not.toHaveBeenCalled();
    expect(removedHandler).toHaveBeenCalledTimes(2);
  });
  it('should add and remove tokens based on existing tokens from storage', () => {
    // add token if token is provided in setTokens object
    // remove token if there is existing token in storage, but not in setTokens object
    storageProvider = {
      getItem: jest.fn().mockReturnValue(JSON.stringify({
        idToken: tokens.standardIdTokenParsed,
        accessToken: tokens.standardAccessTokenParsed,
      })),
      setItem: setItemMock
    };
    setupSync({
      tokenManager: {
        storage: storageProvider
      }
    });
    const addedHandler = jest.fn();
    client.tokenManager.on('added', addedHandler);
    const removedHandler = jest.fn();
    client.tokenManager.on('removed', removedHandler);
    const renewHandler = jest.fn();
    client.tokenManager.on('renewed', renewHandler);
    const tokensObj = {
      idToken: tokens.standardIdToken2Parsed,
    };
    client.tokenManager.setTokens(tokensObj);
    expect(setItemMock).toHaveBeenCalledTimes(1);
    expect(setItemMock).toHaveBeenCalledWith('okta-token-storage', JSON.stringify(tokensObj));
    // Replacing an existing idToken fires both 'added' and 'renewed' (new, old);
    // the accessToken absent from tokensObj fires 'removed'.
    expect(addedHandler).toHaveBeenCalledWith('idToken', tokens.standardIdToken2Parsed);
    expect(renewHandler).toHaveBeenCalledWith('idToken', tokens.standardIdToken2Parsed, tokens.standardIdTokenParsed);
    expect(removedHandler).toHaveBeenCalledWith('accessToken', tokens.standardAccessTokenParsed);
  });
});
});
/////** i1 is interface with properties*/
////interface i1 {
//// /** i1_p1*/
//// i1_p1: number;
//// /** i1_f1*/
//// i1_f1(): void;
//// /** i1_l1*/
//// i1_l1: () => void;
//// i1_nc_p1: number;
//// i1_nc_f1(): void;
//// i1_nc_l1: () => void;
//// p1: number;
//// f1(): void;
//// l1: () => void;
//// nc_p1: number;
//// nc_f1(): void;
//// nc_l1: () => void;
////}
////class c1 implements i1 {
//// public i1_p1: number;
//// public i1_f1() {
//// }
//// public i1_l1: () => void;
//// public i1_nc_p1: number;
//// public i1_nc_f1() {
//// }
//// public i1_nc_l1: () => void;
//// /** c1_p1*/
//// public p1: number;
//// /** c1_f1*/
//// public f1() {
//// }
//// /** c1_l1*/
//// public l1: () => void;
//// /** c1_nc_p1*/
//// public nc_p1: number;
//// /** c1_nc_f1*/
//// public nc_f1() {
//// }
//// /** c1_nc_l1*/
//// public nc_l1: () => void;
////}
////var i1/*1iq*/_i: /*16i*/i1;
////i1_i./*1*/i/*2q*/1_f1(/*2*/);
////i1_i.i1_n/*3q*/c_f1(/*3*/);
////i1_i.f/*4q*/1(/*4*/);
////i1_i.nc/*5q*/_f1(/*5*/);
////i1_i.i1/*l2q*/_l1(/*l2*/);
////i1_i.i1_/*l3q*/nc_l1(/*l3*/);
////i1_i.l/*l4q*/1(/*l4*/);
////i1_i.nc/*l5q*/_l1(/*l5*/);
////var c1/*6iq*/_i = new c1();
////c1_i./*6*/i1/*7q*/_f1(/*7*/);
////c1_i.i1_nc/*8q*/_f1(/*8*/);
////c1_i.f/*9q*/1(/*9*/);
////c1_i.nc/*10q*/_f1(/*10*/);
////c1_i.i1/*l7q*/_l1(/*l7*/);
////c1_i.i1_n/*l8q*/c_l1(/*l8*/);
////c1_i.l/*l9q*/1(/*l9*/);
////c1_i.nc/*l10q*/_l1(/*l10*/);
////// assign to interface
////i1_i = c1_i;
////i1_i./*11*/i1/*12q*/_f1(/*12*/);
////i1_i.i1_nc/*13q*/_f1(/*13*/);
////i1_i.f/*14q*/1(/*14*/);
////i1_i.nc/*15q*/_f1(/*15*/);
////i1_i.i1/*l12q*/_l1(/*l12*/);
////i1_i.i1/*l13q*/_nc_l1(/*l13*/);
////i1_i.l/*l14q*/1(/*l14*/);
////i1_i.nc/*l15q*/_l1(/*l15*/);
/////*16*/
////class c2 {
//// /** c2 c2_p1*/
//// public c2_p1: number;
//// /** c2 c2_f1*/
//// public c2_f1() {
//// }
//// /** c2 c2_prop*/
//// public get c2_prop() {
//// return 10;
//// }
//// public c2_nc_p1: number;
//// public c2_nc_f1() {
//// }
//// public get c2_nc_prop() {
//// return 10;
//// }
//// /** c2 p1*/
//// public p1: number;
//// /** c2 f1*/
//// public f1() {
//// }
//// /** c2 prop*/
//// public get prop() {
//// return 10;
//// }
//// public nc_p1: number;
//// public nc_f1() {
//// }
//// public get nc_prop() {
//// return 10;
//// }
//// /** c2 constructor*/
//// constr/*55*/uctor(a: number) {
//// this.c2_p1 = a;
//// }
////}
////class c3 extends c2 {
//// cons/*56*/tructor() {
//// su/*18sq*/per(10);
//// this.p1 = s/*18spropq*/uper./*18spropProp*/c2_p1;
//// }
//// /** c3 p1*/
//// public p1: number;
//// /** c3 f1*/
//// public f1() {
//// }
//// /** c3 prop*/
//// public get prop() {
//// return 10;
//// }
//// public nc_p1: number;
//// public nc_f1() {
//// }
//// public get nc_prop() {
//// return 10;
//// }
////}
////var c/*17iq*/2_i = new c/*17q*/2(/*17*/10);
////var c/*18iq*/3_i = new c/*18q*/3(/*18*/);
////c2_i./*19*/c2/*20q*/_f1(/*20*/);
////c2_i.c2_nc/*21q*/_f1(/*21*/);
////c2_i.f/*22q*/1(/*22*/);
////c2_i.nc/*23q*/_f1(/*23*/);
////c3_i./*24*/c2/*25q*/_f1(/*25*/);
////c3_i.c2_nc/*26q*/_f1(/*26*/);
////c3_i.f/*27q*/1(/*27*/);
////c3_i.nc/*28q*/_f1(/*28*/);
////// assign
////c2_i = c3_i;
////c2_i./*29*/c2/*30q*/_f1(/*30*/);
////c2_i.c2_nc_/*31q*/f1(/*31*/);
////c2_i.f/*32q*/1(/*32*/);
////c2_i.nc/*33q*/_f1(/*33*/);
////class c4 extends c2 {
////}
////var c4/*34iq*/_i = new c/*34q*/4(/*34*/10);
/////*35*/
////interface i2 {
//// /** i2_p1*/
//// i2_p1: number;
//// /** i2_f1*/
//// i2_f1(): void;
//// /** i2_l1*/
//// i2_l1: () => void;
//// i2_nc_p1: number;
//// i2_nc_f1(): void;
//// i2_nc_l1: () => void;
//// /** i2 p1*/
//// p1: number;
//// /** i2 f1*/
//// f1(): void;
//// /** i2 l1*/
//// l1: () => void;
//// nc_p1: number;
//// nc_f1(): void;
//// nc_l1: () => void;
////}
////interface i3 extends i2 {
//// /** i3 p1*/
//// p1: number;
//// /** i3 f1*/
//// f1(): void;
//// /** i3 l1*/
//// l1: () => void;
//// nc_p1: number;
//// nc_f1(): void;
//// nc_l1: () => void;
////}
////var i2/*36iq*/_i: /*51i*/i2;
////var i3/*37iq*/_i: i3;
////i2_i./*36*/i2/*37q*/_f1(/*37*/);
////i2_i.i2_n/*38q*/c_f1(/*38*/);
////i2_i.f/*39q*/1(/*39*/);
////i2_i.nc/*40q*/_f1(/*40*/);
////i2_i.i2_/*l37q*/l1(/*l37*/);
////i2_i.i2_nc/*l38q*/_l1(/*l38*/);
////i2_i.l/*l39q*/1(/*l39*/);
////i2_i.nc_/*l40q*/l1(/*l40*/);
////i3_i./*41*/i2_/*42q*/f1(/*42*/);
////i3_i.i2_nc/*43q*/_f1(/*43*/);
////i3_i.f/*44q*/1(/*44*/);
////i3_i.nc_/*45q*/f1(/*45*/);
////i3_i.i2_/*l42q*/l1(/*l42*/);
////i3_i.i2_nc/*l43q*/_l1(/*l43*/);
////i3_i.l/*l44q*/1(/*l44*/);
////i3_i.nc_/*l45q*/l1(/*l45*/);
////// assign to interface
////i2_i = i3_i;
////i2_i./*46*/i2/*47q*/_f1(/*47*/);
////i2_i.i2_nc_/*48q*/f1(/*48*/);
////i2_i.f/*49q*/1(/*49*/);
////i2_i.nc/*50q*/_f1(/*50*/);
////i2_i.i2_/*l47q*/l1(/*l47*/);
////i2_i.i2_nc/*l48q*/_l1(/*l48*/);
////i2_i.l/*l49q*/1(/*l49*/);
////i2_i.nc_/*l50q*/l1(/*l50*/);
/////*51*/
/////**c5 class*/
////class c5 {
//// public b: number;
////}
////class c6 extends c5 {
//// public d;
//// const/*57*/ructor() {
//// /*52*/super();
//// this.d = /*53*/super./*54*/b;
//// }
////}
// Marker '1': completion on an i1 interface instance — members declared with
// JSDoc show their doc comment; the *_nc_* ("no comment") members show "".
goTo.marker('1');
verify.completionListContains("i1_p1", "(property) i1.i1_p1: number", "i1_p1");
verify.completionListContains("i1_f1", "(method) i1.i1_f1(): void", "i1_f1");
verify.completionListContains("i1_l1", "(property) i1.i1_l1: () => void", "i1_l1");
verify.completionListContains("i1_nc_p1", "(property) i1.i1_nc_p1: number", "");
verify.completionListContains("i1_nc_f1", "(method) i1.i1_nc_f1(): void", "");
verify.completionListContains("i1_nc_l1", "(property) i1.i1_nc_l1: () => void", "");
verify.completionListContains("p1", "(property) i1.p1: number", "");
verify.completionListContains("f1", "(method) i1.f1(): void", "");
verify.completionListContains("l1", "(property) i1.l1: () => void", "");
verify.completionListContains("nc_p1", "(property) i1.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) i1.nc_f1(): void", "");
verify.completionListContains("nc_l1", "(property) i1.nc_l1: () => void", "");
// Signature help inside the call parens: only i1_f1 carries a doc comment.
verify.signatureHelp(
    { marker: "2", docComment: "i1_f1" },
    { marker: ["3", "4", "5", "l2", "l3", "l4", "l5"], docComment: "" },
);
// Quick info at the *q markers; [text, doc] pairs when a doc comment exists.
verify.quickInfos({
    "1iq": "var i1_i: i1",
    "2q": ["(method) i1.i1_f1(): void", "i1_f1"],
    "3q": "(method) i1.i1_nc_f1(): void",
    "4q": "(method) i1.f1(): void",
    "5q": "(method) i1.nc_f1(): void",
    l2q: ["(property) i1.i1_l1: () => void", "i1_l1"],
    l3q: "(property) i1.i1_nc_l1: () => void",
    l4q: "(property) i1.l1: () => void",
    l5q: "(property) i1.nc_l1: () => void"
});
// Marker '6': completion on a c1 class instance implementing i1. Implemented
// members without their own JSDoc inherit the interface's doc comment.
goTo.marker('6');
verify.completionListContains("i1_p1", "(property) c1.i1_p1: number", "i1_p1");
verify.completionListContains("i1_f1", "(method) c1.i1_f1(): void", "i1_f1");
verify.completionListContains("i1_l1", "(property) c1.i1_l1: () => void", "i1_l1");
verify.completionListContains("i1_nc_p1", "(property) c1.i1_nc_p1: number", "");
verify.completionListContains("i1_nc_f1", "(method) c1.i1_nc_f1(): void", "");
verify.completionListContains("i1_nc_l1", "(property) c1.i1_nc_l1: () => void", "");
verify.completionListContains("p1", "(property) c1.p1: number", "c1_p1");
verify.completionListContains("f1", "(method) c1.f1(): void", "c1_f1");
verify.completionListContains("l1", "(property) c1.l1: () => void", "c1_l1");
verify.completionListContains("nc_p1", "(property) c1.nc_p1: number", "c1_nc_p1");
verify.completionListContains("nc_f1", "(method) c1.nc_f1(): void", "c1_nc_f1");
verify.completionListContains("nc_l1", "(property) c1.nc_l1: () => void", "c1_nc_l1");
verify.signatureHelp(
    { marker: "7", docComment: "i1_f1" },
    { marker: "9", docComment: "c1_f1" },
    { marker: "10", docComment: "c1_nc_f1" },
    { marker: "l9", docComment: "c1_l1" },
    { marker: "l10", docComment: "c1_nc_l1" },
    { marker: ["8", "l7", "l8"], docComment: "" },
);
verify.quickInfos({
    "6iq": "var c1_i: c1",
    "7q": ["(method) c1.i1_f1(): void", "i1_f1"],
    "8q": "(method) c1.i1_nc_f1(): void",
    "9q": ["(method) c1.f1(): void", "c1_f1"],
    "10q": ["(method) c1.nc_f1(): void", "c1_nc_f1"],
    l7q: ["(property) c1.i1_l1: () => void", "i1_l1"],
    l8q: "(property) c1.i1_nc_l1: () => void",
    l9q: ["(property) c1.l1: () => void", "c1_l1"],
    l10q: ["(property) c1.nc_l1: () => void", "c1_nc_l1"],
});
// Marker '11': after assigning c1_i to i1_i, completion/help reflects the
// declared interface type i1 (not the class), so class doc comments are gone.
goTo.marker('11');
verify.completionListContains("i1_p1", "(property) i1.i1_p1: number", "i1_p1");
verify.completionListContains("i1_f1", "(method) i1.i1_f1(): void", "i1_f1");
verify.completionListContains("i1_l1", "(property) i1.i1_l1: () => void", "i1_l1");
verify.completionListContains("i1_nc_p1", "(property) i1.i1_nc_p1: number", "");
verify.completionListContains("i1_nc_f1", "(method) i1.i1_nc_f1(): void", "");
verify.completionListContains("i1_nc_l1", "(property) i1.i1_nc_l1: () => void", "");
verify.completionListContains("p1", "(property) i1.p1: number", "");
verify.completionListContains("f1", "(method) i1.f1(): void", "");
verify.completionListContains("l1", "(property) i1.l1: () => void", "");
verify.completionListContains("nc_p1", "(property) i1.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) i1.nc_f1(): void", "");
verify.completionListContains("nc_l1", "(property) i1.nc_l1: () => void", "");
verify.signatureHelp(
    { marker: "12", docComment: "i1_f1" },
    { marker: ["13", "14", "15", "l12", "l13", "l14", "l15"], docComment: "" },
);
verify.quickInfos({
    "12q": ["(method) i1.i1_f1(): void", "i1_f1"],
    "13q": "(method) i1.i1_nc_f1(): void",
    "14q": "(method) i1.f1(): void",
    "15q": "(method) i1.nc_f1(): void",
    l12q: ["(property) i1.i1_l1: () => void", "i1_l1"],
    l13q: "(property) i1.i1_nc_l1: () => void",
    l14q: "(property) i1.l1: () => void",
    l15q: "(property) i1.nc_l1: () => void",
});
// Marker '16' (statement position): the interface's doc comment is NOT shown
// in the value-completion list, only at the type position (marker '16i').
goTo.marker('16');
verify.not.completionListContains("i1", "interface i1", "i1 is interface with properties");
verify.completionListContains("i1_i", "var i1_i: i1", "");
verify.completionListContains("c1", "class c1", "");
verify.completionListContains("c1_i", "var c1_i: c1", "");
goTo.marker('16i');
verify.completionListContains("i1", "interface i1", "i1 is interface with properties");
verify.quickInfos({
    "17iq": "var c2_i: c2",
    "18iq": "var c3_i: c3"
});
// Constructor signature help/quick info: c2 has a documented constructor,
// c3's implicit-doc constructor shows "".
verify.signatureHelp(
    { marker: "17", docComment: "c2 constructor" },
    { marker: "18", docComment: "" },
);
verify.quickInfos({
    "18sq": ["constructor c2(a: number): c2", "c2 constructor"],
    "18spropq": "class c2",
    "18spropProp": ["(property) c2.c2_p1: number", "c2 c2_p1"],
    "17q": ["constructor c2(a: number): c2", "c2 constructor"],
    "18q": "constructor c3(): c3"
});
// Marker '19': completion on a c2 instance — includes getter-backed
// properties (c2_prop/prop), with doc comments only where declared.
goTo.marker('19');
verify.completionListContains("c2_p1", "(property) c2.c2_p1: number", "c2 c2_p1");
verify.completionListContains("c2_f1", "(method) c2.c2_f1(): void", "c2 c2_f1");
verify.completionListContains("c2_prop", "(property) c2.c2_prop: number", "c2 c2_prop");
verify.completionListContains("c2_nc_p1", "(property) c2.c2_nc_p1: number", "");
verify.completionListContains("c2_nc_f1", "(method) c2.c2_nc_f1(): void", "");
verify.completionListContains("c2_nc_prop", "(property) c2.c2_nc_prop: number", "");
verify.completionListContains("p1", "(property) c2.p1: number", "c2 p1");
verify.completionListContains("f1", "(method) c2.f1(): void", "c2 f1");
verify.completionListContains("prop", "(property) c2.prop: number", "c2 prop");
verify.completionListContains("nc_p1", "(property) c2.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) c2.nc_f1(): void", "");
verify.completionListContains("nc_prop", "(property) c2.nc_prop: number", "");
verify.signatureHelp(
    { marker: "20", docComment: "c2 c2_f1" },
    { marker: "22", docComment: "c2 f1" },
    { marker: ["21", "23"], docComment: "" },
);
verify.quickInfos({
    "20q": ["(method) c2.c2_f1(): void", "c2 c2_f1"],
    "21q": "(method) c2.c2_nc_f1(): void",
    "22q": ["(method) c2.f1(): void", "c2 f1"],
    "23q": "(method) c2.nc_f1(): void"
});
// Marker '24': completion on a c3 (extends c2) instance — inherited members
// keep c2's docs; overridden members (p1/f1/prop) show c3's docs.
goTo.marker('24');
verify.completionListContains("c2_p1", "(property) c2.c2_p1: number", "c2 c2_p1");
verify.completionListContains("c2_f1", "(method) c2.c2_f1(): void", "c2 c2_f1");
verify.completionListContains("c2_prop", "(property) c2.c2_prop: number", "c2 c2_prop");
verify.completionListContains("c2_nc_p1", "(property) c2.c2_nc_p1: number", "");
verify.completionListContains("c2_nc_f1", "(method) c2.c2_nc_f1(): void", "");
verify.completionListContains("c2_nc_prop", "(property) c2.c2_nc_prop: number", "");
verify.completionListContains("p1", "(property) c3.p1: number", "c3 p1");
verify.completionListContains("f1", "(method) c3.f1(): void", "c3 f1");
verify.completionListContains("prop", "(property) c3.prop: number", "c3 prop");
verify.completionListContains("nc_p1", "(property) c3.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) c3.nc_f1(): void", "");
verify.completionListContains("nc_prop", "(property) c3.nc_prop: number", "");
verify.signatureHelp(
    { marker: "25", docComment: "c2 c2_f1" },
    { marker: "27", docComment: "c3 f1" },
    { marker: ["26", "28"], docComment: "" },
);
verify.quickInfos({
    "25q": ["(method) c2.c2_f1(): void", "c2 c2_f1"],
    "26q": "(method) c2.c2_nc_f1(): void",
    "27q": ["(method) c3.f1(): void", "c3 f1"],
    "28q": "(method) c3.nc_f1(): void"
});
// Marker '29': c3_i assigned to c2_i — members resolve against the declared
// type c2 again, so the c3 overrides' doc comments no longer appear.
goTo.marker('29');
verify.completionListContains("c2_p1", "(property) c2.c2_p1: number", "c2 c2_p1");
verify.completionListContains("c2_f1", "(method) c2.c2_f1(): void", "c2 c2_f1");
verify.completionListContains("c2_prop", "(property) c2.c2_prop: number", "c2 c2_prop");
verify.completionListContains("c2_nc_p1", "(property) c2.c2_nc_p1: number", "");
verify.completionListContains("c2_nc_f1", "(method) c2.c2_nc_f1(): void", "");
// Consistency fix: every sibling assertion for an undocumented member passes
// an explicit "" third argument; this one omitted it, leaving the (empty)
// doc comment unverified.
verify.completionListContains("c2_nc_prop", "(property) c2.c2_nc_prop: number", "");
verify.completionListContains("p1", "(property) c2.p1: number", "c2 p1");
verify.completionListContains("f1", "(method) c2.f1(): void", "c2 f1");
verify.completionListContains("prop", "(property) c2.prop: number", "c2 prop");
verify.completionListContains("nc_p1", "(property) c2.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) c2.nc_f1(): void", "");
verify.completionListContains("nc_prop", "(property) c2.nc_prop: number", "");
verify.signatureHelp(
    { marker: "30", docComment: "c2 c2_f1" },
    { marker: "32", docComment: "c2 f1" },
    { marker: ["31", "33"], docComment: "" },
);
verify.quickInfos({
    "30q": ["(method) c2.c2_f1(): void", "c2 c2_f1"],
    "31q": "(method) c2.c2_nc_f1(): void",
    "32q": ["(method) c2.f1(): void", "c2 f1"],
    "33q": "(method) c2.nc_f1(): void"
});
verify.signatureHelp({ marker: "34", docComment: "c2 constructor" });
verify.quickInfos({
"34iq": "var c4_i: c4",
"34q": ["constructor c4(a: number): c4", "c2 constructor"]
});
// Marker 35: top-level completions — the classes and their instance
// variables are all offered, none of them carrying doc comments.
goTo.marker('35');
verify.completionListContains("c2", "class c2", "");
verify.completionListContains("c2_i", "var c2_i: c2", "");
verify.completionListContains("c3", "class c3", "");
verify.completionListContains("c3_i", "var c3_i: c3", "");
verify.completionListContains("c4", "class c4", "");
verify.completionListContains("c4_i", "var c4_i: c4", "");
// Marker 36: member completions on an i2 interface instance — documented
// members surface their doc comments, nc_* members report none. The l*
// markers cover the lambda-typed (function-typed property) members.
goTo.marker('36');
verify.completionListContains("i2_p1", "(property) i2.i2_p1: number", "i2_p1");
verify.completionListContains("i2_f1", "(method) i2.i2_f1(): void", "i2_f1");
verify.completionListContains("i2_l1", "(property) i2.i2_l1: () => void", "i2_l1");
verify.completionListContains("i2_nc_p1", "(property) i2.i2_nc_p1: number", "");
verify.completionListContains("i2_nc_f1", "(method) i2.i2_nc_f1(): void", "");
verify.completionListContains("i2_nc_l1", "(property) i2.i2_nc_l1: () => void", "");
verify.completionListContains("p1", "(property) i2.p1: number", "i2 p1");
verify.completionListContains("f1", "(method) i2.f1(): void", "i2 f1");
verify.completionListContains("l1", "(property) i2.l1: () => void", "i2 l1");
verify.completionListContains("nc_p1", "(property) i2.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) i2.nc_f1(): void", "");
verify.completionListContains("nc_l1", "(property) i2.nc_l1: () => void", "");
verify.signatureHelp(
  { marker: "37", docComment: "i2_f1" },
  { marker: "39", docComment: "i2 f1" },
  // Fix: "l37" was listed twice; the second entry should be "l38" (its
  // quick-info counterpart "l38q" is asserted below, so the marker exists).
  { marker: ["38", "40", "l37", "l38", "l39", "l40"], docComment: "" },
);
verify.quickInfos({
  "36iq": "var i2_i: i2",
  "37iq": "var i3_i: i3",
  "37q": ["(method) i2.i2_f1(): void", "i2_f1"],
  "38q": "(method) i2.i2_nc_f1(): void",
  "39q": ["(method) i2.f1(): void", "i2 f1"],
  "40q": "(method) i2.nc_f1(): void",
  "l37q": ["(property) i2.i2_l1: () => void", "i2_l1"],
  "l38q": "(property) i2.i2_nc_l1: () => void",
  "l39q": ["(property) i2.l1: () => void", "i2 l1"],
  "l40q": "(property) i2.nc_l1: () => void",
});
// Marker 41: completions on an i3 instance — inherited i2_* members keep
// their i2 doc comments while the unprefixed members resolve on i3 with i3
// docs; nc_* members have none.
goTo.marker('41');
verify.completionListContains("i2_p1", "(property) i2.i2_p1: number", "i2_p1");
verify.completionListContains("i2_f1", "(method) i2.i2_f1(): void", "i2_f1");
verify.completionListContains("i2_l1", "(property) i2.i2_l1: () => void", "i2_l1");
verify.completionListContains("i2_nc_p1", "(property) i2.i2_nc_p1: number", "");
verify.completionListContains("i2_nc_f1", "(method) i2.i2_nc_f1(): void", "");
verify.completionListContains("i2_nc_l1", "(property) i2.i2_nc_l1: () => void", "");
verify.completionListContains("p1", "(property) i3.p1: number", "i3 p1");
verify.completionListContains("f1", "(method) i3.f1(): void", "i3 f1");
verify.completionListContains("l1", "(property) i3.l1: () => void", "i3 l1");
verify.completionListContains("nc_p1", "(property) i3.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) i3.nc_f1(): void", "");
verify.completionListContains("nc_l1", "(property) i3.nc_l1: () => void", "");
// Signature help and quick info for markers 42-45 (l42-l45 are the
// lambda-typed members) mirror the completions.
verify.signatureHelp(
  { marker: "42", docComment: "i2_f1" },
  { marker: "44", docComment: "i3 f1" },
  { marker: ["43", "45", "l42", "l43", "l44", "l45"], docComment: "" },
);
verify.quickInfos({
  "42q": ["(method) i2.i2_f1(): void", "i2_f1"],
  "43q": "(method) i2.i2_nc_f1(): void",
  "44q": ["(method) i3.f1(): void", "i3 f1"],
  "45q": "(method) i3.nc_f1(): void",
  l42q: ["(property) i2.i2_l1: () => void", "i2_l1"],
  l43q: "(property) i2.i2_nc_l1: () => void",
  l44q: ["(property) i3.l1: () => void", "i3 l1"],
  l45q: "(property) i3.nc_l1: () => void"
});
// Marker 46: completions on an i2 receiver — same expectations as marker 36,
// with the unprefixed members resolving on i2 itself.
goTo.marker('46');
verify.completionListContains("i2_p1", "(property) i2.i2_p1: number", "i2_p1");
verify.completionListContains("i2_f1", "(method) i2.i2_f1(): void", "i2_f1");
verify.completionListContains("i2_l1", "(property) i2.i2_l1: () => void", "i2_l1");
verify.completionListContains("i2_nc_p1", "(property) i2.i2_nc_p1: number", "");
verify.completionListContains("i2_nc_f1", "(method) i2.i2_nc_f1(): void", "");
verify.completionListContains("i2_nc_l1", "(property) i2.i2_nc_l1: () => void", "");
verify.completionListContains("p1", "(property) i2.p1: number", "i2 p1");
verify.completionListContains("f1", "(method) i2.f1(): void", "i2 f1");
verify.completionListContains("l1", "(property) i2.l1: () => void", "i2 l1");
verify.completionListContains("nc_p1", "(property) i2.nc_p1: number", "");
verify.completionListContains("nc_f1", "(method) i2.nc_f1(): void", "");
verify.completionListContains("nc_l1", "(property) i2.nc_l1: () => void", "");
verify.signatureHelp(
  { marker: "47", docComment: "i2_f1" },
  { marker: "49", docComment: "i2 f1" },
  // Fix: marker "50" was missing from the empty-doc group — its quick info
  // ("50q") is asserted below, and the parallel sections (36/41) list both
  // nc method markers alongside the l* markers.
  { marker: ["48", "50", "l47", "l48", "l49", "l50"], docComment: "" },
);
verify.quickInfos({
  "47q": ["(method) i2.i2_f1(): void", "i2_f1"],
  "48q": "(method) i2.i2_nc_f1(): void",
  "49q": ["(method) i2.f1(): void", "i2 f1"],
  "50q": "(method) i2.nc_f1(): void",
  l47q: ["(property) i2.i2_l1: () => void", "i2_l1"],
  l48q: "(property) i2.i2_nc_l1: () => void",
  l49q: ["(property) i2.l1: () => void", "i2 l1"],
  // Fix: this key was "l40q", which belongs to the marker-36 section; the
  // lambda marker in this section is "l50" (see the signature-help group).
  l50q: "(property) i2.nc_l1: () => void"
});
// Marker 51: expression position — interface names are not values, so they
// must be absent from the completion list while the instance variables
// appear.
goTo.marker('51');
verify.not.completionListContains("i2", "interface i2", "");
verify.completionListContains("i2_i", "var i2_i: i2", "");
verify.not.completionListContains("i3", "interface i3", "");
verify.completionListContains("i3_i", "var i3_i: i3", "");
// Marker 51i: type position — here the interfaces are valid completions.
goTo.marker('51i');
verify.completionListContains("i2", "interface i2", "");
verify.completionListContains("i3", "interface i3", "");
// Quick info for c5 (documented "c5 class") and the c2/c3/c6 constructors.
verify.quickInfos({
  52: ["constructor c5(): c5", "c5 class"],
  53: ["class c5", "c5 class"],
  54: "(property) c5.b: number",
  55: ["constructor c2(a: number): c2", "c2 constructor"],
  56: "constructor c3(): c3",
  57: "constructor c6(): c6"
});
import { api } from '@replit/protocol';
import { Channel } from './channel';
import { getWebSocketClass, getNextRetryDelay, getConnectionStr } from './util/helpers';
import { EIOCompat } from './util/EIOCompat';
import { FetchConnectionMetadataError, ConnectionState } from './types';
import type {
ConnectOptions,
GovalMetadata,
OpenChannelCb,
ChannelOptions,
DebugLog,
OpenOptions,
} from './types';
/**
 * Internal reasons for the client's connection ending. Used as the
 * discriminant of {@link CloseResult} so close handling can distinguish
 * user-initiated closes, network disconnects, and invariant errors.
 */
enum ClientCloseReason {
  /**
   * The user called `client.close`
   */
  Intentional = 'Intentional',
  /**
   * The websocket connection died
   */
  Disconnected = 'Disconnected',
  /**
   * The client encountered an unrecoverable/invariant error
   */
  Error = 'Error',
}
/**
 * Discriminated union describing why the client closed, tagged by
 * `closeReason`. Disconnects carry the websocket close/error event;
 * unrecoverable failures carry the error.
 */
type CloseResult =
  | {
      // `client.close` was called.
      closeReason: ClientCloseReason.Intentional;
    }
  | {
      // The websocket died; `wsEvent` is the close (or error) event.
      closeReason: ClientCloseReason.Disconnected;
      wsEvent: CloseEvent | Event;
    }
  | {
      // Internal invariant violation or other unrecoverable error.
      closeReason: ClientCloseReason.Error;
      error: Error;
    };
/**
 * Bookkeeping record for a single `openChannel` call, kept across reconnects
 * so channels re-open automatically. Discriminated by `isOpen`: once the
 * channel is open, `channelId` and `cleanupCb` are populated together.
 * `closeRequested` marks requests whose returned close function was called.
 */
type ChannelRequest<Ctx> =
  | {
      options: ChannelOptions<Ctx>;
      openChannelCb: OpenChannelCb<Ctx>;
      isOpen: true;
      closeRequested: boolean;
      channelId: number;
      cleanupCb: ReturnType<OpenChannelCb<Ctx>>;
    }
  | {
      options: ChannelOptions<Ctx>;
      openChannelCb: OpenChannelCb<Ctx>;
      isOpen: false;
      closeRequested: boolean;
      channelId: null;
      cleanupCb: null;
    };
export class Client<Ctx extends unknown = null> {
/**
 * Indicates the current state of the connection with the container.
 * This will only be DISCONNECTED if `open` has not been called
 * or the client closed permanently. Otherwise it'll be
 * CONNECTED or CONNECTING
 */
public connectionState: ConnectionState;
/**
 * The websocket used for communication with the container.
 *
 * @hidden
 */
private ws: WebSocket | null;
/**
 * Supplied to us as the first argument when calling `client.open`.
 * The most important option is the connection metadata getter.
 *
 * @hidden
 */
private connectOptions: ConnectOptions<Ctx> | null;
/**
 * Supplied to us as the second argument when calling `client.open`.
 * Any time we connect we will call this callback with the control channel.
 * If we disconnect before ever connecting and we won't retry —
 * i.e. the user called `client.close` or an unrecoverable error occurred —
 * we will call this function with an error.
 * This has the same API as the second argument to openChannel.
 *
 * @hidden
 */
private chan0Cb: OpenChannelCb<Ctx> | null;
/**
 * This is the return value from chan0Cb. This will be null as long as we
 * haven't connected. Once connected, we call this anytime a connection ends;
 * it will be passed a `willReconnect` boolean indicating whether we're
 * reconnecting or not, depending on the closure reason.
 *
 * @hidden
 */
private chan0CleanupCb: ReturnType<OpenChannelCb<Ctx>> | null;
/**
 * Anytime `openChannel` is called, we push the request in here. This is
 * used to maintain the `openChannel` calls across reconnects and to
 * orchestrate channel opening and closing.
 *
 * @hidden
 */
private channelRequests: Array<ChannelRequest<Ctx>>;
/**
 * Purely an optimization: rather than searching through `channelRequests`
 * to find a channel every time, we pull it out quickly from this map.
 * Any channel here (except for channel 0) should have a corresponding
 * `channelRequest`, and that request should be in an `isOpen: true` state
 * with a corresponding channel id.
 *
 * @hidden
 */
private channels: {
  [id: number]: Channel;
};
/**
 * Called for breadcrumbs and other debug reasons. Use addDebugFunc
 * instead.
 *
 * @deprecated set via the deprecated `setDebugFunc`; prefer `debugFuncs`
 * @hidden
 */
private legacyDebugFunc: undefined | ((log: DebugLog) => void);
/**
 * Listeners to be called for breadcrumbs and other debug reasons.
 *
 * @hidden
 */
private debugFuncs: Array<(log: DebugLog) => void>;
/**
 * A function supplied to us by the user of the client. Will be called
 * any time we have an unrecoverable error, usually an invariant violation.
 *
 * @hidden
 */
private userUnrecoverableErrorHandler: ((e: Error) => void) | null;
/**
 * The connection might require multiple retries to be established.
 * Anytime we need to retry, we also add an incremental backoff,
 * which we do using `setTimeout`. When the client closes before our
 * retry is initiated, we clear this timeout.
 *
 * @hidden
 */
private retryTimeoutId: ReturnType<typeof setTimeout> | null;
/**
 * Abort controller is used so that when the user calls client.close while
 * we're fetching connection metadata, we can be sure that we don't have a
 * `connect` call lingering around waiting for connection metadata and
 * eventually continue on as if we still want to connect.
 *
 * @hidden
 */
private fetchTokenAbortController: AbortController | null;
/**
 * Was the client destroyed? A destroyed client is a client that cannot
 * be used ever again.
 *
 * @hidden
 */
private destroyed: boolean;
/**
 * The metadata for the current connection. Cleared on intentional close
 * (see `close`) so the client can be re-used against another endpoint.
 *
 * @hidden
 */
private connectionMetadata: GovalMetadata | null;
/**
 * Initializes all state to its disconnected defaults; nothing connects
 * until {@link Client.open | open} is called.
 *
 * @typeParam Ctx context, passed to various callbacks, specified when calling {@link Client.open | open}
 */
constructor() {
  // Connection state: nothing is live until `open` wires it up.
  this.connectionState = ConnectionState.DISCONNECTED;
  this.ws = null;
  this.connectOptions = null;
  this.connectionMetadata = null;
  // Channel bookkeeping.
  this.channels = {};
  this.channelRequests = [];
  this.chan0Cb = null;
  this.chan0CleanupCb = null;
  // Lifecycle helpers.
  this.retryTimeoutId = null;
  this.fetchTokenAbortController = null;
  this.destroyed = false;
  // Debug/error hooks; `debugFuncs` must exist before the breadcrumb below.
  this.debugFuncs = [];
  this.userUnrecoverableErrorHandler = null;
  this.debug({ type: 'breadcrumb', message: 'constructor' });
}
/**
 * Starts connecting to the server and opens channel 0.
 *
 * See https://protodoc.turbio.repl.co/protov2 for more protocol-specific info.
 *
 * Every connected client automatically "has" {@link Channel | channel} 0,
 * which listens to global events. Any time the client connects, `cb` is
 * called with channel 0. Please refrain from using channel 0 to open
 * channels and use [[Client.openChannel]] instead.
 *
 * This follows the same semantics as [[Client.openChannel]], except that the
 * first parameter configures the connection (and carries the context passed
 * to the various callbacks), and no close function is returned — use
 * [[Client.close]]. If the client closes before ever connecting (via
 * `client.close` or an unrecoverable error — see
 * `client.setUnrecoverableError`), `cb` is invoked with a null channel.
 * The callback may return a cleanup function, called whenever the connection
 * ends with `{ willReconnect }` indicating whether a reconnect will follow.
 *
 * Usage:
 * ```typescript
 * client.open({ context, fetchConnectionMetadata }, function onOpen({
 *   channel,
 *   context,
 * }) {
 *   if (!channel) {
 *     // closed before ever connecting
 *     return;
 *   }
 *
 *   // `channel` is channel 0: send with `channel.send`, listen with
 *   // `channel.onCommand(cmd => ...)`
 *
 *   return function cleanup({ willReconnect }) {
 *     // connection ended; may reconnect if the close was unexpected
 *   };
 * });
 * ```
 */
public open = (options: OpenOptions<Ctx>, cb: OpenChannelCb<Ctx>): void => {
  // Usage errors: re-opening without closing, or opening a destroyed client.
  // (Order matters: the "must close first" message takes precedence.)
  const misuse = this.chan0Cb
    ? 'You must call `close` before opening the client again'
    : this.destroyed
    ? 'Client has been destroyed and cannot be re-used'
    : null;
  if (misuse !== null) {
    const error = new Error(misuse);
    this.onUnrecoverableError(error);
    // throw to stop the execution of the caller
    throw error;
  }
  // Caller-supplied options win over these defaults.
  this.connectOptions = {
    timeout: 10000,
    reuseConnectionMetadata: false,
    ...options,
  };
  this.debug({
    type: 'breadcrumb',
    message: 'open',
    data: { polling: false },
  });
  this.chan0Cb = cb;
  this.connect({ tryCount: 0, websocketFailureCount: 0 });
};
/**
 * Opens a {@link Channel | channel} for a
 * [service](https://protodoc.turbio.repl.co/services) and returns a function
 * that closes it. See the
 * [opening channels](https://protodoc.turbio.repl.co/protov2#opening-channels)
 * section of the protocol documentation for protocol-specific information.
 *
 * When the client connects and the channel opens, `cb` is invoked with the
 * channel; use it to {@link Channel.send | send commands} and
 * {@link Channel.onCommand | listen for incoming commands}. When the client
 * disconnects, the channel closes and becomes unusable (using it throws) —
 * a fresh channel is supplied to `cb` on every reconnect until the returned
 * close function is called.
 *
 * `cb` may return an optional cleanup function, called with a
 * [[ChannelCloseReason]] whenever the channel closes, whether or not a
 * reconnect will follow.
 *
 * [[Client.close]] closes all channels without forgetting their requests, so
 * a later [[Client.open]] re-opens them (unless their close function was
 * called). [[Client.destroy]] frees all `openChannel` calls, but the client
 * is unusable afterwards.
 *
 * @param options Options for the channel
 * @param cb The open callback
 *
 * @returns A function to close the channel
 */
public openChannel = (options: ChannelOptions<Ctx>, cb: OpenChannelCb<Ctx>): (() => void) => {
  // Existing requests sharing this name (computed before we enqueue ours).
  const existingWithName = options.name
    ? this.channelRequests.filter((req) => req.options.name === options.name)
    : [];
  if (existingWithName.some((req) => !req.closeRequested)) {
    // The protocol forbids two open channels with the same name; catching it
    // here gives the caller a useful stack trace. Requests that are queued
    // for closure (or closing) are allowed — we queue behind them.
    const error = new Error(`Channel with name ${options.name} already opened`);
    this.onUnrecoverableError(error);
    // throw to stop the execution of the caller
    throw error;
  }
  if (this.destroyed) {
    const error = new Error('Client has been destroyed and is unusable');
    this.onUnrecoverableError(error);
    // throw to stop the execution of the caller
    throw error;
  }
  const request: ChannelRequest<Ctx> = {
    options,
    openChannelCb: cb,
    isOpen: false,
    channelId: null,
    cleanupCb: null,
    closeRequested: false,
  };
  this.channelRequests.push(request);
  // Open immediately only when connected AND no same-named channel is still
  // winding down; otherwise the open happens on connect, or after the
  // previous same-named channel finishes closing (see requestCloseChannel).
  if (this.connectionState === ConnectionState.CONNECTED && existingWithName.length === 0) {
    this.requestOpenChannel(request);
  }
  return () => {
    if (request.closeRequested) {
      // Close is idempotent.
      return;
    }
    request.closeRequested = true;
    if (request.isOpen) {
      this.requestCloseChannel(request);
      return;
    }
    // Not open yet. If we're connected there is an in-flight open request,
    // and the close will be issued once it completes (we need the channel id
    // to close). If we're not connected, just forget the request and report
    // the closure to the callback.
    if (this.connectionState !== ConnectionState.CONNECTED) {
      this.channelRequests = this.channelRequests.filter((req) => req !== request);
      request.openChannelCb({
        error: new Error('Channel closed before opening'),
        channel: null,
        context: this.connectOptions ? this.connectOptions.context : null,
      });
    }
  };
};
/**
 * @hidden
 *
 * Sends an `openChan` request over channel 0 for the given channel request;
 * when `openChanRes` arrives, creates the local Channel, records it in
 * `this.channels`, mutates `channelRequest` into its open state, and invokes
 * the request's open callback.
 */
private requestOpenChannel = (channelRequest: ChannelRequest<Ctx>) => {
  const { options, openChannelCb } = channelRequest;
  if (!this.connectOptions) {
    this.onUnrecoverableError(new Error('Expected connectionOptions'));
    return;
  }
  // The caller can opt out of opening based on the connection context.
  const { skip } = options;
  if (skip && skip(this.connectOptions.context)) {
    return;
  }
  let { action } = options;
  if (!action) {
    // Default action: unnamed channels are always created fresh; named
    // channels attach to an existing channel of that name or create one.
    action =
      options.name == null
        ? api.OpenChannel.Action.CREATE
        : api.OpenChannel.Action.ATTACH_OR_CREATE;
  }
  if (channelRequest.channelId) {
    // An id here would mean the request is already open — invariant breach.
    this.onUnrecoverableError(new Error('Unexpected channelId'));
    return;
  }
  // `service` may be a plain string or derived from the connection context.
  const service =
    typeof options.service === 'string'
      ? options.service
      : options.service(this.connectOptions.context);
  this.debug({
    type: 'breadcrumb',
    message: 'requestOpenChannel',
    data: {
      name: options.name,
      service,
      action,
    },
  });
  const chan0 = this.getChannel(0);
  // Random base36 int, used to correlate the response with this request.
  const ref = Number(Math.random().toString().split('.')[1]).toString(36);
  // Not using Channel.request here because we want to
  // resolve the response synchronously. We can receive
  // openChanRes and a command on the requested channel
  // in a single tick; using promises here would cause us to
  // handle the incoming command before openChanRes, leading to errors.
  chan0.send({
    ref,
    openChan: {
      name: options.name,
      service,
      action,
    },
  });
  const dispose = chan0.onCommand((cmd: api.Command) => {
    if (ref !== cmd.ref) {
      // Not the response to our request.
      return;
    }
    // One-shot handler: stop listening once our ref matches.
    dispose();
    if (cmd.openChanRes == null) {
      this.onUnrecoverableError(new Error('Expected openChanRes on command'));
      return;
    }
    const { id, state, error } = cmd.openChanRes;
    this.debug({ type: 'breadcrumb', message: 'openChanres' });
    if (!this.connectOptions) {
      this.onUnrecoverableError(new Error('Expected connectionOptions'));
      return;
    }
    if (state === api.OpenChannelRes.State.ERROR) {
      this.onUnrecoverableError(
        new Error(`Channel open resulted with an error: ${error || 'with no message'}`),
      );
      return;
    }
    if (typeof id !== 'number' || typeof state !== 'number') {
      this.onUnrecoverableError(new Error('Expected state and channel id'));
      return;
    }
    const channel = new Channel({
      id,
      onUnrecoverableError: this.onUnrecoverableError,
      send: this.send,
    });
    this.channels[id] = channel;
    // TODO we should stop relying on mutating the same channelrequest
    (channelRequest as ChannelRequest<Ctx>).channelId = id;
    (channelRequest as ChannelRequest<Ctx>).isOpen = true;
    // Make sure to save this value as the user can call closeChannel within openChannelCb;
    // we want to avoid making the call to requestCloseChannel twice, once from within
    // openChannelCb and once here.
    const { closeRequested } = channelRequest;
    if (closeRequested) {
      // While we're opening the channel, we got a request to close this channel;
      // let's take care of that and request a close.
      // The reason we call it before `openChannelCb`
      // is just to make sure that channel has a status
      // of `closing`.
      this.requestCloseChannel(channelRequest);
    }
    (channelRequest as ChannelRequest<Ctx>).cleanupCb = openChannelCb({
      channel,
      error: null,
      context: this.connectOptions.context,
    });
  });
};
/**
 * @hidden
 *
 * Closes an open channel: marks it `closing`, sends `closeChan` (TRY_CLOSE)
 * over channel 0, awaits the response, then tears down local bookkeeping and
 * fires the channel's cleanup callback. Finally, if another queued request
 * shares this channel's name, opens it — the protocol doesn't allow two
 * concurrently open channels with the same name (see `openChannel`).
 */
private requestCloseChannel = async (channelRequest: ChannelRequest<Ctx>) => {
  if (!channelRequest.isOpen) {
    this.onUnrecoverableError(new Error('Tried to request a channel close before opening'));
    return;
  }
  const { channelId } = channelRequest;
  const chan = this.getChannel(channelRequest.channelId);
  // Mark as closing right away so the channel reads as on-its-way-out while
  // the close request is in flight.
  chan.status = 'closing';
  const chan0 = this.getChannel(0);
  // NOTE(review): getChannel throws rather than returning a falsy value, so
  // this guard looks unreachable; kept as a defensive check.
  if (!chan0) {
    this.onUnrecoverableError(
      new Error('Tried to request a channel close but there was no chan0'),
    );
    return;
  }
  this.debug({
    type: 'breadcrumb',
    message: 'requestChannelClose',
    data: {
      id: channelId,
      name: channelRequest.options.name,
      service: channelRequest.options.service,
    },
  });
  const res = await chan0.request({
    closeChan: {
      action: api.CloseChannel.Action.TRY_CLOSE,
      id: channelRequest.channelId,
    },
  });
  if (res.channelClosed) {
    // The request didn't complete normally — presumably channel 0 itself
    // closed while we awaited; TODO confirm against Channel.request.
    this.debug({
      type: 'breadcrumb',
      message: 'requestChannelClose:chan0Closed',
      data: {
        id: channelId,
        name: channelRequest.options.name,
        service: channelRequest.options.service,
      },
    });
  } else {
    if (res.closeChanRes == null) {
      this.onUnrecoverableError(new Error('Expected closeChanRes'));
      return;
    }
    const { id } = res.closeChanRes;
    if (id == null) {
      this.onUnrecoverableError(new Error(`Expected id, got ${id}`));
      return;
    }
    // The response must be about the channel we asked to close.
    if (id !== channelId) {
      this.onUnrecoverableError(
        new Error(`Expected id from closeChanRes to be ${channelId} got ${id}`),
      );
      return;
    }
    this.debug({
      type: 'breadcrumb',
      message: 'requestChannelClose:closeChanRes',
      data: {
        id: channelId,
        name: channelRequest.options.name,
        service: channelRequest.options.service,
        closeStatus: res.closeChanRes.status,
      },
    });
  }
  // Local teardown: forget the request and channel, then notify listeners.
  this.channelRequests = this.channelRequests.filter((cr) => cr !== channelRequest);
  delete this.channels[channelId];
  chan.handleClose({ initiator: 'channel', willReconnect: false });
  if (channelRequest.cleanupCb) {
    channelRequest.cleanupCb({ initiator: 'channel', willReconnect: false });
  }
  // Next up: we will check if there are any channels with the same name
  // that are queued up for opening. We have deferred the opening of the channel
  // until after the current open one closes (see `openChannel`) because the
  // protocol doesn't allow opening multiple channels with the same name.
  if (!channelRequest.options.name || this.connectionState !== ConnectionState.CONNECTED) {
    return;
  }
  const nextRequest = this.channelRequests.find(
    (cr) => cr.options.name === channelRequest.options.name,
  );
  if (!nextRequest) {
    return;
  }
  this.requestOpenChannel(nextRequest);
};
/**
 * Closes the connection.
 * - `open` must have been called before calling this method
 * - If we haven't connected yet, the open callback is called with an error
 * - Any open WebSocket connection is closed, along with all open channels
 * - `openChannel` requests are NOT cleared: channels that never opened get
 *   their {@link OpenChannelCb | open channel callback} invoked with an
 *   error; channels that did open get their returned cleanup callback
 */
public close = (): void => {
  this.debug({ type: 'breadcrumb', message: 'user close' });
  const wasOpened = Boolean(this.chan0Cb) && Boolean(this.connectOptions);
  if (!wasOpened) {
    const error = new Error('Must call client.open before closing');
    this.onUnrecoverableError(error);
    // throw to stop the execution of the caller
    throw error;
  }
  // Cancel any in-flight connection-metadata fetch.
  const pendingFetch = this.fetchTokenAbortController;
  if (pendingFetch) {
    pendingFetch.abort();
    this.fetchTokenAbortController = null;
  }
  // Intentional close: drop cached metadata so the client may be re-used to
  // connect to another repl.
  this.connectionMetadata = null;
  this.handleClose({ closeReason: ClientCloseReason.Intentional });
};
/**
 * Destroy closes the connection, so all the rules of `close` apply here.
 * The only difference is that `destroy` renders the client unusable
 * afterwards. It also cleans up all saved `openChannel` calls, freeing the
 * callbacks and avoiding leaks.
 */
public destroy = (): void => {
  // Flag first so anything running during close() sees the destroyed state
  // and future open/openChannel calls are rejected.
  // (Previously this flag was redundantly re-assigned again at the end.)
  this.destroyed = true;
  this.debug({ type: 'breadcrumb', message: 'destroy' });
  if (this.connectionState !== ConnectionState.DISCONNECTED) {
    this.close();
  }
  // Drop externally supplied callbacks so they can be garbage collected and
  // can never fire again.
  this.debug = () => {};
  this.userUnrecoverableErrorHandler = null;
  this.channelRequests = [];
};
/**
 * @hidden
 * Looks up a channel by its id; a missing channel is treated as an
 * unrecoverable error (and thrown).
 */
public getChannel = (id: number): Channel => {
  const channel = this.channels[id];
  if (channel) {
    return channel;
  }
  const error = new Error(`No channel with number ${id}`);
  this.onUnrecoverableError(error);
  throw error;
};
/**
 * Fans a debug log entry out to the deprecated single debug function (when
 * set) and to every registered debug listener.
 *
 * @hidden
 */
private debug = (log: DebugLog): void => {
  if (this.legacyDebugFunc) {
    this.legacyDebugFunc(log);
  }
  for (const listener of this.debugFuncs) {
    listener(log);
  }
};
/**
 * Sets the single legacy logging/debugging function, replacing any
 * previously set one.
 *
 * @deprecated use addDebugFunc instead
 */
public setDebugFunc = (debugFunc: (log: DebugLog) => void): void => {
  this.legacyDebugFunc = debugFunc;
};
/**
 * Registers a logging/debugging listener.
 *
 * @returns a function that unregisters the listener (removes one
 * registration per call)
 */
public addDebugFunc = (debugFunc: (log: DebugLog) => void): (() => void) => {
  this.debugFuncs.push(debugFunc);
  const removeDebugFunc = () => {
    // Remove a single occurrence; calling again is a no-op once gone.
    const index = this.debugFuncs.indexOf(debugFunc);
    if (index >= 0) {
      this.debugFuncs.splice(index, 1);
    }
  };
  return removeDebugFunc;
};
/**
 * Set a function to handle unrecoverable errors, replacing any previously
 * set handler.
 *
 * Unrecoverable errors are internal errors or invariance errors
 * caused by the user mis-using the client.
 */
public setUnrecoverableErrorHandler = (onUnrecoverableError: (e: Error) => void): void => {
  this.userUnrecoverableErrorHandler = onUnrecoverableError;
};
/**
 * Gets the current connection metadata, or null when none has been fetched
 * yet (or it was cleared, e.g. by an intentional `close`).
 */
public getConnectionMetadata = (): GovalMetadata | null => this.connectionMetadata;
/** @hidden */
private connect = async ({
tryCount,
websocketFailureCount,
}: {
tryCount: number;
websocketFailureCount: number;
}) => {
this.debug({
type: 'breadcrumb',
message: 'connecting',
data: {
connectionState: this.connectionState,
connectTries: tryCount,
websocketFailureCount,
readyState: this.ws ? this.ws.readyState : undefined,
chan0CbExists: Boolean(this.chan0Cb),
},
});
if (this.connectionState !== ConnectionState.DISCONNECTED) {
const error = new Error('Client must be disconnected to connect');
this.onUnrecoverableError(error);
throw error;
}
if (this.ws) {
const error = new Error('Unexpected existing websocket instance');
this.onUnrecoverableError(error);
throw error;
}
if (!this.connectOptions) {
const error = new Error('Expected connectionOptions');
this.onUnrecoverableError(error);
throw error;
}
if (!this.chan0Cb) {
this.onUnrecoverableError(new Error('Expected chan0Cb'));
return;
}
if (this.chan0CleanupCb) {
this.onUnrecoverableError(new Error('Unexpected chan0CleanupCb, are you sure you closed'));
return;
}
if (this.channelRequests.some((cr) => cr.isOpen)) {
this.onUnrecoverableError(new Error('All channels should be closed when we connect'));
return;
}
if (Object.keys(this.channels).length) {
this.onUnrecoverableError(new Error('Found an an unexpected existing channels'));
return;
}
this.connectionState = ConnectionState.CONNECTING;
const chan0 = new Channel({
id: 0,
onUnrecoverableError: this.onUnrecoverableError,
send: this.send,
});
this.channels[0] = chan0;
if (!this.connectOptions.reuseConnectionMetadata || this.connectionMetadata === null) {
if (this.fetchTokenAbortController) {
this.onUnrecoverableError(new Error('Expected fetchTokenAbortController to be null'));
return;
}
const abortController = new AbortController();
this.fetchTokenAbortController = abortController;
let connectionMetadataFetchResult;
try {
connectionMetadataFetchResult = await this.connectOptions.fetchConnectionMetadata(
abortController.signal,
);
} catch (e) {
this.onUnrecoverableError(e);
return;
}
this.fetchTokenAbortController = null;
const connectionMetadata = connectionMetadataFetchResult;
const aborted = connectionMetadata.error === FetchConnectionMetadataError.Aborted;
if (abortController.signal.aborted !== aborted) {
// the aborted return value and the abort signal should be equivalent
if (abortController.signal.aborted) {
// In cases where our abort signal has been called means `client.close` was called
// that means we shouldn't be calling `handleConnectError` because chan0Cb is null!
this.onUnrecoverableError(
new Error(
'Expected abort returned from fetchConnectionMetadata to be truthy when the controller aborts',
),
);
return;
}
// the user shouldn't return abort without the abort signal being called, if aborting is desired
// client.close should be called
this.onUnrecoverableError(
new Error('Abort should only be truthy returned when the abort signal is triggered'),
);
return;
}
if (connectionMetadata.error === FetchConnectionMetadataError.Aborted) {
// Just return. The user called `client.close leading to a connectionMetadata abort
// chan0Cb will be called with with an error Channel close, no need to do anything here.
return;
}
if (connectionMetadata.error === FetchConnectionMetadataError.Retriable) {
this.retryConnect({
tryCount: tryCount + 1,
websocketFailureCount,
chan0,
error: new Error('Retriable error'),
});
return;
}
if (this.connectionState !== ConnectionState.CONNECTING) {
this.onUnrecoverableError(new Error('Client was closed before connecting'));
return;
}
if (connectionMetadata.error) {
this.onUnrecoverableError(connectionMetadata.error);
return;
}
this.connectionMetadata = connectionMetadata;
}
if (websocketFailureCount === 3) {
// Report that we fellback to polling
this.debug({
type: 'breadcrumb',
message: 'polling fallback',
});
}
const isPolling = websocketFailureCount >= 3;
const WebSocketClass = isPolling
? EIOCompat
: getWebSocketClass(this.connectOptions.WebSocketClass);
const connStr = getConnectionStr(this.connectionMetadata, isPolling, this.connectOptions.pollingHost);
const ws = new WebSocketClass(connStr);
ws.binaryType = 'arraybuffer';
ws.onmessage = this.onSocketMessage;
this.ws = ws;
// We'll use this to determine whether or not we should consider the next
// failure a websocket failure and fallback to polling. If we were able to
// pass the handshake phase at some point, then websockets work fine.
let didWebsocketsWork = false;
// We'll use this to determine whether or not we should consider the next
// polling implementation failure to require a fresh metadata. If we were
// able to receive any messages on channel 0, then the current metadata
// should still be valid.
let didReceiveAnyCommand = false;
/**
* Failure can happen due to a number of reasons
* 1- Abrupt socket closure
* 2- Timedout connection request
* 3- ContainerState.SLEEP command
* 4- User calling `close` before we connect
*/
let onFailed: ((err: Error) => void) | null = null;
ws.onerror = () => {
if (!onFailed) {
this.onUnrecoverableError(new Error('Got websocket error but no `onFailed` cb'));
return;
}
onFailed(new Error('WebSocket errored'));
};
/**
* Abrupt socket closures should report failed
*/
ws.onclose = (event: CloseEvent | Event) => {
if (!onFailed) {
this.onUnrecoverableError(new Error('Got websocket closure but no `onFailed` cb'));
return;
}
if (WebSocketClass === EIOCompat) {
if (!didReceiveAnyCommand) {
// The polling implementation doesn't convey the Websocket close
// event. Let's assume that we need to request a new token.
this.connectionMetadata = null;
}
} else if ('code' in event) {
const closeEvent = <CloseEvent>event;
const closeCodePolicyViolation = 1008;
if (closeEvent.code === closeCodePolicyViolation) {
// This means that the token was rejected. We need to fetch another one.
this.connectionMetadata = null;
}
}
onFailed(new Error('WebSocket closed before we got READY'));
};
ws.onopen = () => {
if (WebSocketClass === EIOCompat) {
return;
}
// From this point on, we count this connection as successful.
didWebsocketsWork = true;
};
/**
* If the user specifies a timeout we will short circuit
* the connection if we don't get READY from the container
* within the specified timeout.
*
* Every time we get a message we reset the connection timeout
* this is because it signifies that the connection will eventually work.
*/
let resetTimeout = () => {};
let cancelTimeout = () => {};
const { timeout } = this.connectOptions;
if (timeout !== null) {
let timeoutId: ReturnType<typeof setTimeout>; // Can also be of type `number` in the browser
cancelTimeout = () => {
this.debug({ type: 'breadcrumb', message: 'cancel timeout' });
clearTimeout(timeoutId);
};
resetTimeout = () => {
this.debug({ type: 'breadcrumb', message: 'reset timeout' });
clearTimeout(timeoutId);
timeoutId = setTimeout(() => {
this.debug({ type: 'breadcrumb', message: 'connect timeout' });
if (!onFailed) {
this.onUnrecoverableError(
new Error('Connecting timed out but there was no `onFailed` cb'),
);
return;
}
onFailed(new Error('timeout'));
}, timeout);
};
resetTimeout();
}
/**
* Listen to incoming commands
* Every time we get a message we reset the connection timeout (if it exists)
* this is because it signifies that the connection will eventually work.
*
* If we ever get a ContainerState READY we can officially
* say that the connection is successful and we open chan0 and other `chanReq`s
*
* If we ever get ContainerState SLEEP it means that something went wrong
* and connection should be dropped
*/
const unlistenChan0 = chan0.onCommand((cmd: api.Command) => {
didReceiveAnyCommand = true;
// Everytime we get a message on channel0
// we will reset the timeout
resetTimeout();
if (cmd.containerState == null) {
return;
}
if (cmd.containerState.state == null) {
this.onUnrecoverableError(new Error('Got containterState but state was not defined'));
return;
}
const { state } = cmd.containerState;
this.debug({
type: 'breadcrumb',
message: 'containerState',
data: state,
});
const StateEnum = api.ContainerState.State;
switch (state) {
case StateEnum.READY: {
// Once we're READY we can stop listening to incoming commands
unlistenChan0();
cancelTimeout();
if (!this.connectOptions) {
this.onUnrecoverableError(new Error('Expected connectionOptions'));
return;
}
if (!chan0) {
this.onUnrecoverableError(new Error('Expected chan0 to be truthy'));
return;
}
if (!this.chan0Cb) {
this.onUnrecoverableError(new Error('Expected chan0Cb to be truthy'));
return;
}
this.handleConnect();
// defer closing if the user decides to call client.close inside chan0Cb
const originalClose = this.close;
this.close = () =>
setTimeout(() => {
originalClose();
}, 0);
this.chan0CleanupCb = this.chan0Cb({
channel: chan0,
error: null,
context: this.connectOptions.context,
});
this.close = originalClose;
break;
}
case StateEnum.SLEEP:
if (!onFailed) {
this.onUnrecoverableError(new Error('Got SLEEP but there was no `onFailed` cb'));
return;
}
onFailed(new Error('Got SLEEP as container state'));
break;
default:
}
});
onFailed = (error: Error) => {
// Make sure this function is not called multiple times.
onFailed = null;
// Cleanup related to this connection try. If we retry connecting a new `WebSocket` instance
// will be used in addition to new `cancelTimeout` and `unlistenChan0` functions.
this.cleanupSocket();
cancelTimeout();
unlistenChan0();
this.retryConnect({
tryCount: tryCount + 1,
websocketFailureCount: didWebsocketsWork ? 0 : websocketFailureCount + 1,
chan0,
error,
});
};
};
/**
 * Attempt to reconnect after a short delay.
 *
 * Schedules exactly one pending retry (guarded by `retryTimeoutId`) with a
 * delay computed by `getNextRetryDelay(tryCount)`. When the timer fires,
 * chan0 is closed with `willReconnect: true`, the client is moved back to
 * DISCONNECTED, and `connect` is invoked again with the given counters.
 *
 * @hidden
 */
private retryConnect = ({
tryCount,
websocketFailureCount,
chan0,
error,
}: {
tryCount: number;
websocketFailureCount: number;
chan0: Channel;
error: Error;
}) => {
// Only one retry may be in flight at a time.
if (this.retryTimeoutId) {
this.onUnrecoverableError(new Error('unexpected existing retryTimeoutId'));
return;
}
if (!this.chan0Cb) {
// User called close
// TODO (masad-frost) something more explicit here
// might be the way to go
return;
}
this.retryTimeoutId = setTimeout(() => {
// The client may have been closed while this retry was pending.
if (!this.chan0Cb) {
this.onUnrecoverableError(new Error('Scheduled retry is called after we closed?'));
return;
}
this.retryTimeoutId = null;
this.debug({
type: 'breadcrumb',
message: 'retrying',
data: {
connectionState: this.connectionState,
connectTries: tryCount,
websocketFailureCount,
error,
wsReadyState: this.ws ? this.ws.readyState : undefined,
},
});
// Tear down chan0 bookkeeping (connect will re-create it) and reset state.
chan0.handleClose({ initiator: 'client', willReconnect: true });
delete this.channels[0];
this.connectionState = ConnectionState.DISCONNECTED;
this.connect({ tryCount, websocketFailureCount });
}, getNextRetryDelay(tryCount));
};
/**
 * Serialize a protobuf command and write it to the active websocket.
 *
 * @hidden
 */
private send = (cmd: api.Command) => {
  // Log outgoing traffic before serializing.
  this.debug({ type: 'log', log: { direction: 'out', cmd } });
  // Encode, then slice out exactly the bytes the message occupies — the
  // encoder's backing ArrayBuffer may be larger than the message itself.
  const encoded = api.Command.encode(cmd).finish();
  const start = encoded.byteOffset;
  const payload = encoded.buffer.slice(start, start + encoded.length);
  if (this.ws == null) {
    this.onUnrecoverableError(new Error('Calling send on a closed client'));
    return;
  }
  this.ws.send(payload);
};
/**
 * Decode an incoming binary frame and dispatch it to its target channel.
 *
 * @hidden
 */
private onSocketMessage = ({ data }: MessageEvent) => {
  const bytes = new Uint8Array(data);
  const cmd = api.Command.decode(bytes);
  this.debug({ type: 'log', log: { direction: 'in', cmd } });
  // Pass it to the right channel
  this.getChannel(cmd.channel).handleCommand(cmd);
};
/**
 * Called when chan0 connects. Opens all other required channels
 *
 * Marks the client CONNECTED, swaps the socket's close/error handlers from
 * the connection-phase failure path over to the steady-state disconnect
 * path, and issues open requests for every registered channel request.
 *
 * @hidden
 */
private handleConnect = () => {
this.connectionState = ConnectionState.CONNECTED;
this.debug({ type: 'breadcrumb', message: 'connected!' });
if (!this.ws) {
this.onUnrecoverableError(new Error('Expected Websocket instance'));
return;
}
// Update socket closure to do something else
const onClose = (event: CloseEvent | Event) => {
// A close event while already DISCONNECTED indicates a bookkeeping bug.
if (this.connectionState === ConnectionState.DISCONNECTED) {
this.onUnrecoverableError(
new Error('Got a close event on socket but client is in disconnected state'),
);
return;
}
this.debug({
type: 'breadcrumb',
message: 'wsclose',
data: {
event,
},
});
this.handleClose({
closeReason: ClientCloseReason.Disconnected,
wsEvent: event,
});
};
this.ws.onclose = onClose;
// Once connected treat any future error as a close event
// eslint-disable-next-line
// @ts-ignore seems like a type issue related to browser/node env
this.ws.onerror = onClose;
// Re-issue an open request for every channel the user registered.
this.channelRequests.forEach((channelRequest) => {
this.requestOpenChannel(channelRequest);
});
};
/**
 * Central teardown path for the client. Runs after an explicit close, an
 * unrecoverable error, or a websocket disconnect (`closeResult.closeReason`
 * distinguishes them). Cleans up the socket and channel bookkeeping, fires
 * user cleanup/open callbacks, and either finishes (close/error) or kicks
 * off a reconnect (disconnect).
 *
 * @hidden
 */
private handleClose = (closeResult: CloseResult) => {
if (closeResult.closeReason !== ClientCloseReason.Error) {
// If we got here as a result of an error we'll ignore these assertions to avoid
// infinite recursion in onUnrecoverableError
if (this.connectionState === ConnectionState.DISCONNECTED) {
this.onUnrecoverableError(
new Error('handleClose is called but client already disconnected'),
);
return;
}
if (this.ws && this.fetchTokenAbortController) {
// Fetching connection metadata is required prior to initializing a
// websocket, we can't have both at the same time as the abort
// controller is unset after we fetch the connection metadata.
this.onUnrecoverableError(
new Error('fetchTokenAbortController and websocket exist simultaneously'),
);
return;
}
}
this.cleanupSocket();
if (this.retryTimeoutId) {
// Client was closed while reconnecting
clearTimeout(this.retryTimeoutId);
}
// Only a plain disconnect triggers an automatic reconnect; explicit close
// and unrecoverable errors do not.
const willClientReconnect = closeResult.closeReason === ClientCloseReason.Disconnected;
this.channelRequests.forEach((channelRequest) => {
const willChannelReconnect: boolean = willClientReconnect && !channelRequest.closeRequested;
if (channelRequest.isOpen) {
const channel = this.getChannel(channelRequest.channelId);
channel.handleClose({
initiator: 'client',
willReconnect: willChannelReconnect,
});
delete this.channels[channelRequest.channelId];
} else if (!willChannelReconnect) {
// channel won't reconnect and was never opened
// we'll call the open channel callback with an error
channelRequest.openChannelCb({
channel: null,
error: new Error('Failed to open'),
context: this.connectOptions ? this.connectOptions.context : null,
});
}
const { cleanupCb, closeRequested } = channelRequest;
// Re-set the channel request's state
// TODO we should stop relying on mutating the same channelrequest
(channelRequest as ChannelRequest<Ctx>).channelId = null;
(channelRequest as ChannelRequest<Ctx>).isOpen = false;
(channelRequest as ChannelRequest<Ctx>).cleanupCb = null;
(channelRequest as ChannelRequest<Ctx>).closeRequested = false;
if (cleanupCb) {
// Call the cleanupCb after we update the values
// on the channelRequest to make sure any cascading effects
// have the right values for channelRequest
cleanupCb({
initiator: 'client',
willReconnect: willChannelReconnect,
});
}
if (closeRequested || channelRequest.closeRequested) {
// Channel closed earlier but we couldn't process the close request
// or closed during cleanupCb that we just called
this.channelRequests = this.channelRequests.filter((cr) => cr !== channelRequest);
}
});
// chan0 is not part of channelRequests; close it separately.
if (this.channels[0]) {
this.channels[0].handleClose({
initiator: 'client',
willReconnect: willClientReconnect,
});
delete this.channels[0];
}
if (Object.keys(this.channels).length !== 0) {
this.channels = {};
if (closeResult.closeReason !== ClientCloseReason.Error) {
// if we got here as a result of an error we're not gonna call onUnrecoverableError again
this.onUnrecoverableError(
new Error('channels object should be empty after channelRequests and chan0 cleanup'),
);
return;
}
}
if (this.chan0CleanupCb) {
// Client successfully connected once
this.chan0CleanupCb({
initiator: 'client',
willReconnect: willClientReconnect,
});
this.chan0CleanupCb = null;
} else if (!willClientReconnect) {
// Never connected and not reconnecting: surface the failure via chan0Cb.
if (this.chan0Cb) {
this.chan0Cb({
channel: null,
error: new Error('Failed to open'),
context: this.connectOptions ? this.connectOptions.context : null,
});
} else if (closeResult.closeReason !== ClientCloseReason.Error) {
// if we got here as a result of an error we're not gonna call onUnrecoverableError again
this.onUnrecoverableError(new Error('open should have been called before `handleClose`'));
return;
}
}
this.connectionState = ConnectionState.DISCONNECTED;
if (!willClientReconnect) {
// Client is done being used until the next `open` call
this.chan0Cb = null;
this.connectOptions = null;
return;
}
this.debug({
type: 'breadcrumb',
message: 'reconnecting',
});
this.connect({ tryCount: 0, websocketFailureCount: 0 });
};
/**
 * Detach all handlers from the current websocket (if any), null it out on
 * the client, and close it if it is still connecting or open.
 *
 * @hidden
 */
private cleanupSocket = () => {
const { ws } = this;
this.debug({
type: 'breadcrumb',
message: 'cleanupSocket',
data: {
hasWs: Boolean(ws),
readyState: ws ? ws.readyState : null,
connectionState: this.connectionState,
},
});
if (!ws) {
return;
}
this.ws = null;
ws.onmessage = null;
ws.onclose = null;
ws.onopen = null;
// Replace existing error handler so an error doesn't get thrown.
// We got here via `handleClose` or a connection-failure path, so it is
// safe to ignore any potential remaining errors
ws.onerror = () => {};
// 0 = CONNECTING, 1 = OPEN: only these states need an explicit close().
if (ws.readyState === 0 || ws.readyState === 1) {
this.debug({
type: 'breadcrumb',
message: 'wsclose',
});
ws.close();
}
};
/**
 * Last-resort error path. Logs a breadcrumb, best-effort closes the client
 * (guarding against recursive failures inside `handleClose`), then hands
 * the error to the user-supplied handler — or rethrows when none was
 * registered.
 *
 * @hidden
 */
private onUnrecoverableError = (e: Error) => {
this.debug({
type: 'breadcrumb',
message: 'unrecoverable error',
data: {
message: e.message,
},
});
// Only attempt teardown if we haven't already disconnected.
if (this.connectionState !== ConnectionState.DISCONNECTED) {
try {
this.handleClose({
closeReason: ClientCloseReason.Error,
error: e,
});
} catch (handleCloseErr) {
// We tried our best to clean up. But we need to keep going and report
// unrecoverable error regardless of what happens inside handleClose
// eslint-disable-next-line no-console
console.error('handleClose errored during unrecoverable error');
// eslint-disable-next-line no-console
console.error(handleCloseErr);
}
}
if (this.userUnrecoverableErrorHandler) {
this.userUnrecoverableErrorHandler(e);
return;
}
// eslint-disable-next-line no-console
console.error('Please supply your own unrecoverable error handling function');
throw e;
};
} | the_stack |
import { Injectable } from '@angular/core';
import { LocationStrategy } from '@angular/common';
import { DefaultUrlSerializer, UrlSegmentGroup, UrlTree, ActivatedRouteSnapshot, Params } from '@angular/router';
import { Frame } from '@nativescript/core';
import { NativeScriptDebug } from '../trace';
import { isPresent } from '../lang-facade';
import { FrameService } from '../frame.service';
import { Outlet, NavigationOptions, LocationState, defaultNavOptions } from './ns-location-utils';
@Injectable({
providedIn: 'root',
})
export class NSLocationStrategy extends LocationStrategy {
private outlets: Array<Outlet> = [];
private currentOutlet: Outlet;
private popStateCallbacks = new Array<(_: any) => any>();
private _currentNavigationOptions: NavigationOptions;
private currentUrlTree: UrlTree;
public _modalNavigationDepth = 0;
// Strategy state starts empty; outlets are created lazily as urls are pushed.
constructor(private frameService: FrameService) {
super();
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.constructor()');
}
}
/**
 * Serialize the current location to a url string.
 *
 * Grafts the current outlet's top state (segment group + query params) into
 * `currentUrlTree` before serializing. NOTE(review): this mutates
 * `this.currentUrlTree` (root / queryParams) as a side effect — confirm
 * that is intentional before refactoring.
 */
path(): string {
if (!this.currentUrlTree) {
return '/';
}
const state = this.currentOutlet && this.currentOutlet.peekState();
if (!state) {
return '/';
}
let tree = this.currentUrlTree;
let changedOutlet = this.getSegmentGroupByOutlet(this.currentOutlet);
// Handle case where the user declares a component at path "/".
// The url serializer doesn't parse this url as having a primary outlet.
if (state.isRootSegmentGroup) {
tree.root = state.segmentGroup;
} else if (changedOutlet) {
this.updateSegmentGroup(tree.root, changedOutlet, state.segmentGroup);
}
const urlSerializer = new DefaultUrlSerializer();
tree.queryParams = state.queryParams;
const url = urlSerializer.serialize(tree);
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.path(): ' + url);
}
return url;
}
// NativeScript has no external-url concept; return the internal url as-is.
prepareExternalUrl(internal: string): string {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.prepareExternalUrl() internal: ' + internal);
}
return internal;
}
// LocationStrategy API entry point; defers to pushStateInternal so that
// replaceState can reuse the same logic.
pushState(state: any, title: string, url: string, queryParams: string): void {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.pushState state: ' + `${state}, title: ${title}, url: ${url}, queryParams: ${queryParams}`);
}
this.pushStateInternal(state, title, url, queryParams);
}
/**
 * Core state-push logic shared by pushState and replaceState.
 *
 * Parses `url` into a UrlTree, then walks it breadth-first, mapping every
 * named child segment group onto an Outlet: creating outlets that don't
 * exist yet, routing modal navigation through upsertModalOutlet, and
 * otherwise pushing a new LocationState onto the matching outlet.
 */
pushStateInternal(state: any, title: string, url: string, queryParams: string): void {
const urlSerializer = new DefaultUrlSerializer();
this.currentUrlTree = urlSerializer.parse(url);
const urlTreeRoot = this.currentUrlTree.root;
// Handle case where the user declares a component at path "/".
// The url serializer doesn't parse this url as having a primary outlet.
if (!Object.keys(urlTreeRoot.children).length) {
const segmentGroup = this.currentUrlTree && this.currentUrlTree.root;
const outletKey = this.getOutletKey(this.getSegmentGroupFullPath(segmentGroup), 'primary');
const outlet = this.findOutlet(outletKey);
if (outlet && this.updateStates(outlet, segmentGroup, this.currentUrlTree.queryParams)) {
this.currentOutlet = outlet; // If states updated
} else if (!outlet) {
// tslint:disable-next-line:max-line-length
const rootOutlet = this.createOutlet('primary', null, segmentGroup, null, null, this.currentUrlTree.queryParams);
this.currentOutlet = rootOutlet;
}
this.currentOutlet.peekState().isRootSegmentGroup = true;
return;
}
// Breadth-first traversal of the parsed url tree.
const queue = [];
let currentTree = <any>urlTreeRoot;
while (currentTree) {
Object.keys(currentTree.children).forEach((outletName) => {
const currentSegmentGroup = currentTree.children[outletName];
// Stamp the group with its outlet name and root for later lookups.
currentSegmentGroup.outlet = outletName;
currentSegmentGroup.root = urlTreeRoot;
const outletPath = this.getSegmentGroupFullPath(currentTree);
let outletKey = this.getOutletKey(outletPath, outletName);
let outlet = this.findOutlet(outletKey);
const parentOutletName = currentTree.outlet || '';
const parentOutletPath = this.getSegmentGroupFullPath(currentTree.parent);
const parentOutletKey = this.getOutletKey(parentOutletPath, parentOutletName);
const parentOutlet = this.findOutlet(parentOutletKey);
const containsLastState = outlet && outlet.containsTopState(currentSegmentGroup.toString());
if (!outlet) {
// tslint:disable-next-line:max-line-length
outlet = this.createOutlet(outletKey, outletPath, currentSegmentGroup, parentOutlet, this._modalNavigationDepth, this.currentUrlTree.queryParams);
this.currentOutlet = outlet;
} else if (this._modalNavigationDepth > 0 && outlet.showingModal && !containsLastState) {
// Navigation inside modal view.
this.upsertModalOutlet(outlet, currentSegmentGroup, this.currentUrlTree.queryParams);
} else {
outlet.parent = parentOutlet;
if (this.updateStates(outlet, currentSegmentGroup, this.currentUrlTree.queryParams)) {
this.currentOutlet = outlet; // If states updated
}
}
queue.push(currentSegmentGroup);
});
currentTree = queue.shift();
}
}
/**
 * LocationStrategy API. When the current outlet has no states yet this
 * behaves exactly like pushState.
 *
 * NOTE(review): the "changing existing state" branch only logs — no state
 * is actually replaced here; presumably the mutation happens elsewhere.
 * Confirm before relying on replace semantics.
 */
replaceState(state: any, title: string, url: string, queryParams: string): void {
const states = this.currentOutlet && this.currentOutlet.states;
if (states && states.length > 0) {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.replaceState changing existing state: ' + `${state}, title: ${title}, url: ${url}, queryParams: ${queryParams}`);
}
} else {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.replaceState pushing new state: ' + `${state}, title: ${title}, url: ${url}, queryParams: ${queryParams}`);
}
this.pushStateInternal(state, title, url, queryParams);
}
}
// NativeScript navigation has no forward stack.
forward(): void {
throw new Error('NSLocationStrategy.forward() - not implemented');
}
/**
 * Navigate back one location state.
 *
 * Two modes: when the outlet is mid frame-initiated back navigation
 * (isPageNavigationBack), pop states down to the previous page state and
 * emit a popstate; otherwise, if the top state is a page navigation,
 * delegate to the frame's goBack(), and if not, just pop the nested state.
 */
back(outlet?: Outlet, frame?: Frame): void {
this.currentOutlet = outlet || this.currentOutlet;
if (this.currentOutlet.isPageNavigationBack) {
const states = this.currentOutlet.states;
// We are navigating to the previous page
// clear the stack until we get to a page navigation state
let state = states.pop();
let count = 1;
// NOTE(review): states.pop() returns undefined once the stack is empty;
// these loops assume a page-navigation state is always present — confirm
// that invariant holds.
if (frame) {
while (state.frame && state.frame !== frame) {
state = states.pop();
count++;
}
}
while (!state.isPageNavigation) {
state = states.pop();
count++;
}
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog(`NSLocationStrategy.back() while navigating back. States popped: ${count}`);
}
this.callPopState(state, true);
} else {
let state = this.currentOutlet.peekState();
if (state && state.isPageNavigation) {
// This was a page navigation - so navigate through frame.
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.back() while not navigating back but top' + ' state is page - will call frame.goBack()');
}
// When no outlet is given, target the topmost visible frame's outlet.
if (!outlet) {
const topmostFrame = this.frameService.getFrame();
this.currentOutlet = this.getOutletByFrame(topmostFrame) || this.currentOutlet;
}
const frameToBack: Frame = this.currentOutlet.getFrameToBack();
if (frameToBack) {
frameToBack.goBack();
}
} else {
// Nested navigation - just pop the state
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.back() while not navigating back but top' + ' state is not page - just pop');
}
this.callPopState(this.currentOutlet.states.pop(), true);
}
}
}
canGoBack(outlet?: Outlet) {
  // Back navigation is possible only when there is at least one state
  // beneath the current one.
  const target = outlet || this.currentOutlet;
  return target.states.length > 1;
}
onPopState(callback: (_: any) => any): void {
  if (NativeScriptDebug.isLogEnabled()) {
    NativeScriptDebug.routerLog('NSLocationStrategy.onPopState');
  }
  // Remember the subscriber; callPopState() notifies all registered ones.
  this.popStateCallbacks.push(callback);
}
// There is no document base href in a NativeScript app.
getBaseHref(): string {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.getBaseHref()');
}
return '';
}
/**
 * Rebuild the current url tree after a pop and notify every popstate
 * subscriber with the serialized url.
 *
 * @param state state whose segmentGroup replaces the outlet's group in the
 * tree; when null the outlet's segment group is removed entirely.
 * @param pop forwarded to subscribers as the `pop` flag of the change.
 * @param outlet outlet whose segment group changed; defaults to the current one.
 */
private callPopState(state: LocationState, pop: boolean = true, outlet?: Outlet) {
outlet = outlet || this.currentOutlet;
const urlSerializer = new DefaultUrlSerializer();
let changedOutlet = this.getSegmentGroupByOutlet(outlet);
if (state && changedOutlet) {
this.updateSegmentGroup(this.currentUrlTree.root, changedOutlet, state.segmentGroup);
} else if (changedOutlet) {
// when closing modal view there are scenarios (e.g. root viewContainerRef) when we need
// to clean up the named page router outlet to make sure we will open the modal properly again if needed.
this.updateSegmentGroup(this.currentUrlTree.root, changedOutlet, null);
}
const url = urlSerializer.serialize(this.currentUrlTree);
const change = { url: url, pop: pop };
for (let fn of this.popStateCallbacks) {
fn(change);
}
}
/**
 * Debug dump: one line per location state, newest first within each outlet.
 */
public toString() {
  const perOutlet = this.outlets.map((outlet) =>
    outlet.states
      // tslint:disable-next-line:max-line-length
      .map((v, i) => `${outlet.outletKeys}.${i}.[${v.isPageNavigation ? 'PAGE' : 'INTERNAL'}].[${outlet.modalNavigationDepth ? 'MODAL' : 'BASE'}] "${v.segmentGroup.toString()}"`)
      .reverse(),
  );
  let flattened: string[] = [];
  for (const group of perOutlet) {
    flattened = flattened.concat(group);
  }
  return flattened.join('\n');
}
// Methods for syncing with page navigation in PageRouterOutlet
/**
 * Mark the outlet that owns `frame` as being in a back page navigation so
 * the next back() pops states instead of re-triggering frame navigation.
 */
public _beginBackPageNavigation(frame: Frame) {
const outlet: Outlet = this.getOutletByFrame(frame);
if (!outlet || outlet.isPageNavigationBack) {
// Unknown frame or re-entrant call: report and bail.
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerError('Attempted to call startGoBack while going back.');
}
return;
}
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.startGoBack()');
}
outlet.isPageNavigationBack = true;
this.currentOutlet = outlet;
}
/**
 * Clear the back-page-navigation flag set by _beginBackPageNavigation once
 * the frame's back navigation completes.
 */
public _finishBackPageNavigation(frame: Frame) {
const outlet: Outlet = this.getOutletByFrame(frame);
if (!outlet || !outlet.isPageNavigationBack) {
// Unknown frame or no back navigation in progress: report and bail.
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerError('Attempted to call endGoBack while not going back.');
}
return;
}
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.finishBackPageNavigation()');
}
outlet.isPageNavigationBack = false;
}
/**
 * Record that a modal is being opened from `frame`, bumping the modal depth
 * used to scope subsequent outlet lookups.
 */
public _beginModalNavigation(frame: Frame): void {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy._beginModalNavigation()');
}
this.currentOutlet = this.getOutletByFrame(frame) || this.currentOutlet;
// It is possible to have frame, but not corresponding Outlet, if
// showing modal dialog on app.component.ts ngOnInit() e.g. In that case
// the modal is treated as none modal navigation.
if (this.currentOutlet) {
this.currentOutlet.showingModal = true;
this._modalNavigationDepth++;
}
}
/**
 * Unwind one level of modal navigation: decrement the modal depth, re-point
 * currentOutlet at the outlet for the remaining depth (or the topmost
 * frame's outlet), and emit a non-pop popstate so the url reflects the
 * modal being closed.
 */
public _closeModalNavigation() {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy.closeModalNavigation()');
}
const isShowingModal = this._modalNavigationDepth > 0;
if (isShowingModal) {
this._modalNavigationDepth--;
}
// currentOutlet should be the one that corresponds to the topmost frame
const topmostOutlet = this.getOutletByFrame(this.frameService.getFrame());
const outlet = this.findOutletByModal(this._modalNavigationDepth, isShowingModal) || topmostOutlet;
if (outlet) {
this.currentOutlet = outlet;
this.currentOutlet.showingModal = false;
this.callPopState(this.currentOutlet.peekState(), false);
}
}
/**
 * Called right before a frame page navigation. Marks the outlet's top state
 * as a page navigation and consumes the one-shot navigation options.
 *
 * @returns the navigation options to apply (defaults when none were set).
 */
public _beginPageNavigation(frame: Frame): NavigationOptions {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy._beginPageNavigation()');
}
this.currentOutlet = this.getOutletByFrame(frame) || this.currentOutlet;
const lastState = this.currentOutlet.peekState();
if (lastState) {
lastState.isPageNavigation = true;
}
const navOptions = this._currentNavigationOptions || defaultNavOptions;
if (navOptions.clearHistory) {
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy._beginPageNavigation clearing states history');
}
// NOTE(review): if lastState is undefined this leaves [undefined] in the
// states array — confirm an outlet always has at least one state here.
this.currentOutlet.states = [lastState];
}
// Navigation options are one-shot: reset after use.
this._currentNavigationOptions = undefined;
return navOptions;
}
/**
 * Stash one-shot navigation options consumed by the next
 * _beginPageNavigation call. clearHistory defaults to false and animated
 * defaults to true when not provided.
 */
public _setNavigationOptions(options: NavigationOptions) {
this._currentNavigationOptions = {
clearHistory: isPresent(options.clearHistory) ? options.clearHistory : false,
animated: isPresent(options.animated) ? options.animated : true,
transition: options.transition,
};
if (NativeScriptDebug.isLogEnabled()) {
NativeScriptDebug.routerLog('NSLocationStrategy._setNavigationOptions(' + `${JSON.stringify(this._currentNavigationOptions)})`);
}
}
/** Expose the strategy's internal outlet list. */
public _getOutlets(): Array<Outlet> {
return this.outlets;
}
/**
 * Associate `frame` with `outlet` and make the outlet current.
 */
updateOutletFrame(outlet: Outlet, frame: Frame, isEmptyOutletFrame: boolean) {
  // Attach the frame to the latest state unless it already has one or this
  // is an empty-outlet frame.
  const topState = outlet.peekState();
  if (topState && !topState.frame && !isEmptyOutletFrame) {
    topState.frame = frame;
  }
  // Track the frame on the outlet exactly once.
  const alreadyTracked = outlet.containsFrame(frame);
  if (!alreadyTracked) {
    outlet.frames.push(frame);
  }
  this.currentOutlet = outlet;
}
/**
 * Remove `frame` from outlet bookkeeping when it is destroyed, dropping
 * outlets that no longer own any frame and removing them from the url tree.
 */
clearOutlet(frame: Frame) {
this.outlets = this.outlets.filter((currentOutlet) => {
let isEqualToCurrent;
if (this.currentOutlet) {
isEqualToCurrent = currentOutlet.pathByOutlets === this.currentOutlet.pathByOutlets;
}
// Remove outlet from the url tree.
if (currentOutlet.containsFrame(frame) && !isEqualToCurrent) {
this.callPopState(null, true, currentOutlet);
}
// Skip frames filtering since currentOutlet is <router-outlet> when no frames available.
if (currentOutlet.frames.length && !currentOutlet.isNSEmptyOutlet) {
currentOutlet.frames = currentOutlet.frames.filter((currentFrame) => currentFrame !== frame);
// Keep the outlet only while it still owns at least one frame.
return currentOutlet.frames.length;
}
return !currentOutlet.containsFrame(frame);
});
}
/**
 * Build the '/'-joined url of a segment group by walking from it up to the
 * root, prepending each non-empty ancestor url.
 */
getSegmentGroupFullPath(segmentGroup: UrlSegmentGroup): string {
  let fullPath = '';
  for (let current = segmentGroup; current; current = current.parent) {
    const url = current.toString();
    if (!fullPath) {
      fullPath = url;
    } else if (url) {
      fullPath = url + '/' + fullPath;
    }
  }
  return fullPath;
}
/**
 * Build "<ancestor urls joined by '/'>-<outletName>" for a route by walking
 * its parent chain; returns just the outlet name when no ancestor has a url.
 */
getRouteFullPath(currentRoute: any): string {
  const outletName = currentRoute.outlet;
  let fullPath;
  for (let route = currentRoute.parent; route; route = route.parent) {
    const rawUrl = route.url.value || route.url;
    const url = Array.isArray(rawUrl) ? rawUrl.join('/') : rawUrl;
    if (fullPath) {
      fullPath = (url ? url + '/' : url) + fullPath;
    } else {
      fullPath = url;
    }
  }
  return fullPath ? fullPath + '-' + outletName : outletName;
}
/**
 * Compute the dash-joined outlet path (e.g. "primary-second") for a segment
 * group or route snapshot by walking its parent chain up to the root.
 */
getPathByOutlets(urlSegmentGroup: any): string {
if (!urlSegmentGroup) {
return '';
}
let pathToOutlet;
let lastPath = urlSegmentGroup.outlet || 'primary';
let parent = urlSegmentGroup.parent;
while (parent && urlSegmentGroup.root !== parent) {
// Only record a transition when the parent's outlet name differs.
if (parent && parent.outlet !== lastPath) {
if (lastPath === 'primary') {
lastPath = parent.outlet;
} else {
lastPath = parent.outlet;
pathToOutlet = lastPath + '-' + (pathToOutlet || urlSegmentGroup.outlet);
}
}
parent = parent.parent;
}
return pathToOutlet || lastPath;
}
/**
 * Look up an outlet by key at the current modal depth, falling back to a
 * match on the full outlet path derived from the route snapshot.
 */
findOutlet(outletKey: string, activatedRouteSnapshot?: ActivatedRouteSnapshot): Outlet {
  const depth = this._modalNavigationDepth;
  let match: Outlet = this.outlets.find(
    (candidate) => candidate.modalNavigationDepth === depth && candidate.outletKeys.indexOf(outletKey) > -1,
  );
  // No outlet with the given key can happen with nested unnamed p-r-o
  // (primary -> primary -> primary); fall back to matching pathByOutlets.
  if (!match && activatedRouteSnapshot) {
    const pathByOutlets = this.getPathByOutlets(activatedRouteSnapshot);
    match = this.outlets.find(
      (candidate) => candidate.modalNavigationDepth === depth && candidate.pathByOutlets === pathByOutlets,
    );
  }
  return match;
}
/**
 * Find an outlet by modal depth; when `isShowingModal` is set the outlet
 * must also currently be showing a modal.
 */
private findOutletByModal(modalNavigation: number, isShowingModal?: boolean): Outlet {
  return this.outlets.find((candidate) => {
    if (candidate.modalNavigationDepth !== modalNavigation) {
      return false;
    }
    return isShowingModal ? candidate.showingModal : true;
  });
}
/**
 * Find the outlet that owns the given frame.
 *
 * @returns the first matching outlet, or `undefined` when no outlet tracks
 * the frame (same as the original manual search loop).
 */
private getOutletByFrame(frame: Frame): Outlet {
  // Idiomatic replacement for the original index-based for-loop search.
  return this.outlets.find((currentOutlet) => currentOutlet.containsFrame(frame));
}
/**
 * Push a new LocationState onto `outlet` unless its top state already holds
 * the same serialized url.
 *
 * @returns true when a state was pushed (callers then promote this outlet
 * to current), false when the top state was already up to date.
 */
private updateStates(outlet: Outlet, currentSegmentGroup: UrlSegmentGroup, queryParams: Params): boolean {
// An outlet with no states yet is getting its first (page) navigation.
const isNewPage = outlet.states.length === 0;
const lastState = outlet.states[outlet.states.length - 1];
const equalStateUrls = outlet.containsTopState(currentSegmentGroup.toString());
const locationState: LocationState = {
segmentGroup: currentSegmentGroup,
isRootSegmentGroup: false,
isPageNavigation: isNewPage,
queryParams: { ...queryParams },
};
if (!lastState || !equalStateUrls) {
outlet.states.push(locationState);
// Update last state segmentGroup of parent Outlet.
if (this._modalNavigationDepth === 0 && !outlet.showingModal) {
this.updateParentsStates(outlet, currentSegmentGroup.parent);
}
return true;
}
return false;
}
/**
 * Walk up the outlet chain in lock-step with the segment-group parent chain,
 * refreshing each parent's latest state with the corresponding segment group.
 */
private updateParentsStates(outlet: Outlet, newSegmentGroup: UrlSegmentGroup) {
  let parentOutlet = outlet.parent;
  // Update parents lastState segmentGroups
  while (parentOutlet && newSegmentGroup) {
    const state = parentOutlet.peekState();
    if (state) {
      state.segmentGroup = newSegmentGroup;
    }
    // Advance unconditionally. The original only advanced when a state was
    // present, so a parent outlet with an empty states array made this loop
    // spin forever (neither loop variable changed).
    newSegmentGroup = newSegmentGroup.parent;
    parentOutlet = parentOutlet.parent;
  }
}
/**
 * Create, register, and return a new Outlet seeded with a single page
 * LocationState for `segmentGroup`.
 */
// tslint:disable-next-line:max-line-length
private createOutlet(outletKey: string, path: string, segmentGroup: any, parent: Outlet, modalNavigation?: number, queryParams: Params = {}): Outlet {
const pathByOutlets = this.getPathByOutlets(segmentGroup);
const newOutlet = new Outlet(outletKey, path, pathByOutlets, modalNavigation);
const locationState: LocationState = {
segmentGroup: segmentGroup,
isRootSegmentGroup: false,
isPageNavigation: true, // It is a new OutletNode.
queryParams: { ...queryParams },
};
newOutlet.states = [locationState];
newOutlet.parent = parent;
this.outlets.push(newOutlet);
// Update last state segmentGroup of parent Outlet.
if (this._modalNavigationDepth === 0 && !newOutlet.showingModal) {
this.updateParentsStates(newOutlet, segmentGroup.parent);
}
return newOutlet;
}
/**
 * Resolve the UrlSegmentGroup in the current url tree that corresponds to
 * `outlet`, following its pathByOutlets chain down from the root. Returns
 * null when the chain or the outlet's recorded path does not match.
 */
private getSegmentGroupByOutlet(outlet: Outlet): UrlSegmentGroup {
const pathList = outlet.pathByOutlets.split('-');
let segmentGroup = this.currentUrlTree.root;
let pathToOutlet;
for (let index = 0; index < pathList.length; index++) {
const currentPath = pathList[index];
const childrenCount = Object.keys(segmentGroup.children).length;
if (childrenCount && segmentGroup.children[currentPath]) {
const url = segmentGroup.toString();
pathToOutlet = pathToOutlet ? pathToOutlet + '/' + url : url;
segmentGroup = segmentGroup.children[currentPath];
} else {
// If no child outlet found with the given name - forget about all previously found outlets.
// example: searching for 'primary-second-primary' shouldn't return 'primary-second'
// if no 'primary' child available on 'second'.
segmentGroup = null;
break;
}
}
// Paths should also match since there could be another Outlet
// with the same pathByOutlets but different url path.
if (segmentGroup && outlet.path && pathToOutlet && outlet.path !== pathToOutlet) {
segmentGroup = null;
}
return segmentGroup;
}
// Traversal and replacement of segmentGroup.
/**
 * Breadth-first walk over the segment-group tree rooted at `rootNode`,
 * replacing every child slot that holds `oldSegmentGroup` with
 * `newSegmentGroup` (or deleting the slot when `newSegmentGroup` is falsy).
 */
private updateSegmentGroup(rootNode: any, oldSegmentGroup: UrlSegmentGroup, newSegmentGroup: UrlSegmentGroup) {
  const queue = [];
  let current = rootNode;

  while (current) {
    for (const outletName of Object.keys(current.children)) {
      if (current.children[outletName] === oldSegmentGroup) {
        if (newSegmentGroup) {
          current.children[outletName] = newSegmentGroup;
        } else {
          delete current.children[outletName];
        }
      }
      const child = current.children[outletName];
      // Only enqueue real nodes. The original pushed the (possibly just
      // deleted) child unconditionally, so a removed entry enqueued
      // `undefined`, and dequeuing it aborted the traversal while other
      // subtrees were still waiting in the queue.
      if (child) {
        queue.push(child);
      }
    }
    current = queue.shift();
  }
}
/**
 * Finds or creates the Outlet backing the modal at the current modal
 * navigation depth (`this._modalNavigationDepth`) and records `segmentedGroup`
 * / `queryParams` on it. A newly created modal outlet always becomes
 * `this.currentOutlet`; an existing one becomes current only when
 * `updateStates` reports a change.
 */
private upsertModalOutlet(parentOutlet: Outlet, segmentedGroup: UrlSegmentGroup, queryParams: Params) {
  let currentModalOutlet = this.findOutletByModal(this._modalNavigationDepth);

  // We want to treat every p-r-o as a standalone Outlet.
  if (!currentModalOutlet) {
    if (this._modalNavigationDepth > 1) {
      // The parent of the current Outlet should be the previous opened modal (if any).
      parentOutlet = this.findOutletByModal(this._modalNavigationDepth - 1);
    }

    // No currentModalOutlet available when opening 'primary' p-r-o.
    // Key the new outlet by the parent's current url path + 'primary'.
    const outletName = 'primary';
    const outletPath = parentOutlet.peekState().segmentGroup.toString();
    const outletKey = this.getOutletKey(outletPath, outletName);
    // tslint:disable-next-line:max-line-length
    currentModalOutlet = this.createOutlet(outletKey, outletPath, segmentedGroup, parentOutlet, this._modalNavigationDepth, queryParams);
    this.currentOutlet = currentModalOutlet;
  } else if (this.updateStates(currentModalOutlet, segmentedGroup, queryParams)) {
    this.currentOutlet = currentModalOutlet; // If states updated
  }
}
/** Composes the unique outlet key from its url path and outlet name. */
private getOutletKey(path: string, outletName: string): string {
  if (!path) {
    return outletName;
  }
  return path + '-' + outletName;
}
/** Angular lifecycle hook: drops all outlet bookkeeping on teardown. */
ngOnDestroy() {
  if (NativeScriptDebug.isLogEnabled()) {
    NativeScriptDebug.routerLog('NSLocationStrategy.ngOnDestroy()');
  }

  this.currentOutlet = null;
  this.outlets = [];
}
} | the_stack |
import { AccessLevelList } from "../shared/access-level";
import { PolicyStatement } from "../shared";
/**
* Statement provider for service [codebuild](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awscodebuild.html).
*
* @param sid [SID](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_sid.html) of the statement
*/
export class Codebuild extends PolicyStatement {
public servicePrefix = 'codebuild';
/**
* Statement provider for service [codebuild](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awscodebuild.html).
*
* @param sid [SID](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_sid.html) of the statement
*/
constructor (sid?: string) {
super(sid);
}
/**
* Deletes one or more builds.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_BatchDeleteBuilds.html
*/
public toBatchDeleteBuilds() {
return this.to('BatchDeleteBuilds');
}
/**
* Gets information about one or more build batches.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_BatchGetBuildBatches.html
*/
public toBatchGetBuildBatches() {
return this.to('BatchGetBuildBatches');
}
/**
* Gets information about one or more builds.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_BatchGetBuilds.html
*/
public toBatchGetBuilds() {
return this.to('BatchGetBuilds');
}
/**
* Gets information about one or more build projects.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_BatchGetProjects.html
*/
public toBatchGetProjects() {
return this.to('BatchGetProjects');
}
/**
* Returns an array of ReportGroup objects that are specified by the input reportGroupArns parameter.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_BatchGetReportGroups.html
*/
public toBatchGetReportGroups() {
return this.to('BatchGetReportGroups');
}
/**
* Returns an array of the Report objects specified by the input reportArns parameter.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_BatchGetReports.html
*/
public toBatchGetReports() {
return this.to('BatchGetReports');
}
/**
* Adds or updates information about a report.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toBatchPutCodeCoverages() {
return this.to('BatchPutCodeCoverages');
}
/**
* Adds or updates information about a report.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toBatchPutTestCases() {
return this.to('BatchPutTestCases');
}
/**
* Creates a build project.
*
* Access Level: Write
*
* Possible conditions:
* - .ifAwsRequestTag()
* - .ifAwsTagKeys()
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_CreateProject.html
*/
public toCreateProject() {
return this.to('CreateProject');
}
/**
* Creates a report. A report is created when tests specified in the buildspec file for a report groups run during the build of a project.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toCreateReport() {
return this.to('CreateReport');
}
/**
* Creates a report group.
*
* Access Level: Write
*
* Possible conditions:
* - .ifAwsRequestTag()
* - .ifAwsTagKeys()
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_CreateReportGroup.html
*/
public toCreateReportGroup() {
return this.to('CreateReportGroup');
}
/**
* For an existing AWS CodeBuild build project that has its source code stored in a GitHub or Bitbucket repository, enables AWS CodeBuild to start rebuilding the source code every time a code change is pushed to the repository.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_CreateWebhook.html
*/
public toCreateWebhook() {
return this.to('CreateWebhook');
}
/**
* Deletes a build batch.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteBuildBatch.html
*/
public toDeleteBuildBatch() {
return this.to('DeleteBuildBatch');
}
/**
* Deletes an OAuth token from a connected third-party OAuth provider. Only used in the AWS CodeBuild console.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toDeleteOAuthToken() {
return this.to('DeleteOAuthToken');
}
/**
* Deletes a build project.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteProject.html
*/
public toDeleteProject() {
return this.to('DeleteProject');
}
/**
* Deletes a report.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteReport.html
*/
public toDeleteReport() {
return this.to('DeleteReport');
}
/**
* Deletes a report group.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteReportGroup.html
*/
public toDeleteReportGroup() {
return this.to('DeleteReportGroup');
}
/**
* Deletes a resource policy for the associated project or report group.
*
* Access Level: Permissions management
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteResourcePolicy.html
*/
public toDeleteResourcePolicy() {
return this.to('DeleteResourcePolicy');
}
/**
* Deletes a set of GitHub, GitHub Enterprise, or Bitbucket source credentials.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteSourceCredentials.html
*/
public toDeleteSourceCredentials() {
return this.to('DeleteSourceCredentials');
}
/**
* For an existing AWS CodeBuild build project that has its source code stored in a GitHub or Bitbucket repository, stops AWS CodeBuild from rebuilding the source code every time a code change is pushed to the repository.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteWebhook.html
*/
public toDeleteWebhook() {
return this.to('DeleteWebhook');
}
/**
* Returns an array of CodeCoverage objects.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DescribeCodeCoverages.html
*/
public toDescribeCodeCoverages() {
return this.to('DescribeCodeCoverages');
}
/**
* Returns an array of TestCase objects.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DescribeTestCases.html
*/
public toDescribeTestCases() {
return this.to('DescribeTestCases');
}
/**
* Analyzes and accumulates test report values for the test reports in the specified report group.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_GetReportGroupTrend.html
*/
public toGetReportGroupTrend() {
return this.to('GetReportGroupTrend');
}
/**
* Returns a resource policy for the specified project or report group.
*
* Access Level: Read
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_GetResourcePolicy.html
*/
public toGetResourcePolicy() {
return this.to('GetResourcePolicy');
}
/**
* Imports the source repository credentials for an AWS CodeBuild project that has its source code stored in a GitHub, GitHub Enterprise, or Bitbucket repository.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ImportSourceCredentials.html
*/
public toImportSourceCredentials() {
return this.to('ImportSourceCredentials');
}
/**
* Resets the cache for a project.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_InvalidateProjectCache.html
*/
public toInvalidateProjectCache() {
return this.to('InvalidateProjectCache');
}
/**
* Gets a list of build batch IDs, with each build batch ID representing a single build batch.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListBuildBatches.html
*/
public toListBuildBatches() {
return this.to('ListBuildBatches');
}
/**
* Gets a list of build batch IDs for the specified build project, with each build batch ID representing a single build batch.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListBuildBatchesForProject.html
*/
public toListBuildBatchesForProject() {
return this.to('ListBuildBatchesForProject');
}
/**
* Gets a list of build IDs, with each build ID representing a single build.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListBuilds.html
*/
public toListBuilds() {
return this.to('ListBuilds');
}
/**
* Gets a list of build IDs for the specified build project, with each build ID representing a single build.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListBuildsForProject.html
*/
public toListBuildsForProject() {
return this.to('ListBuildsForProject');
}
/**
* Lists connected third-party OAuth providers. Only used in the AWS CodeBuild console.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toListConnectedOAuthAccounts() {
return this.to('ListConnectedOAuthAccounts');
}
/**
* Gets information about Docker images that are managed by AWS CodeBuild.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListCuratedEnvironmentImages.html
*/
public toListCuratedEnvironmentImages() {
return this.to('ListCuratedEnvironmentImages');
}
/**
* Gets a list of build project names, with each build project name representing a single build project.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListProjects.html
*/
public toListProjects() {
return this.to('ListProjects');
}
/**
* Returns a list of report group ARNs. Each report group ARN represents one report group.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListReportGroups.html
*/
public toListReportGroups() {
return this.to('ListReportGroups');
}
/**
* Returns a list of report ARNs. Each report ARN representing one report.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListReports.html
*/
public toListReports() {
return this.to('ListReports');
}
/**
* Returns a list of report ARNs that belong to the specified report group. Each report ARN represents one report.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListReportsForReportGroup.html
*/
public toListReportsForReportGroup() {
return this.to('ListReportsForReportGroup');
}
/**
* Lists source code repositories from a connected third-party OAuth provider. Only used in the AWS CodeBuild console.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toListRepositories() {
return this.to('ListRepositories');
}
/**
* Returns a list of project ARNs that have been shared with the requester. Each project ARN represents one project.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListSharedProjects.html
*/
public toListSharedProjects() {
return this.to('ListSharedProjects');
}
/**
* Returns a list of report group ARNs that have been shared with the requester. Each report group ARN represents one report group.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListSharedReportGroups.html
*/
public toListSharedReportGroups() {
return this.to('ListSharedReportGroups');
}
/**
* Returns a list of SourceCredentialsInfo objects.
*
* Access Level: List
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListSourceCredentials.html
*/
public toListSourceCredentials() {
return this.to('ListSourceCredentials');
}
/**
* Saves an OAuth token from a connected third-party OAuth provider. Only used in the AWS CodeBuild console.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toPersistOAuthToken() {
return this.to('PersistOAuthToken');
}
/**
* Creates a resource policy for the associated project or report group.
*
* Access Level: Permissions management
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_PutResourcePolicy.html
*/
public toPutResourcePolicy() {
return this.to('PutResourcePolicy');
}
/**
* Retries a build.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_RetryBuild.html
*/
public toRetryBuild() {
return this.to('RetryBuild');
}
/**
* Retries a build batch.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_RetryBuildBatch.html
*/
public toRetryBuildBatch() {
return this.to('RetryBuildBatch');
}
/**
* Starts running a build.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_StartBuild.html
*/
public toStartBuild() {
return this.to('StartBuild');
}
/**
* Starts running a build batch.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_StartBuildBatch.html
*/
public toStartBuildBatch() {
return this.to('StartBuildBatch');
}
/**
* Attempts to stop running a build.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_StopBuild.html
*/
public toStopBuild() {
return this.to('StopBuild');
}
/**
* Attempts to stop running a build batch.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_StopBuildBatch.html
*/
public toStopBuildBatch() {
return this.to('StopBuildBatch');
}
/**
* Changes the settings of an existing build project.
*
* Access Level: Write
*
* Possible conditions:
* - .ifAwsRequestTag()
* - .ifAwsTagKeys()
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_UpdateProject.html
*/
public toUpdateProject() {
return this.to('UpdateProject');
}
/**
* Changes the public visibility of a project and its builds.
*
* Access Level: Write
*
* Possible conditions:
* - .ifAwsRequestTag()
* - .ifAwsTagKeys()
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_UpdateProjectVisibility.html
*/
public toUpdateProjectVisibility() {
return this.to('UpdateProjectVisibility');
}
/**
* Updates information about a report.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-identity-based-access-control.html#console-policies
*/
public toUpdateReport() {
return this.to('UpdateReport');
}
/**
* Changes the settings of an existing report group.
*
* Access Level: Write
*
* Possible conditions:
* - .ifAwsRequestTag()
* - .ifAwsTagKeys()
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_UpdateReportGroup.html
*/
public toUpdateReportGroup() {
return this.to('UpdateReportGroup');
}
/**
* Updates the webhook associated with an AWS CodeBuild build project.
*
* Access Level: Write
*
* https://docs.aws.amazon.com/codebuild/latest/APIReference/API_UpdateWebhook.html
*/
public toUpdateWebhook() {
return this.to('UpdateWebhook');
}
protected accessLevelList: AccessLevelList = {
"Write": [
"BatchDeleteBuilds",
"BatchPutCodeCoverages",
"BatchPutTestCases",
"CreateProject",
"CreateReport",
"CreateReportGroup",
"CreateWebhook",
"DeleteBuildBatch",
"DeleteOAuthToken",
"DeleteProject",
"DeleteReport",
"DeleteReportGroup",
"DeleteSourceCredentials",
"DeleteWebhook",
"ImportSourceCredentials",
"InvalidateProjectCache",
"PersistOAuthToken",
"RetryBuild",
"RetryBuildBatch",
"StartBuild",
"StartBuildBatch",
"StopBuild",
"StopBuildBatch",
"UpdateProject",
"UpdateProjectVisibility",
"UpdateReport",
"UpdateReportGroup",
"UpdateWebhook"
],
"Read": [
"BatchGetBuildBatches",
"BatchGetBuilds",
"BatchGetProjects",
"BatchGetReportGroups",
"BatchGetReports",
"DescribeCodeCoverages",
"DescribeTestCases",
"GetReportGroupTrend",
"GetResourcePolicy"
],
"Permissions management": [
"DeleteResourcePolicy",
"PutResourcePolicy"
],
"List": [
"ListBuildBatches",
"ListBuildBatchesForProject",
"ListBuilds",
"ListBuildsForProject",
"ListConnectedOAuthAccounts",
"ListCuratedEnvironmentImages",
"ListProjects",
"ListReportGroups",
"ListReports",
"ListReportsForReportGroup",
"ListRepositories",
"ListSharedProjects",
"ListSharedReportGroups",
"ListSourceCredentials"
]
};
/**
* Adds a resource of type build to the statement
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats
*
* @param buildId - Identifier for the buildId.
* @param account - Account of the resource; defaults to empty string: all accounts.
* @param region - Region of the resource; defaults to empty string: all regions.
* @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
*/
public onBuild(buildId: string, account?: string, region?: string, partition?: string) {
var arn = 'arn:${Partition}:codebuild:${Region}:${Account}:build/${BuildId}';
arn = arn.replace('${BuildId}', buildId);
arn = arn.replace('${Account}', account || '*');
arn = arn.replace('${Region}', region || '*');
arn = arn.replace('${Partition}', partition || 'aws');
return this.on(arn);
}
/**
* Adds a resource of type build-batch to the statement
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats
*
* @param buildBatchId - Identifier for the buildBatchId.
* @param account - Account of the resource; defaults to empty string: all accounts.
* @param region - Region of the resource; defaults to empty string: all regions.
* @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
*/
public onBuildBatch(buildBatchId: string, account?: string, region?: string, partition?: string) {
var arn = 'arn:${Partition}:codebuild:${Region}:${Account}:build-batch/${BuildBatchId}';
arn = arn.replace('${BuildBatchId}', buildBatchId);
arn = arn.replace('${Account}', account || '*');
arn = arn.replace('${Region}', region || '*');
arn = arn.replace('${Partition}', partition || 'aws');
return this.on(arn);
}
/**
* Adds a resource of type project to the statement
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats
*
* @param projectName - Identifier for the projectName.
* @param account - Account of the resource; defaults to empty string: all accounts.
* @param region - Region of the resource; defaults to empty string: all regions.
* @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
*
* Possible conditions:
* - .ifAwsResourceTag()
*/
public onProject(projectName: string, account?: string, region?: string, partition?: string) {
var arn = 'arn:${Partition}:codebuild:${Region}:${Account}:project/${ProjectName}';
arn = arn.replace('${ProjectName}', projectName);
arn = arn.replace('${Account}', account || '*');
arn = arn.replace('${Region}', region || '*');
arn = arn.replace('${Partition}', partition || 'aws');
return this.on(arn);
}
/**
* Adds a resource of type report-group to the statement
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats
*
* @param reportGroupName - Identifier for the reportGroupName.
* @param account - Account of the resource; defaults to empty string: all accounts.
* @param region - Region of the resource; defaults to empty string: all regions.
* @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
*
* Possible conditions:
* - .ifAwsResourceTag()
*/
public onReportGroup(reportGroupName: string, account?: string, region?: string, partition?: string) {
var arn = 'arn:${Partition}:codebuild:${Region}:${Account}:report-group/${ReportGroupName}';
arn = arn.replace('${ReportGroupName}', reportGroupName);
arn = arn.replace('${Account}', account || '*');
arn = arn.replace('${Region}', region || '*');
arn = arn.replace('${Partition}', partition || 'aws');
return this.on(arn);
}
/**
* Adds a resource of type report to the statement
*
* https://docs.aws.amazon.com/codebuild/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats
*
* @param reportGroupName - Identifier for the reportGroupName.
* @param reportId - Identifier for the reportId.
* @param account - Account of the resource; defaults to empty string: all accounts.
* @param region - Region of the resource; defaults to empty string: all regions.
* @param partition - Partition of the AWS account [aws, aws-cn, aws-us-gov]; defaults to `aws`.
*/
public onReport(reportGroupName: string, reportId: string, account?: string, region?: string, partition?: string) {
var arn = 'arn:${Partition}:codebuild:${Region}:${Account}:report/${ReportGroupName}:${ReportId}';
arn = arn.replace('${ReportGroupName}', reportGroupName);
arn = arn.replace('${ReportId}', reportId);
arn = arn.replace('${Account}', account || '*');
arn = arn.replace('${Region}', region || '*');
arn = arn.replace('${Partition}', partition || 'aws');
return this.on(arn);
}
} | the_stack |
import { EventEmitter } from 'events';
import { InjectParamsOption } from '../decorators/factory/decorator-factory';
import * as symbols from '../symbol';
const BIND = Symbol('Container#bind');
/**
* The Container
*/
export class Container extends EventEmitter {
/**
* Container binding identifier
*/
binds = new Map();
/**
* instances Map in the container
*/
instances = new Map();
/**
* abstract groups map
*/
tags: any = {};
/**
* static instance
*/
static instance: any;
/**
* Bind a singleton to the container
*/
singleton(abstract: any, concrete: any = null, callable = false) {
this[BIND](abstract, concrete, true, callable);
return this;
}
/**
* Bind a multiton to the container
*/
multiton(abstract: any, concrete: any = null, callable = false) {
this[BIND](abstract, concrete, false, callable);
}
/**
* Determines if the instance is Shared
*/
isShared(abstract: any) {
return this.instances.has(abstract) || (
this.binds.get(abstract)
&& Reflect.has(this.binds.get(abstract), 'shared')
&& this.binds.get(abstract).shared === true
);
}
/**
* Identifies whether the container has been bound
*/
bound(abstract: any) {
return this.binds.has(abstract) || this.instances.has(abstract);
}
/**
* Identifies whether the container has been instance
*/
exists(abstract: any) {
return this.instances.has(abstract);
}
/**
* Bind an object to the container
*/
[BIND](abstract: any, concrete: any, shared = false, callable = false) {
if (!abstract || !concrete) return;
let isShared = shared;
if (concrete && Reflect.getMetadata(symbols.MULTITON, concrete) === true) {
isShared = false;
} else if (concrete && Reflect.getMetadata(symbols.SINGLETON, concrete) === true) {
isShared = true;
}
if (typeof concrete === 'function') {
this.binds.set(abstract, {
concrete,
shared: isShared,
callable,
});
} else {
this.instances.set(abstract, {
concrete,
shared: true,
callable: false,
});
}
this.emit('binding', this.instances.get(abstract), this);
return this;
}
/**
* Create an instance of an object
*/
make(abstract: any, args: any[] = [], force = false): any {
const shared = this.isShared(abstract);
let obj;
// returns directly if an object instance already exists in the container
// instance shared
if (this.instances.has(abstract) && shared && !force) {
return this.instances.get(abstract).concrete;
}
// if a binding object exists, the binding object is instantiated
if (this.binds.has(abstract)) {
const { concrete, callable } = this.binds.get(abstract);
// 普通函数
if (callable) {
obj = this.invokeFunction(abstract, args);
}
// 可注入的class
else if (Reflect.getMetadata(symbols.INJECTABLE, concrete) === true) {
obj = this.invokeInjectAbleClass(abstract, args);
}
// 构造函数(class 和 function)
else {
obj = this.invokeConstructor(abstract, args);
}
this.emit('resolving', obj, this);
}
// 如果是单例,保存实例到容器
if (shared && obj !== undefined && !force) {
this.instances.set(abstract, {
concrete: obj,
shared,
});
}
return obj;
}
/**
* 调用普通函数
*/
private invokeFunction(abstract: any, args: any[]) {
const { concrete } = this.binds.get(abstract);
return concrete(...args, this);
}
/**
* 调用构造函数
*/
private invokeConstructor(abstract: any, args: any[]) {
const { concrete: Concrete } = this.binds.get(abstract);
return new Concrete(...args, this);
}
/**
* 调用可注入的类
*/
private invokeInjectAbleClass(abstract: any, args: any[]) {
const { concrete: Concrete } = this.binds.get(abstract);
const that = this;
const ConcreteProxy = new Proxy(Concrete, {
construct(target: any, targetArgArray: any[] = [], newTarget?: any) {
const params = that.bindConstructorParams(Concrete, args, targetArgArray);
const instance = Reflect.construct(target, [...params], newTarget);
instance.__context__ = args;
return new Proxy(instance, {
get(instanceTarget: any, propertyKey: string | number | symbol, receiver: any) {
if (propertyKey === 'constructor') return Reflect.get(instanceTarget, propertyKey, receiver);
if (typeof instanceTarget[propertyKey] === 'function') { // Method
return new Proxy(instanceTarget[propertyKey], {
apply(methodTarget: any, thisArg: any, argArray?: any) {
const methodParams = that.bindMethodParams(Concrete, propertyKey.toString(), args, argArray);
return Reflect.apply(methodTarget, thisArg, [...methodParams]);
}
});
}
const propertyParam = that.bindProperty(Concrete, args, propertyKey.toString());
return propertyParam ?? Reflect.get(instanceTarget, propertyKey, receiver);
}
});
}
});
return Reflect.construct(ConcreteProxy, [...args, this]);
}
/**
* 内建类型
* @param type
*/
private isBuildInType(type: any) {
return type === Number ||
type === String ||
type === Object ||
type === Boolean ||
type === Array ||
type === Function;
}
/**
* 绑定构造函数参数
* @param Concrete
* @param args
* @param vars
*/
private bindConstructorParams(Concrete: any, args: any[] = [], vars: any[] = []) {
const disableInject = Reflect.getMetadata(symbols.DISABLE_INJECT, Concrete);
if (disableInject) return vars;
const injectParams: InjectParamsOption[] = Reflect.getMetadata(symbols.INJECTTYPE_METADATA, Concrete) ?? [];
const typeParams: any[] = Reflect.getMetadata(symbols.PARAMTYPES_METADATA, Concrete) ?? [];
const argsLength = Math.max(Concrete.length, injectParams.length, typeParams.length, vars.length);
return this.bindParams(argsLength, injectParams, typeParams, args, vars);
}
/**
* 绑定类方法参数
* @param Concrete
* @param key
* @param args
* @param vars
*/
private bindMethodParams(Concrete: any, key: string | symbol, args: any[] = [], vars: any[] = []) {
const disableInject = Reflect.getMetadata(symbols.DISABLE_INJECT, Concrete, key);
if (disableInject) return vars;
const injectParams: InjectParamsOption[] = Reflect.getMetadata(symbols.INJECTTYPE_METADATA, Concrete, key) ?? [];
const typeParams: any[] = Reflect.getMetadata(symbols.PARAMTYPES_METADATA, Concrete.prototype, key) ?? [];
const argsLength = Math.max(Concrete.prototype[key].length, injectParams.length, typeParams.length, vars.length);
return this.bindParams(argsLength, injectParams, typeParams, args, vars);
}
/**
* 绑定类属性
* @param Concrete
* @param args
* @param key
*/
private bindProperty(Concrete: any, args: any[] = [], key: string | symbol) {
const injects: InjectParamsOption[] = Reflect.getMetadata(
symbols.INJECTTYPE_METADATA, Concrete, key
) ?? [];
const typeParam: any = Reflect.getMetadata(symbols.PROPERTYTYPE_METADATA, Concrete, key);
if (injects[0]) {
const { abstract, params, handler } = injects[0];
const injectedParam = this.make(abstract, [...params ?? [], ...args]);
if (typeof handler === 'function') {
return handler(injectedParam);
}
return injectedParam;
}
return this.make(
typeParam, [...args]
);
}
/**
* 参数绑定
* @param argsLength
* @param injectParams
* @param typeParams
* @param args
* @param vars
*/
private bindParams(argsLength: number, injectParams: InjectParamsOption[], typeParams: any[], args: any[], vars: any[]) {
const params: any[] = [];
// 未确认位置的手动注入的参数数组
const unPositionInjectParams = [];
// 已确认位置的手动注入的参数数组
const positionInjectParams = [];
for (const item of injectParams) {
if (item.index !== undefined) {
positionInjectParams.push(item);
} else {
unPositionInjectParams.push(item);
}
}
for (let index = 0; index < argsLength; index++) {
// 找到手动注入的匹配参数
const injectParam = positionInjectParams.find(item => item.index === index);
// 当前位置的类型参数
const typeParam = typeParams[index];
// 存在当前位置的手动注入,优先使用
if (injectParam) {
const { abstract, params: _params, handler } = injectParam;
const injected = this.make(abstract, [...(_params ?? []), ...args]);
params.push(
typeof handler === 'function' ? handler(injected) : injected
);
}
// 存在当前位置的类型注入,并且非内置类型
else if (typeParam && !this.isBuildInType(typeParam)) {
const injected = this.make(typeParams[index], [...args]);
params.push(injected);
}
// 还有剩余的未确认位置的手动注入参数
else if (unPositionInjectParams.length > 0) {
const { abstract, params: _params, handler } = unPositionInjectParams.pop() as InjectParamsOption;
const injected = this.make(abstract, [...(_params ?? []), ...args]);
params.push(
typeof handler === 'function' ? handler(injected) : injected
);
}
// 还有剩余传入的实参
else if (vars.length > 0) {
params.push(
vars.shift()
);
}
}
// 将多余的实参附加上去
params.push(...vars);
return params;
}
/**
* set abstract in groups
*/
static tag(abstract: any, tag: string) {
if (!abstract || !tag) return;
if (!this.getInstance().tags[tag]) this.getInstance().tags[tag] = [];
this.getInstance().tags[tag].push(abstract);
}
/**
* gets the object instance in the container
*/
static get(abstract: any, args: any[] = []) {
return this.getInstance().make(abstract, args);
}
/**
* bind an abstract in container
*/
static bind(abstract: any, concrete: any = null, shared = true, callable = false) {
return this.getInstance()[BIND](abstract, concrete, shared, callable);
}
/**
* Determines whether there is a corresponding binding within the container instance
*/
static has(abstract: any) {
return this.getInstance().binds.has(abstract) || this.getInstance().instances.has(abstract);
}
/**
* Get the container instance
*/
static getInstance() {
if (!this.instance) {
this.instance = new Container();
}
return this.instance;
}
  /**
   * Replace the shared container instance (e.g. for testing or resetting).
   */
  static setInstance(instance: any) {
    this.instance = instance;
  }
} | the_stack |
import { IModelApp } from "@itwin/core-frontend";
import { Surface } from "./Surface";
/**
 * Wires up left-drag on a handle element to move a Window's container
 * around, clamped to the bounds of the owning surface.
 */
class DragState {
  private _deltaX = 0;
  private _deltaY = 0;
  private _lastX = 0;
  private _lastY = 0;
  public constructor(window: Window, click: HTMLElement) {
    const onMouseMove = (e: MouseEvent) => {
      // Any manual move invalidates a docked position.
      window.invalidateDock();
      const target = window.container;
      const surf = window.surface.element;
      e.preventDefault();
      this._deltaX = this._lastX - e.clientX;
      this._deltaY = this._lastY - e.clientY;
      this._lastX = e.clientX;
      this._lastY = e.clientY;
      // Clamp the new position to [0, surface extent - window extent].
      const top = Math.min(surf.clientHeight - target.clientHeight, Math.max(0, target.offsetTop - this._deltaY));
      const left = Math.min(surf.clientWidth - target.clientWidth, Math.max(0, target.offsetLeft - this._deltaX));
      target.style.top = `${top}px`;
      target.style.left = `${left}px`;
    };
    const onMouseUp = () => {
      document.removeEventListener("mousemove", onMouseMove);
      document.removeEventListener("mouseup", onMouseUp);
    };
    click.addEventListener("mousedown", (e: MouseEvent) => {
      e.preventDefault();
      this._lastX = e.clientX;
      this._lastY = e.clientY;
      document.addEventListener("mousemove", onMouseMove);
      document.addEventListener("mouseup", onMouseUp);
      window.focus();
    });
  }
}
/**
 * Wires up left-drag on a resizer widget to resize a Window's container.
 * Dragging right grows the width; dragging up grows the height (the top
 * edge follows the mouse). Both dimensions are clamped to `minSize` and
 * to the surface bounds.
 */
class ResizeState {
  /** Minimum width/height (in pixels) the window may be resized to. */
  public readonly minSize: number;
  private _prevWidth = 0;
  private _prevHeight = 0;
  private _prevY = 0;
  private _prevMouseX = 0;
  private _prevMouseY = 0;
  public constructor(window: Window, click: HTMLElement, minSize = 50) {
    const target = window.container;
    this.minSize = minSize;
    const resize = (e: MouseEvent) => {
      // Any manual resize invalidates a docked position.
      window.invalidateDock();
      const width = this._prevWidth + (e.pageX - this._prevMouseX);
      // Height grows as the mouse moves up; the top edge moves with it.
      const height = this._prevHeight - (e.pageY - this._prevMouseY);
      const windowTop = this._prevY + (e.pageY - this._prevMouseY);
      const windowLeft = parseInt(window.container.style.left, 10);
      const windowRight = windowLeft + width;
      const prevBottom = this._prevY + this._prevHeight;
      const surfaceWidth = window.surface.element.clientWidth;
      const surfaceRight = window.surface.element.clientLeft + surfaceWidth;
      const surfaceTop = window.surface.element.clientTop;
      if (width > this.minSize)
        target.style.width = `${(windowRight <= surfaceRight) ? width : surfaceWidth - windowLeft}px`;
      // Use this.minSize consistently for both clamp checks (the original
      // mixed the captured parameter and the field; same value, one source).
      if (height > this.minSize) {
        target.style.height = `${(windowTop >= surfaceTop) ? height : prevBottom}px`;
        target.style.top = `${(windowTop >= surfaceTop) ? windowTop : surfaceTop}px`;
      }
    };
    const stopResize = () => {
      document.removeEventListener("mousemove", resize);
      document.removeEventListener("mouseup", stopResize);
    };
    click.addEventListener("mousedown", (e) => {
      e.preventDefault();
      e.stopPropagation();
      // Snapshot the geometry at drag start; all deltas are relative to it.
      const style = getComputedStyle(target, null);
      const pxToNum = (propName: string) => parseFloat(style.getPropertyValue(propName).replace("px", ""));
      this._prevWidth = pxToNum("width");
      this._prevHeight = pxToNum("height");
      this._prevY = pxToNum("top");
      this._prevMouseX = e.pageX;
      this._prevMouseY = e.pageY;
      document.addEventListener("mousemove", resize);
      document.addEventListener("mouseup", stopResize);
      window.focus();
    });
  }
}
/** Edge(s) of the surface a window can be docked to (bit flags). */
export enum Dock {
  /** Occupy the entire surface. */
  Full = 0,
  Top = 1 << 0,
  Left = 1 << 1,
  Right = 1 << 2,
  Bottom = 1 << 3,
  TopLeft = Top | Left,
  TopRight = Top | Right,
  BottomLeft = Bottom | Left,
  BottomRight = Bottom | Right,
}
/**
 * Records which edge(s) a window is docked to, together with the
 * pre-dock geometry so undock() can restore it.
 */
interface DockState {
  dock: Dock;
  /** Width (px) before docking. */
  width: number;
  /** Height (px) before docking. */
  height: number;
  /** CSS `top` before docking. */
  top: string;
  /** CSS `left` before docking. */
  left: string;
}
/**
 * The title bar of a floating Window. Owns the title text, the close and
 * resizer widgets, and the window's drag / resize / dock behavior.
 */
class WindowHeader {
  public readonly window: Window;
  public readonly element: HTMLElement;
  private readonly _titleElement: HTMLElement;
  private readonly _closeElement: HTMLElement;
  private readonly _resizerElement: HTMLElement;
  // Set while docked; holds the geometry to restore on undock().
  private _dockState?: DockState;
  private readonly _resizeState: ResizeState;
  public constructor(window: Window, parent: HTMLElement, title?: string) {
    this.window = window;
    this.element = IModelApp.makeHTMLElement("div", { className: "floating-window-header", parent });
    this._titleElement = IModelApp.makeHTMLElement("span", { parent: this.element });
    this.setTitle(title);
    this._closeElement = IModelApp.makeHTMLElement("div", { className: "floating-window-header-close", parent: this.element });
    this._closeElement.onclick = () => this.window.surface.close(this.window);
    this.hideCloseWidget(!this.window.isCloseable);
    this._resizerElement = IModelApp.makeHTMLElement("div", { className: "floating-window-header-resize", parent: this.element });
    this.hideResizerWidget(!this.window.isResizable);
    // Left-drag => move
    new DragState(window, this.element);
    // Left-drag corner => resize
    this._resizeState = new ResizeState(window, this._resizerElement);
    // Double-click => maximize or restore
    const maximizeOrRestore = () => {
      if (this.isDocked)
        this.undock();
      else
        this.dock(Dock.Full);
    };
    this.element.addEventListener("dblclick", maximizeOrRestore);
  }
  /** Set the title text; clears it when `title` is undefined. */
  public setTitle(title?: string): void {
    this._titleElement.innerText = undefined !== title ? title : "";
  }
  /** Current title text ("" when unset). */
  public get title(): string {
    const title = this._titleElement.innerText;
    return title ? title : "";
  }
  /** Dock the window to the given edge(s), saving the undocked geometry. */
  public dock(dock: Dock): void {
    // NB: Don't update saved position+dimensions if currently docked.
    const state = this._dockState;
    const target = this.window.container;
    this._dockState = {
      dock,
      width: undefined !== state ? state.width : target.clientWidth,
      height: undefined !== state ? state.height : target.clientHeight,
      top: undefined !== state ? state.top : target.style.top,
      left: undefined !== state ? state.left : target.style.left,
    };
    this.applyDock();
  }
  /** Restore the pre-dock geometry and clear the dock state. */
  public undock(): void {
    const s = this._dockState;
    if (undefined === s)
      return;
    const target = this.window.container;
    target.style.width = `${s.width}px`;
    target.style.height = `${s.height}px`;
    target.style.top = s.top;
    target.style.left = s.left;
    this._dockState = undefined;
    this.window.focus();
  }
  /** Recompute the docked geometry from the current surface size. */
  public applyDock(): void {
    if (undefined === this._dockState || !this.window.isResizable)
      return;
    const surf = this.window.surface;
    const sw = surf.element.clientWidth;
    const sh = surf.element.clientHeight;
    const hw = Math.floor(sw / 2);
    const hh = Math.floor(sh / 2);
    // Start with the full surface, then halve per docked edge.
    let l = 0;
    let w = sw;
    let t = 0;
    let h = sh;
    const dock = this._dockState.dock;
    if (Dock.Full !== dock) {
      if (dock & Dock.Top)
        h = hh;
      else if (dock & Dock.Bottom)
        t = h = hh;
      if (dock & Dock.Left)
        w = hw;
      else if (dock & Dock.Right)
        l = w = hw;
    }
    const style = this.window.container.style;
    style.left = `${l}px`;
    style.top = `${t}px`;
    style.width = `${w}px`;
    style.height = `${h}px`;
    this.window.focus();
  }
  /** Clamp the window's position (and, if needed, size) to the surface. */
  public ensureInSurface(): void {
    const surf = this.window.surface;
    const surfaceTop = surf.element.clientTop;
    const surfaceBottom = surf.element.clientHeight;
    const surfaceLeft = surf.element.clientLeft;
    const surfaceRight = surfaceLeft + surf.element.clientWidth;
    const style = this.window.container.style;
    const windowHeight = this.window.container.clientHeight;
    const windowWidth = this.window.container.clientWidth;
    let windowTop = parseInt(style.top, 10);
    let windowLeft = parseInt(style.left, 10);
    const windowBottom = windowTop + windowHeight;
    const windowRight = windowLeft + windowWidth;
    // Keep the window inside the surface's vertical bounds.
    if (windowBottom >= surfaceBottom)
      windowTop = (surfaceBottom - windowHeight);
    if (windowTop < surfaceTop)
      windowTop = surfaceTop;
    style.top = `${windowTop}px`;
    if (windowHeight > surfaceBottom)
      style.height = `${surfaceBottom}px`;
    // Keep the window inside the surface's horizontal bounds.
    if (windowRight >= surfaceRight)
      windowLeft = (surfaceRight - windowWidth);
    if (windowLeft < surfaceLeft)
      windowLeft = surfaceLeft;
    style.left = `${windowLeft}px`;
    if (windowWidth > surfaceRight)
      style.width = `${surfaceRight}px`;
  }
  /**
   * Merge an additional edge into the current dock. Opposite edges are
   * mutually exclusive (e.g. adding Left clears Right).
   */
  public addDock(add: Dock): void {
    if (undefined === this._dockState) {
      this.dock(add);
      return;
    }
    if (Dock.Full === add)
      return;
    let dock = this._dockState.dock;
    dock |= add;
    if (add & Dock.Left)
      dock &= ~Dock.Right;
    if (add & Dock.Right)
      dock &= ~Dock.Left;
    if (add & Dock.Top)
      dock &= ~Dock.Bottom;
    if (add & Dock.Bottom)
      dock &= ~Dock.Top;
    this._dockState.dock = dock;
    this.applyDock();
  }
  public get isDocked() { return undefined !== this._dockState; }
  /** Forget the dock state without restoring geometry (e.g. on manual drag). */
  public invalidateDock() { this._dockState = undefined; }
  /** Resize the container so the content area is w x h pixels; undocks. */
  public resizeContent(w: number, h: number): void {
    // ###TODO kludge for 2px borders...
    w += 4;
    h += 4;
    w = Math.max(w, this._resizeState.minSize);
    h = Math.max(h, this._resizeState.minSize);
    this._dockState = undefined;
    // Grow/shrink the container by the delta between desired and current
    // content size so header/borders are accounted for.
    const pw = this.window.contentDiv.clientWidth;
    const ph = this.window.contentDiv.clientHeight;
    const dw = w - pw;
    const dh = h - ph;
    const cont = this.window.container;
    cont.style.width = `${cont.clientWidth + dw}px`;
    cont.style.height = `${cont.clientHeight + dh}px`;
  }
  public hideCloseWidget(hide: boolean) {
    this._closeElement.style.display = hide ? "none" : "block";
  }
  public hideResizerWidget(hide: boolean) {
    this._resizerElement.style.display = hide ? "none" : "block";
  }
  /** Toggle the CSS class that renders the resizer as a "pinned" indicator. */
  public markAsPinned(isPinned: boolean) {
    if (isPinned)
      this._resizerElement.classList.add("window-pinned");
    else
      this._resizerElement.classList.remove("window-pinned");
  }
}
/** Optional initial settings for a floating Window. */
export interface WindowProps {
  title?: string;
  /** Initial top offset in pixels (defaults to 0). */
  top?: number;
  /** Initial left offset in pixels (defaults to 0). */
  left?: number;
  /** Initial width in pixels (defaults to a third of the surface width). */
  width?: number;
  /** Initial height in pixels (defaults to a third of the surface height). */
  height?: number;
  /** If true, the content area scrolls on overflow. */
  scrollbars?: boolean;
}
/**
 * A floating, draggable, resizable, dockable window hosted on a Surface.
 * Concrete subclasses supply a unique windowId.
 */
export abstract class Window {
  protected readonly _header: WindowHeader;
  public readonly container: HTMLElement;
  public readonly contentDiv: HTMLDivElement;
  public readonly surface: Surface;
  private _isPinned = false;
  public abstract get windowId(): string;
  public constructor(surface: Surface, props?: WindowProps) {
    this.surface = surface;
    this.container = IModelApp.makeHTMLElement("div", { className: "floating-window-container" });
    // Initial geometry: explicit props win; otherwise top-left corner at a
    // third of the surface size.
    const top = undefined !== props && undefined !== props.top ? props.top : 0;
    const left = undefined !== props && undefined !== props.left ? props.left : 0;
    const width = undefined !== props && undefined !== props.width ? props.width : surface.element.clientWidth / 3;
    const height = undefined !== props && undefined !== props.height ? props.height : surface.element.clientHeight / 3;
    const style = this.container.style;
    style.top = `${top}px`;
    style.left = `${left}px`;
    style.width = `${width}px`;
    style.height = `${height}px`;
    // The header is created before the content div so it renders above it.
    this._header = new WindowHeader(this, this.container, undefined !== props ? props.title : undefined);
    this.contentDiv = IModelApp.makeHTMLElement("div", { className: "floating-window", parent: this.container });
    if (props && props.scrollbars)
      this.contentDiv.classList.add("overflow-auto");
  }
  // Do not set directly - use Surface.togglePin(window)
  public get isPinned(): boolean {
    return this._isPinned;
  }
  public set isPinned(value: boolean) {
    this._header.markAsPinned(value);
    this._isPinned = value;
  }
  public set title(title: string | undefined) {
    this._header.setTitle(title);
  }
  public focus(): void {
    this.surface.focus(this);
  }
  // Docking and geometry management are delegated to the header.
  public get isDocked() {
    return this._header.isDocked;
  }
  public dock(dock: Dock) {
    this._header.dock(dock);
  }
  public updateDock() {
    this._header.applyDock();
  }
  public undock() {
    this._header.undock();
  }
  public ensureInSurface() {
    this._header.ensureInSurface();
  }
  public invalidateDock() {
    this._header.invalidateDock();
  }
  public addDock(dock: Dock) {
    this._header.addDock(dock);
  }
  public updateUi(): void {
    this._header.hideCloseWidget(!this.isCloseable);
  }
  public onFocus(): void {
    this.container.classList.add("window-focused");
    this._header.element.classList.add("window-header-focused");
  }
  public onLoseFocus(): void {
    this.container.classList.remove("window-focused");
    this._header.element.classList.remove("window-header-focused");
  }
  public onClosing(): void { }
  public onClosed(): void { }
  public get isCloseable(): boolean {
    return true;
  }
  public get isResizable(): boolean {
    return true;
  }
  public resizeContent(w: number, h: number): void {
    this._header.resizeContent(w, h);
  }
  public setHeaderVisible(visible: boolean): void {
    this._header.element.style.display = visible ? "block" : "none";
  }
}
/** WindowProps plus the unique id required by NamedWindow. */
export interface NamedWindowProps extends WindowProps {
  /** Unique identifier; also used as the default title. */
  id: string;
}
export class NamedWindow extends Window {
private readonly _windowId: string;
public constructor(surface: Surface, props: NamedWindowProps) {
super(surface, props);
this._windowId = props.id;
if (undefined === props.title)
this._header.setTitle(this.windowId);
}
public get windowId() { return this._windowId; }
} | the_stack |
import {
Agile,
Collection,
CollectionPersistent,
ComputedTracker,
EnhancedState,
Group,
GroupObserver,
Item,
StateObserver,
TrackedChangeMethod,
} from '../../../../src';
import { LogMock } from '../../../helper/logMock';
describe('Group Tests', () => {
interface ItemInterface {
id: string;
name: string;
}
let dummyAgile: Agile;
let dummyCollection: Collection<ItemInterface>;
  // Fresh Agile instance + Collection per test; spies on Group.prototype are
  // installed and immediately cleared so individual tests can re-assert them.
  beforeEach(() => {
    LogMock.mockLogs();
    dummyAgile = new Agile();
    dummyCollection = new Collection<ItemInterface>(dummyAgile, {
      key: 'dummyCollection',
    });
    jest.spyOn(Group.prototype, 'rebuild');
    jest.spyOn(Group.prototype, 'addSideEffect');
    jest.clearAllMocks();
  });
  // Construction cases: no initial items with default config, no initial
  // items with explicit key/placeholder config, and a list of initial keys.
  it('should create Group with no initialItems (default config)', () => {
    // Overwrite methods once to not call it
    jest
      .spyOn(Group.prototype, 'rebuild')
      .mockReturnValueOnce(undefined as any);
    jest
      .spyOn(Group.prototype, 'addSideEffect')
      .mockReturnValueOnce(undefined as any);
    const group = new Group(dummyCollection);
    expect(group.collection()).toBe(dummyCollection);
    expect(group._output).toStrictEqual([]);
    expect(group.nextGroupOutput).toStrictEqual([]);
    expect(group.notFoundItemKeys).toStrictEqual([]);
    expect(group.loadedInitialValue).toBeTruthy();
    // Check if State was called with correct parameters
    expect(group._key).toBeUndefined();
    expect(group.isSet).toBeFalsy();
    expect(group.isPlaceholder).toBeFalsy();
    expect(group.initialStateValue).toStrictEqual([]);
    expect(group._value).toStrictEqual([]);
    expect(group.previousStateValue).toStrictEqual([]);
    expect(group.nextStateValue).toStrictEqual([]);
    expect(group.observers['value']).toBeInstanceOf(StateObserver);
    expect(Array.from(group.observers['value'].dependents)).toStrictEqual([]);
    expect(group.observers['value'].key).toBeUndefined();
    expect(group.observers['output']).toBeInstanceOf(GroupObserver);
    expect(Array.from(group.observers['output'].dependents)).toStrictEqual([]);
    expect(group.observers['output'].key).toBeUndefined();
    expect(group.sideEffects).toStrictEqual({});
    expect(group.computeValueMethod).toBeUndefined();
    expect(group.computeExistsMethod).toBeInstanceOf(Function);
    expect(group.isPersisted).toBeFalsy();
    expect(group.persistent).toBeUndefined();
  });
  it('should create Group with no initialItems (specific config)', () => {
    // Overwrite methods once to not call it
    jest
      .spyOn(Group.prototype, 'rebuild')
      .mockReturnValueOnce(undefined as any);
    jest
      .spyOn(Group.prototype, 'addSideEffect')
      .mockReturnValueOnce(undefined as any);
    const group = new Group(dummyCollection, [], {
      key: 'dummyKey',
      isPlaceholder: true,
    });
    expect(group.collection()).toBe(dummyCollection);
    expect(group._output).toStrictEqual([]);
    expect(group.nextGroupOutput).toStrictEqual([]);
    expect(group.notFoundItemKeys).toStrictEqual([]);
    expect(group.loadedInitialValue).toBeTruthy();
    // Check if State was called with correct parameters
    expect(group._key).toBe('dummyKey');
    expect(group.isSet).toBeFalsy();
    expect(group.isPlaceholder).toBeTruthy();
    expect(group.initialStateValue).toStrictEqual([]);
    expect(group._value).toStrictEqual([]);
    expect(group.previousStateValue).toStrictEqual([]);
    expect(group.nextStateValue).toStrictEqual([]);
    expect(group.observers['value']).toBeInstanceOf(StateObserver);
    expect(Array.from(group.observers['value'].dependents)).toStrictEqual([]);
    expect(group.observers['value'].key).toBe('dummyKey');
    expect(group.observers['output']).toBeInstanceOf(GroupObserver);
    expect(Array.from(group.observers['output'].dependents)).toStrictEqual([]);
    expect(group.observers['output'].key).toBe('dummyKey');
    expect(group.sideEffects).toStrictEqual({});
    expect(group.computeValueMethod).toBeUndefined();
    expect(group.computeExistsMethod).toBeInstanceOf(Function);
    expect(group.isPersisted).toBeFalsy();
    expect(group.persistent).toBeUndefined();
  });
  it('should create Group with initialItems (default config)', () => {
    // Overwrite methods once to not call it
    jest
      .spyOn(Group.prototype, 'rebuild')
      .mockReturnValueOnce(undefined as any);
    jest
      .spyOn(Group.prototype, 'addSideEffect')
      .mockReturnValueOnce(undefined as any);
    const group = new Group(dummyCollection, ['test1', 'test2', 'test3']);
    expect(group.collection()).toBe(dummyCollection);
    expect(group._output).toStrictEqual([]);
    expect(group.nextGroupOutput).toStrictEqual([]);
    expect(group.notFoundItemKeys).toStrictEqual([]);
    expect(group.loadedInitialValue).toBeTruthy();
    // Check if State was called with correct parameters
    expect(group._key).toBeUndefined();
    expect(group.isSet).toBeFalsy();
    expect(group.isPlaceholder).toBeFalsy();
    expect(group.initialStateValue).toStrictEqual(['test1', 'test2', 'test3']);
    expect(group._value).toStrictEqual(['test1', 'test2', 'test3']);
    expect(group.previousStateValue).toStrictEqual(['test1', 'test2', 'test3']);
    expect(group.nextStateValue).toStrictEqual(['test1', 'test2', 'test3']);
    expect(group.observers['value']).toBeInstanceOf(StateObserver);
    expect(group.observers['value'].key).toBeUndefined();
    expect(group.observers['output']).toBeInstanceOf(GroupObserver);
    expect(group.observers['output'].key).toBeUndefined();
    expect(group.sideEffects).toStrictEqual({});
    expect(group.computeValueMethod).toBeUndefined();
    expect(group.computeExistsMethod).toBeInstanceOf(Function);
    expect(group.isPersisted).toBeFalsy();
    expect(group.persistent).toBeUndefined();
  });
describe('Group Function Tests', () => {
let group: Group<ItemInterface>;
let dummyItem1: Item<ItemInterface>;
let dummyItem2: Item<ItemInterface>;
let dummyItem3: Item<ItemInterface>;
    // Shared fixture: an empty Group plus three collected Items.
    beforeEach(() => {
      group = new Group<ItemInterface>(dummyCollection, [], {
        key: 'groupKey',
      });
      dummyCollection.collect({ id: 'dummyItem1Key', name: 'jeff' });
      dummyCollection.collect({ id: 'dummyItem2Key', name: 'frank' });
      dummyCollection.collect({ id: 'dummyItem3Key', name: 'hans' });
      dummyItem1 = dummyCollection.getItem('dummyItem1Key') as any;
      dummyItem2 = dummyCollection.getItem('dummyItem2Key') as any;
      dummyItem3 = dummyCollection.getItem('dummyItem3Key') as any;
    });
    // Reading `output` must register the output Observer for computed tracking.
    describe('output get function tests', () => {
      beforeEach(() => {
        jest.spyOn(ComputedTracker, 'tracked');
      });
      it('should return output of Group and call ComputedTracker.tracked', () => {
        group._output = [
          { id: '1', name: 'Frank' },
          { id: '2', name: 'Hans' },
        ];
        const response = group.output;
        expect(response).toStrictEqual([
          { id: '1', name: 'Frank' },
          { id: '2', name: 'Hans' },
        ]);
        expect(ComputedTracker.tracked).toHaveBeenCalledWith(
          group.observers['output']
        );
      });
    });
describe('output set function tests', () => {
it("shouldn't set output to passed value and print error", () => {
group._output = null as any;
group.output = [
{ id: '12', name: 'Hans der 3' },
{ id: '99', name: 'Frank' },
];
expect(group._output).toStrictEqual(null);
expect(LogMock.hasLoggedCode('1C:03:00', [group._key]));
});
});
    // `has` is a membership check on the raw itemKey value array;
    // `size` simply reflects its length.
    describe('has function tests', () => {
      beforeEach(() => {
        group._value = ['test1', 'test2'];
      });
      it('should return true if group contains ItemKey', () => {
        expect(group.has('test1')).toBeTruthy();
      });
      it("should return false if group doesn't contain ItemKey", () => {
        expect(group.has('notExistingKey')).toBeFalsy();
      });
    });
    describe('size function tests', () => {
      it('should return size of Group', () => {
        group._value = ['test1', 'test2'];
        expect(group.size).toBe(2);
      });
    });
    // remove() delegates to set(); these tests assert on the exact value and
    // config (trackedChanges, softRebuild, background) forwarded to the mock.
    describe('remove function tests', () => {
      beforeEach(() => {
        group._value = [
          'dummyItem1Key',
          'dummyItem2Key',
          'missingInCollectionItemKey',
        ];
        group.nextStateValue = group._value;
        group._preciseItemKeys = ['dummyItem1Key', 'dummyItem2Key'];
        group.set = jest.fn();
      });
      it('should remove Item from Group (default config)', () => {
        group.remove('dummyItem1Key');
        expect(group.set).toHaveBeenCalledWith(
          ['dummyItem2Key', 'missingInCollectionItemKey'],
          {
            any: {
              trackedChanges: [
                {
                  index: 0,
                  method: TrackedChangeMethod.REMOVE,
                  key: 'dummyItem1Key',
                },
              ],
            },
            softRebuild: true, // Not required but passed for simplicity
          }
        );
      });
      it('should remove Item from Group (specific config)', () => {
        group.remove('dummyItem1Key', {
          background: true,
          force: true,
          storage: false,
          softRebuild: false,
        });
        expect(group.set).toHaveBeenCalledWith(
          ['dummyItem2Key', 'missingInCollectionItemKey'],
          {
            background: true,
            force: true,
            storage: false,
            any: { trackedChanges: [] },
            softRebuild: false, // Not required but passed for simplicity
          }
        );
      });
      it("shouldn't remove not existing Item from Group", () => {
        group.remove('notExistingKey');
        expect(group.set).not.toHaveBeenCalled();
      });
      it('should remove Items from Group', () => {
        group.remove([
          'dummyItem1Key',
          'notExistingItemKey',
          'missingInCollectionItemKey',
          'dummyItem2Key',
        ]);
        expect(group.set).toHaveBeenCalledWith([], {
          any: {
            trackedChanges: [
              {
                index: 0,
                method: TrackedChangeMethod.REMOVE,
                key: 'dummyItem1Key',
              },
              {
                index: 0,
                method: TrackedChangeMethod.REMOVE,
                key: 'dummyItem2Key',
              },
            ],
          },
          softRebuild: true, // Not required but passed for simplicity
        });
      });
      it("should remove Item/s from Group that doesn't exist in the Collection in background", () => {
        group.remove('missingInCollectionItemKey');
        expect(group.set).toHaveBeenCalledWith(
          ['dummyItem1Key', 'dummyItem2Key'],
          {
            background: true,
            any: { trackedChanges: [] },
            softRebuild: true, // Not required but passed for simplicity
          }
        );
      });
      it(
        'should remove Items from Group in background ' +
          'if passing not existing Items to remove ' +
          "and Items that doesn't exist in the Collection",
        () => {
          group.remove(['notExistingItemKey', 'missingInCollectionItemKey']);
          expect(group.set).toHaveBeenCalledWith(
            ['dummyItem1Key', 'dummyItem2Key'],
            {
              background: true,
              any: { trackedChanges: [] },
              softRebuild: true, // Not required but passed for simplicity
            }
          );
        }
      );
    });
    // add() also delegates to set(); covers push vs unshift placement,
    // duplicates, and keys missing from the Collection (background update).
    describe('add function tests', () => {
      beforeEach(() => {
        group._value = ['placeholder', 'dummyItem1Key', 'placeholder'];
        group.nextStateValue = group._value;
        group._preciseItemKeys = ['dummyItem1Key'];
        group.set = jest.fn();
      });
      it('should add Item at the end of the Group (default config)', () => {
        group.add('dummyItem2Key');
        expect(group.set).toHaveBeenCalledWith(
          ['placeholder', 'dummyItem1Key', 'placeholder', 'dummyItem2Key'],
          {
            any: {
              trackedChanges: [
                {
                  index: 1,
                  method: TrackedChangeMethod.ADD,
                  key: 'dummyItem2Key',
                },
              ],
            },
            method: 'push', // Not required but passed for simplicity
            softRebuild: true, // Not required but passed for simplicity
          }
        );
      });
      it('should add Item at the end of the Group (specific config)', () => {
        group.add('dummyItem2Key', {
          background: true,
          force: true,
          storage: false,
          softRebuild: false,
        });
        expect(group.set).toHaveBeenCalledWith(
          ['placeholder', 'dummyItem1Key', 'placeholder', 'dummyItem2Key'],
          {
            background: true,
            force: true,
            storage: false,
            any: { trackedChanges: [] },
            method: 'push', // Not required but passed for simplicity
            softRebuild: false, // Not required but passed for simplicity
          }
        );
      });
      it("should add Item at the beginning of the Group (config.method = 'unshift')", () => {
        group.add('dummyItem2Key', { method: 'unshift' });
        expect(group.set).toHaveBeenCalledWith(
          ['dummyItem2Key', 'placeholder', 'dummyItem1Key', 'placeholder'],
          {
            any: {
              trackedChanges: [
                {
                  index: 0,
                  method: TrackedChangeMethod.ADD,
                  key: 'dummyItem2Key',
                },
              ],
            },
            method: 'unshift', // Not required but passed for simplicity
            softRebuild: true, // Not required but passed for simplicity
          }
        );
      });
      it("shouldn't add already existing Item to the Group (default config)", () => {
        group.add('dummyItem1Key');
        expect(group.set).not.toHaveBeenCalled();
      });
      it('should add Items at the end of the Group', () => {
        group.add(['dummyItem1Key', 'dummyItem2Key', 'notExistingItemKey']);
        expect(group.set).toHaveBeenCalledWith(
          [
            'placeholder',
            'dummyItem1Key',
            'placeholder',
            'dummyItem2Key',
            'notExistingItemKey',
          ],
          {
            any: {
              trackedChanges: [
                {
                  index: 1,
                  method: TrackedChangeMethod.ADD,
                  key: 'dummyItem2Key',
                },
                {
                  index: 2,
                  method: TrackedChangeMethod.ADD,
                  key: 'notExistingItemKey',
                },
              ],
            },
            method: 'push', // Not required but passed for simplicity
            softRebuild: true, // Not required but passed for simplicity
          }
        );
      });
      it("should add Item that doesn't exist in Collection at the end of the Group in background", () => {
        group.add('notExistingItemKey');
        expect(group.set).toHaveBeenCalledWith(
          ['placeholder', 'dummyItem1Key', 'placeholder', 'notExistingItemKey'],
          {
            background: true,
            any: {
              trackedChanges: [
                {
                  index: 1,
                  method: TrackedChangeMethod.ADD,
                  key: 'notExistingItemKey',
                },
              ],
            },
            method: 'push', // Not required but passed for simplicity
            softRebuild: true, // Not required but passed for simplicity
          }
        );
      });
      it(
        'should add Items at the end of the Group in background ' +
          'if passing already added Items ' +
          "and Items that doesn't exist in the Collection",
        () => {
          group.add(['dummyItem1Key', 'notExistingItemKey']);
          expect(group.set).toHaveBeenCalledWith(
            [
              'placeholder',
              'dummyItem1Key',
              'placeholder',
              'notExistingItemKey',
            ],
            {
              background: true,
              any: {
                trackedChanges: [
                  {
                    index: 1,
                    method: TrackedChangeMethod.ADD,
                    key: 'notExistingItemKey',
                  },
                ],
              },
              method: 'push', // Not required but passed for simplicity
              softRebuild: true, // Not required but passed for simplicity
            }
          );
        }
      );
    });
    // replace() swaps one itemKey in place via set(); getItems() resolves the
    // value array to Items, silently dropping keys missing from the Collection.
    describe('replace function tests', () => {
      beforeEach(() => {
        group._value = [1, 2, 3, 4, 5, 6];
        group.set = jest.fn();
      });
      it('should replace oldItemKey with new ItemKey (default config)', () => {
        group.replace(4, 20);
        expect(group.set).toHaveBeenCalledWith([1, 2, 3, 20, 5, 6], {});
      });
      it('should replace oldItemKey with new ItemKey (specific config)', () => {
        group.replace(2, 20, {
          storage: true,
          sideEffects: {
            enabled: false,
          },
        });
        expect(group.set).toHaveBeenCalledWith([1, 20, 3, 4, 5, 6], {
          storage: true,
          sideEffects: {
            enabled: false,
          },
        });
      });
    });
    describe('getItems function tests', () => {
      beforeEach(() => {
        group._value = ['dummyItem1Key', 'notExistingItemKey', 'dummyItem2Key'];
      });
      it('should return all existing Items of the Group', () => {
        const items = group.getItems();
        expect(items).toStrictEqual([dummyItem1, dummyItem2]);
      });
    });
    // persist() formats the storage key via CollectionPersistent unless
    // followCollectionPersistKeyPattern is disabled.
    describe('persist function tests', () => {
      beforeEach(() => {
        jest.spyOn(EnhancedState.prototype, 'persist');
      });
      it('should persist Group with formatted groupKey (default config)', () => {
        group.persist();
        expect(EnhancedState.prototype.persist).toHaveBeenCalledWith({
          key: CollectionPersistent.getGroupStorageKey(
            group._key,
            dummyCollection._key
          ),
          followCollectionPersistKeyPattern: true, // Not required but passed for simplicity
        });
      });
      it('should persist Group with formatted key (specific config)', () => {
        group.persist({
          key: 'specificKey',
          loadValue: false,
          storageKeys: ['test1', 'test2'],
          defaultStorageKey: 'test1',
        });
        expect(EnhancedState.prototype.persist).toHaveBeenCalledWith({
          key: CollectionPersistent.getGroupStorageKey(
            'specificKey',
            dummyCollection._key
          ),
          loadValue: false,
          storageKeys: ['test1', 'test2'],
          defaultStorageKey: 'test1',
          followCollectionPersistKeyPattern: true, // Not required but passed for simplicity
        });
      });
      it('should persist Group with groupKey (config.followCollectionPersistKeyPattern = false)', () => {
        group.persist({ followCollectionPersistKeyPattern: false });
        expect(EnhancedState.prototype.persist).toHaveBeenCalledWith({
          key: group._key,
          followCollectionPersistKeyPattern: false, // Not required but passed for simplicity
        });
      });
      it('should persist Group with specified key (config.followCollectionPersistKeyPattern = false)', () => {
        group.persist({
          key: 'specificKey',
          followCollectionPersistKeyPattern: false,
        });
        expect(EnhancedState.prototype.persist).toHaveBeenCalledWith({
          key: 'specificKey',
          followCollectionPersistKeyPattern: false, // Not required but passed for simplicity
        });
      });
    });
    // rebuild(): no trackedChanges => hard rebuild via ingestOutput;
    // trackedChanges => soft (incremental) rebuild via ingest. Missing
    // Collection keys end up in notFoundItemKeys and are warned about.
    describe('rebuild function tests', () => {
      beforeEach(() => {
        group._value = [
          'dummyItem1Key',
          'missingInCollectionItemKey',
          'dummyItem2Key',
          'dummyItem3Key',
        ];
        group.observers['output'].ingestOutput = jest.fn();
        group.observers['output'].ingest = jest.fn();
      });
      it(
        'should hard rebuild the Group if no trackedChanges were specified ' +
          'and set notExistingItemKeys to the not found Item Keys (default config)',
        () => {
          group.rebuild();
          expect(group.notFoundItemKeys).toStrictEqual([
            'missingInCollectionItemKey',
          ]);
          expect(group.observers['output'].ingestOutput).toHaveBeenCalledWith(
            [dummyItem1._value, dummyItem2._value, dummyItem3._value],
            {}
          );
          expect(group.observers['output'].ingest).not.toHaveBeenCalled();
          LogMock.hasLoggedCode(
            '1C:02:00',
            [dummyCollection._key, group._key],
            ['missingInCollectionItemKey']
          );
        }
      );
      it(
        'should hard rebuild the Group if no trackedChanges were specified ' +
          'and set notExistingItemKeys to the not found Item Keys (specific config)',
        () => {
          group.rebuild([], { background: true, force: false, key: 'frank' });
          expect(group.notFoundItemKeys).toStrictEqual([
            'missingInCollectionItemKey',
          ]);
          expect(group.observers['output'].ingestOutput).toHaveBeenCalledWith(
            [dummyItem1._value, dummyItem2._value, dummyItem3._value],
            { background: true, force: false, key: 'frank' }
          );
          LogMock.hasLoggedCode(
            '1C:02:00',
            [dummyCollection._key, group._key],
            ['missingInCollectionItemKey']
          );
        }
      );
      it(
        'should soft rebuild the Group if trackedChanges were specified ' +
          'and set notExistingItemKeys to the not found itemKeys (ADD)',
        () => {
          group.nextGroupOutput = [{ id: 'dummyItem1Key', name: 'jeff' }];
          group._preciseItemKeys = ['dummyItem1Key'];
          group.rebuild(
            [
              {
                index: 1,
                method: TrackedChangeMethod.ADD,
                key: 'dummyItem3Key',
              },
              {
                index: 2,
                method: TrackedChangeMethod.ADD,
                key: 'missingInCollectionItemKey',
              },
            ],
            { key: 'test', background: true }
          );
          expect(group.notFoundItemKeys).toStrictEqual([
            'missingInCollectionItemKey',
          ]);
          expect(group.observers['output'].ingestOutput).not.toHaveBeenCalled();
          expect(group.observers['output'].ingest).toHaveBeenCalledWith({
            key: 'test',
            background: true,
          });
          expect(group.nextGroupOutput).toStrictEqual([
            { id: 'dummyItem1Key', name: 'jeff' },
            { id: 'dummyItem3Key', name: 'hans' },
          ]);
          expect(group._preciseItemKeys).toStrictEqual([
            'dummyItem1Key',
            'dummyItem3Key',
          ]);
          LogMock.hasLoggedCode(
            '1C:02:00',
            [dummyCollection._key, group._key],
            ['missingInCollectionItemKey']
          );
        }
      );
      it('should soft rebuild the Group if trackedChanges were specified (REMOVE)', () => {
        group.nextGroupOutput = [
          { id: 'dummyItem1Key', name: 'jeff' },
          { id: 'dummyItem2Key', name: 'frank' },
          { id: 'dummyItem3Key', name: 'hans' },
        ];
        group._preciseItemKeys = [
          'dummyItem1Key',
          'dummyItem2Key',
          'dummyItem3Key',
        ];
        group.rebuild(
          [
            {
              index: 1,
              method: TrackedChangeMethod.REMOVE,
              key: 'dummyItem2Key',
            },
          ],
          { key: 'test', background: true }
        );
        expect(group.notFoundItemKeys).toStrictEqual([]);
        expect(group.observers['output'].ingestOutput).not.toHaveBeenCalled();
        expect(group.observers['output'].ingest).toHaveBeenCalledWith({
          key: 'test',
          background: true,
        });
        expect(group.nextGroupOutput).toStrictEqual([
          { id: 'dummyItem1Key', name: 'jeff' },
          { id: 'dummyItem3Key', name: 'hans' },
        ]);
        expect(group._preciseItemKeys).toStrictEqual([
          'dummyItem1Key',
          'dummyItem3Key',
        ]);
        LogMock.hasNotLogged('warn');
      });
      it(
        'should soft rebuild the Group if trackedChanges were specified ' +
          'and set notExistingItemKeys to the not found itemKeys (UPDATE)',
        () => {
          dummyItem1._value = { id: 'dummyItem1Key', name: 'frank' };
          group.nextGroupOutput = [{ id: 'dummyItem1Key', name: 'jeff' }];
          group._preciseItemKeys = ['dummyItem1Key'];
          group.rebuild(
            [
              {
                index: 0,
                method: TrackedChangeMethod.UPDATE,
                key: 'dummyItem1Key',
              },
              {
                index: 1,
                method: TrackedChangeMethod.UPDATE,
                key: 'missingInCollectionItemKey',
              },
            ],
            { key: 'test', background: true }
          );
          expect(group.notFoundItemKeys).toStrictEqual([
            'missingInCollectionItemKey',
          ]);
          expect(group.observers['output'].ingestOutput).not.toHaveBeenCalled();
          expect(group.observers['output'].ingest).toHaveBeenCalledWith({
            key: 'test',
            background: true,
          });
          expect(group.nextGroupOutput).toStrictEqual([
            { id: 'dummyItem1Key', name: 'frank' },
          ]);
          expect(group._preciseItemKeys).toStrictEqual(['dummyItem1Key']);
          LogMock.hasLoggedCode(
            '1C:02:00',
            [dummyCollection._key, group._key],
            ['missingInCollectionItemKey']
          );
        }
      );
      it(
        "shouldn't ingest the build Group output " +
          'if the Collection was not properly instantiated',
        () => {
          dummyCollection.isInstantiated = false;
          group.rebuild();
          expect(group.notFoundItemKeys).toStrictEqual([]);
          expect(group.observers['output'].ingestOutput).not.toHaveBeenCalled();
          expect(group.observers['output'].ingest).not.toHaveBeenCalled();
          LogMock.hasNotLogged('warn');
        }
      );
    });
});
}); | the_stack |
import * as _ from 'lodash';
import { CancellationToken, DebugConfiguration, Disposable, FileSystemWatcher, RelativePattern, TestController, TestItem, TestRun, TestRunProfileKind, TestRunRequest, tests, TestTag, Uri, window, workspace, WorkspaceFolder } from 'vscode';
import { instrumentOperation, sendError, sendInfo } from 'vscode-extension-telemetry-wrapper';
import { INVOCATION_PREFIX } from '../constants';
import { IProgressReporter } from '../debugger.api';
import { isStandardServerReady, progressProvider } from '../extension';
import { testSourceProvider } from '../provider/testSourceProvider';
import { IExecutionConfig } from '../runConfigs';
import { BaseRunner } from '../runners/baseRunner/BaseRunner';
import { JUnitRunner } from '../runners/junitRunner/JunitRunner';
import { TestNGRunner } from '../runners/testngRunner/TestNGRunner';
import { IJavaTestItem, IRunTestContext, TestKind, TestLevel } from '../types';
import { loadRunConfig } from '../utils/configUtils';
import { resolveLaunchConfigurationForRunner } from '../utils/launchUtils';
import { dataCache, ITestItemData } from './testItemDataCache';
import { findDirectTestChildrenForClass, findTestPackagesAndTypes, findTestTypesAndMethods, loadJavaProjects, resolvePath, synchronizeItemsRecursively, updateItemForDocumentWithDebounce } from './utils';
// The active test controller instance; replaced whenever createTestController() runs.
export let testController: TestController | undefined;
// Disposables for the workspace file watchers and their event subscriptions.
export const watchers: Disposable[] = [];
// Tag attached to the run profiles so they only apply to runnable test items.
export const runnableTag: TestTag = new TestTag('RunnableItem');
export function createTestController(): void {
if (!isStandardServerReady()) {
return;
}
testController?.dispose();
testController = tests.createTestController('javaTestController', 'Java Test');
testController.resolveHandler = async (item: TestItem) => {
await loadChildren(item);
};
testController.createRunProfile('Run Tests', TestRunProfileKind.Run, runHandler, true, runnableTag);
testController.createRunProfile('Debug Tests', TestRunProfileKind.Debug, runHandler, true, runnableTag);
startWatchingWorkspace();
}
/**
 * Resolve the children of a test item. Called with no item, it (re-)loads the
 * Java projects themselves; otherwise it expands the item one level deeper.
 */
export const loadChildren: (item: TestItem, token?: CancellationToken) => any = instrumentOperation('java.test.explorer.loadChildren', async (_operationId: string, item: TestItem, token?: CancellationToken) => {
    if (!item) {
        await loadJavaProjects();
        return;
    }
    const data: ITestItemData | undefined = dataCache.get(item);
    if (!data) {
        return;
    }
    switch (data.testLevel) {
        case TestLevel.Project: {
            const packageAndTypes: IJavaTestItem[] = await findTestPackagesAndTypes(data.jdtHandler, token);
            synchronizeItemsRecursively(item, packageAndTypes);
            break;
        }
        case TestLevel.Package:
            // unreachable in practice: packages get their children together with the project
            break;
        case TestLevel.Class: {
            if (!data.jdtHandler) {
                sendError(new Error('The class node does not have jdt handler id.'));
                return;
            }
            const testMethods: IJavaTestItem[] = await findDirectTestChildrenForClass(data.jdtHandler, token);
            synchronizeItemsRecursively(item, testMethods);
            break;
        }
        default:
            break;
    }
});
/**
 * Install file system watchers on every test source folder of every workspace
 * folder, keeping the test tree in sync on create/change/delete events.
 */
async function startWatchingWorkspace(): Promise<void> {
    if (!workspace.workspaceFolders) {
        return;
    }
    // Dispose the watchers from a previous invocation AND clear the array,
    // otherwise repeated calls re-dispose stale entries and the exported
    // `watchers` array grows without bound.
    for (const disposable of watchers) {
        disposable.dispose();
    }
    watchers.length = 0;
    for (const workspaceFolder of workspace.workspaceFolders) {
        const patterns: RelativePattern[] = await testSourceProvider.getTestSourcePattern(workspaceFolder);
        for (const pattern of patterns) {
            const watcher: FileSystemWatcher = workspace.createFileSystemWatcher(pattern);
            watchers.push(
                watcher,
                watcher.onDidCreate(async (uri: Uri) => {
                    // A newly created file only matters when it contains tests.
                    const testTypes: IJavaTestItem[] = await findTestTypesAndMethods(uri.toString());
                    if (testTypes.length === 0) {
                        return;
                    }
                    await updateItemForDocumentWithDebounce(uri, testTypes);
                }),
                watcher.onDidChange(async (uri: Uri) => {
                    await updateItemForDocumentWithDebounce(uri);
                }),
                watcher.onDidDelete(async (uri: Uri) => {
                    // Resolve the project/package chain the deleted file belonged
                    // to, then drop every test item pointing at this uri.
                    const pathsData: IJavaTestItem[] = await resolvePath(uri.toString());
                    if (_.isEmpty(pathsData) || pathsData.length < 2) {
                        return;
                    }
                    const projectData: IJavaTestItem = pathsData[0];
                    if (projectData.testLevel !== TestLevel.Project) {
                        return;
                    }
                    const belongingProject: TestItem | undefined = testController?.items.get(projectData.id);
                    if (!belongingProject) {
                        return;
                    }
                    const packageData: IJavaTestItem = pathsData[1];
                    if (packageData.testLevel !== TestLevel.Package) {
                        return;
                    }
                    const belongingPackage: TestItem | undefined = belongingProject.children.get(packageData.id);
                    if (!belongingPackage) {
                        return;
                    }
                    belongingPackage.children.forEach((item: TestItem) => {
                        if (item.uri?.toString() === uri.toString()) {
                            belongingPackage.children.delete(item.id);
                        }
                    });
                    // Prune the package node once it has no children left.
                    if (belongingPackage.children.size === 0) {
                        belongingProject.children.delete(belongingPackage.id);
                    }
                }),
            );
        }
    }
}
/** Shared handler for both run profiles; the profile label tells run from debug. */
async function runHandler(request: TestRunRequest, token: CancellationToken): Promise<void> {
    const isDebug: boolean = request.profile?.label.includes('Debug') ?? false;
    await runTests(request, { token, isDebug });
}
/**
 * Run or debug the tests selected by the request.
 *
 * Cancellation is implemented by resolving the wrapping promises early:
 * both the caller-supplied token and the progress reporter's token resolve
 * phase 1 with an empty selection, and phase 2 ends the test run.
 */
export const runTests: (request: TestRunRequest, option: IRunOption) => any = instrumentOperation('java.test.runTests', async (operationId: string, request: TestRunRequest, option: IRunOption) => {
    sendInfo(operationId, {
        isDebug: `${option.isDebug}`,
    });
    // Phase 1: resolve the test items to run. Resolves to [] on cancellation.
    const testItems: TestItem[] = await new Promise<TestItem[]>(async (resolve: (result: TestItem[]) => void): Promise<void> => {
        option.progressReporter = option.progressReporter ?? progressProvider?.createProgressReporter(option.isDebug ? 'Debug Tests' : 'Run Tests');
        option.token?.onCancellationRequested(() => {
            option.progressReporter?.done();
            return resolve([]);
        });
        const progressToken: CancellationToken | undefined = option.progressReporter?.getCancellationToken();
        option.onProgressCancelHandler = progressToken?.onCancellationRequested(() => {
            option.progressReporter?.done();
            return resolve([]);
        });
        option.progressReporter?.report('Searching tests...');
        const result: TestItem[] = await getIncludedItems(request, progressToken);
        // Expand down to method level so every runnable leaf is known.
        await expandTests(result, TestLevel.Method, progressToken);
        return resolve(result);
    });
    if (testItems.length === 0) {
        option.progressReporter?.done();
        return;
    }
    const run: TestRun = testController!.createTestRun(request);
    try {
        // Phase 2: execute the items, batched by project and then by test kind.
        await new Promise<void>(async (resolve: () => void): Promise<void> => {
            const token: CancellationToken = option.token ?? run.token;
            token.onCancellationRequested(() => {
                option.progressReporter?.done();
                run.end();
                return resolve();
            });
            enqueueTestMethods(testItems, run);
            const queue: TestItem[][] = mergeTestMethods(testItems);
            for (const testsInQueue of queue) {
                if (testsInQueue.length === 0) {
                    continue;
                }
                const testProjectMapping: Map<string, TestItem[]> = mapTestItemsByProject(testsInQueue);
                for (const [projectName, itemsPerProject] of testProjectMapping.entries()) {
                    const testKindMapping: Map<TestKind, TestItem[]> = mapTestItemsByKind(itemsPerProject);
                    for (const [kind, items] of testKindMapping.entries()) {
                        // Re-create the reporter if a previous batch cancelled it.
                        if (option.progressReporter?.isCancelled()) {
                            option.progressReporter = progressProvider?.createProgressReporter(option.isDebug ? 'Debug Tests' : 'Run Tests');
                        }
                        let delegatedToDebugger: boolean = false;
                        option.onProgressCancelHandler?.dispose();
                        option.progressReporter?.getCancellationToken().onCancellationRequested(() => {
                            if (delegatedToDebugger) {
                                // If the progress reporter has been delegated to debugger, a cancellation event
                                // might be emitted due to debug session finished, thus we will ignore such event.
                                return;
                            }
                            option.progressReporter?.done();
                            return resolve();
                        });
                        option.progressReporter?.report('Resolving launch configuration...');
                        // TODO: improve the config experience
                        const workspaceFolder: WorkspaceFolder | undefined = workspace.getWorkspaceFolder(items[0].uri!);
                        if (!workspaceFolder) {
                            window.showErrorMessage(`Failed to get workspace folder from test item: ${items[0].label}.`);
                            continue;
                        }
                        const config: IExecutionConfig | undefined = await loadRunConfig(workspaceFolder);
                        if (!config) {
                            // No run configuration resolved — skip this batch.
                            continue;
                        }
                        const testContext: IRunTestContext = {
                            isDebug: option.isDebug,
                            kind,
                            projectName,
                            testItems: items,
                            testRun: run,
                            workspaceFolder,
                        };
                        sendInfo(operationId, {
                            testFramework: TestKind[testContext.kind],
                        });
                        const runner: BaseRunner | undefined = getRunnerByContext(testContext);
                        if (!runner) {
                            window.showErrorMessage(`Failed to get suitable runner for the test kind: ${testContext.kind}.`);
                            continue;
                        }
                        try {
                            await runner.setup();
                            const resolvedConfiguration: DebugConfiguration = option.launchConfiguration ?? await resolveLaunchConfigurationForRunner(runner, testContext, config);
                            resolvedConfiguration.__progressId = option.progressReporter?.getId();
                            delegatedToDebugger = true;
                            await runner.run(resolvedConfiguration, token, option.progressReporter);
                        } finally {
                            // Always tear the runner down, even when the run threw.
                            await runner.tearDown();
                        }
                    }
                }
            }
            return resolve();
        });
    } finally {
        run.end();
    }
});
/**
 * Mark every given test item — and all of its descendants — as queued in the run.
 */
function enqueueTestMethods(testItems: TestItem[], run: TestRun): void {
    // Breadth-first sweep: children are appended while we scan the list.
    const pending: TestItem[] = [...testItems];
    for (let i: number = 0; i < pending.length; i++) {
        const current: TestItem = pending[i];
        run.enqueued(current);
        current.children.forEach((child: TestItem) => pending.push(child));
    }
}
/**
 * Resolve the test items covered by a run request, honoring its exclude list.
 * @param request the test run request
 * @param token optional cancellation token forwarded to expansion
 * @returns the included items, expanded to at least class level
 */
async function getIncludedItems(request: TestRunRequest, token?: CancellationToken): Promise<TestItem[]> {
    let testItems: TestItem[] = [];
    if (request.include) {
        testItems.push(...request.include);
    } else {
        // No explicit include list means the whole test tree is requested.
        testController?.items.forEach((item: TestItem) => {
            testItems.push(item);
        });
    }
    if (testItems.length === 0) {
        return [];
    }
    removeTestInvocations(testItems);
    testItems = await expandTests(testItems, TestLevel.Class, token);
    // Spread the readonly exclude list into a fresh mutable array instead of
    // suppressing the readonly->mutable mismatch with @ts-expect-error.
    const excludingItems: TestItem[] = await expandTests([...(request.exclude ?? [])], TestLevel.Class, token);
    testItems = _.differenceBy(testItems, excludingItems, 'id');
    return testItems;
}
/**
 * Expand the given test items until every collected item is at (or below)
 * the target level, loading children on demand.
 * @param testItems items to expand
 * @param targetLevel target level to expand
 * @param token optional cancellation token forwarded to child resolution
 */
async function expandTests(testItems: TestItem[], targetLevel: TestLevel, token?: CancellationToken): Promise<TestItem[]> {
    const expanded: Set<TestItem> = new Set();
    const pending: TestItem[] = [...testItems];
    for (let i: number = 0; i < pending.length; i++) {
        const current: TestItem = pending[i];
        const level: TestLevel | undefined = dataCache.get(current)?.testLevel;
        if (level === undefined) {
            continue;
        }
        if (level >= targetLevel) {
            expanded.add(current);
        } else {
            await loadChildren(current, token);
            current.children.forEach((child: TestItem) => pending.push(child));
        }
    }
    return [...expanded];
}
/**
 * Drop all invocation items (dynamic per-run children) from the given trees,
 * since their contents might have changed since the previous run.
 */
function removeTestInvocations(testItems: TestItem[]): void {
    const pending: TestItem[] = [...testItems];
    while (pending.length > 0) {
        const current: TestItem = pending.shift()!;
        if (current.id.startsWith(INVOCATION_PREFIX)) {
            current.parent?.children.delete(current.id);
        } else {
            current.children.forEach((child: TestItem) => pending.push(child));
        }
    }
}
/**
 * Group the selected items so each group can be handed to the runner at once:
 * the runner cannot mix classes and methods, so all classes form one group
 * while each individually selected method forms its own group. A class whose
 * methods are all selected is promoted to a class run.
 */
function mergeTestMethods(testItems: TestItem[]): TestItem[][] {
    // All selected test classes, keyed by item id.
    const classMapping: Map<string, TestItem> = new Map();
    for (const item of testItems) {
        if (dataCache.get(item)?.testLevel === TestLevel.Class) {
            classMapping.set(item.id, item);
        }
    }
    // Selected methods grouped by their declaring class item.
    const testMapping: Map<TestItem, Set<TestItem>> = new Map();
    for (const item of testItems) {
        if (dataCache.get(item)?.testLevel !== TestLevel.Method) {
            continue;
        }
        // Skip the method when its whole class is already selected.
        if (classMapping.has(item.parent?.id || '')) {
            continue;
        }
        const clazz: TestItem = item.parent!;
        let methods: Set<TestItem> | undefined = testMapping.get(clazz);
        if (!methods) {
            testMapping.set(clazz, methods = new Set());
        }
        methods.add(item);
    }
    const methodGroups: TestItem[][] = [];
    for (const [clazz, methods] of testMapping) {
        if (clazz.children.size === methods.size) {
            // Every method of the class is selected: promote to a class run.
            classMapping.set(clazz.id, clazz);
        } else {
            for (const method of methods) {
                methodGroups.push([method]);
            }
        }
    }
    return [[...classMapping.values()], ...methodGroups];
}
/**
 * Bucket the given test items by the name of the project they belong to.
 */
function mapTestItemsByProject(items: TestItem[]): Map<string, TestItem[]> {
    const byProject: Map<string, TestItem[]> = new Map<string, TestItem[]>();
    for (const item of items) {
        const projectName: string | undefined = dataCache.get(item)?.projectName;
        if (!projectName) {
            // Every cached test item is expected to carry its project name.
            sendError(new Error('Item does not have project name.'));
            continue;
        }
        let bucket: TestItem[] | undefined = byProject.get(projectName);
        if (!bucket) {
            byProject.set(projectName, bucket = []);
        }
        bucket.push(item);
    }
    return byProject;
}
/**
 * Bucket the given test items by their test framework kind; items with no
 * cached kind are silently dropped.
 */
function mapTestItemsByKind(items: TestItem[]): Map<TestKind, TestItem[]> {
    const byKind: Map<TestKind, TestItem[]> = new Map<TestKind, TestItem[]>();
    for (const item of items) {
        const testKind: TestKind | undefined = dataCache.get(item)?.testKind;
        if (testKind === undefined) {
            continue;
        }
        let bucket: TestItem[] | undefined = byKind.get(testKind);
        if (!bucket) {
            byKind.set(testKind, bucket = []);
        }
        bucket.push(item);
    }
    return byKind;
}
/**
 * Create a runner matching the test framework of the given context,
 * or undefined when the kind is unsupported.
 */
function getRunnerByContext(testContext: IRunTestContext): BaseRunner | undefined {
    // JUnit 4 and JUnit 5 share the same runner implementation.
    if (testContext.kind === TestKind.JUnit || testContext.kind === TestKind.JUnit5) {
        return new JUnitRunner(testContext);
    }
    if (testContext.kind === TestKind.TestNG) {
        return new TestNGRunner(testContext);
    }
    return undefined;
}
/**
 * Options accepted by {@link runTests}.
 */
interface IRunOption {
    // Whether to launch the tests under the debugger.
    isDebug: boolean;
    // Reporter used to surface progress; created lazily when omitted.
    progressReporter?: IProgressReporter;
    // Disposable for the reporter's cancellation subscription.
    onProgressCancelHandler?: Disposable;
    // Pre-resolved launch configuration; resolved per runner when omitted.
    launchConfiguration?: DebugConfiguration;
    // External cancellation token; falls back to the test run's own token.
    token?: CancellationToken;
}
import * as os from 'os';
import * as nls from 'vscode-nls';
import * as vscode from 'vscode';
import { configPrefix } from '../LanguageServer/extension';
// Configure vscode-nls before loading the message bundle used by localize().
nls.config({ messageFormat: nls.MessageFormat.bundle, bundleFormat: nls.BundleFormat.standalone })();
const localize: nls.LocalizeFunc = nls.loadMessageBundle();
// Returns true when the string is a debug-configuration name generated by this
// extension, i.e. starts with one of the known debugger prefixes.
export function isDebugLaunchStr(str: string): boolean {
    const debuggerPrefixes: string[] = ["(gdb) ", "(lldb) ", "(Windows) "];
    return debuggerPrefixes.some((prefix: string) => str.startsWith(prefix));
}
// Quick-pick entry wrapping a concrete debug configuration.
export interface ConfigMenu extends vscode.QuickPickItem {
    configuration: CppDebugConfiguration;
}
// Debugger back-ends supported by this extension.
export enum DebuggerType {
    cppvsdbg = "cppvsdbg",
    cppdbg = "cppdbg",
    all = "all"
}
// UI surface a debug session was started from.
export enum DebuggerEvent {
    debugPanel = "debugPanel", // F5 or "Run and Debug" Panel
    playButton = "playButton", // "Run and Debug" play button
    addConfigGear = "AddConfigGear"
}
export enum TaskStatus {
    recentlyUsed = "Recently Used Task", // A configured task that has been used recently.
    configured = "Configured Task", // The tasks that are configured in tasks.json file.
    detected = "Detected Task" // The tasks that are available based on detected compilers.
}
// Where a debug configuration was defined.
export enum ConfigSource {
    singleFile = "singleFile", // a debug config defined for a single mode file
    workspaceFolder = "workspaceFolder", // a debug config defined in launch.json
    workspace = "workspace", // a debug config defined in workspace level
    global = "global", // a debug config defined in user level
    unknown = "unknown"
}
export enum ConfigMode {
    launchConfig = "launchConfig",
    noLaunchConfig = "noLaunchConfig",
    unknown = "unknown"
}
export enum DebugType {
    debug = "debug",
    run = "run"
}
// vscode.DebugConfiguration extended with metadata used by this extension's UI.
export interface CppDebugConfiguration extends vscode.DebugConfiguration {
    detail?: string;
    taskStatus?: TaskStatus;
    isDefault?: boolean; // The debug configuration is considered as default, if the prelaunch task is set as default.
    configSource?: ConfigSource;
    debuggerEvent?: DebuggerEvent;
    debugType?: DebugType;
    existing?: boolean;
}
// A launch.json snippet offered to the user (label/description for the picker,
// bodyText is the JSON text to insert).
export interface IConfigurationSnippet {
    label: string;
    description: string;
    bodyText: string;
    // Internal
    isInitialConfiguration?: boolean;
    debuggerType: DebuggerType;
}
// Prefix every line of the given text with `numTabs` tabs. The trailing trim()
// strips the indent that was just added to the very first line (and any
// trailing whitespace), so the first line stays flush for embedding.
export function indentJsonString(json: string, numTabs: number = 1): string {
    const indent: string = '\t'.repeat(numTabs);
    const indented: string = json.split('\n').map(line => indent + line).join('\n');
    return indented.trim();
}
// Substitute each "{i}" placeholder with args[i]. Only the first occurrence of
// each placeholder is replaced (String.replace with a string pattern).
function formatString(format: string, args: string[]): string {
    let result: string = format;
    args.forEach((arg: string, index: number) => {
        result = result.replace("{" + index + "}", arg);
    });
    return result;
}
// Builds the shared body of a "launch" configuration (newline-separated
// key/value lines, no surrounding braces). cppdbg gets "externalConsole";
// other types (cppvsdbg) get "console": "externalTerminal".
function createLaunchString(name: string, type: string, executable: string): string {
    return `"name": "${name}",
"type": "${type}",
"request": "launch",
"program": "${localize("enter.program.name", "enter program name, for example {0}", "$\{workspaceFolder\}" + "/" + executable).replace(/\"/g, "\\\"")}",
"args": [],
"stopAtEntry": false,
"cwd": "$\{fileDirname\}",
"environment": [],
${ type === "cppdbg" ? `"externalConsole": false` : `"console": "externalTerminal"` }
`;
}
// Builds the body of an attach-by-process-id configuration. For cppdbg the
// "{0}" placeholder receives an extra "program" entry; otherwise it is empty.
function createAttachString(name: string, type: string, executable: string): string {
    return formatString(`
"name": "${name}",
"type": "${type}",
"request": "attach",{0}
"processId": "$\{command:pickProcess\}"
`, [type === "cppdbg" ? `${os.EOL}"program": "${localize("enter.program.name", "enter program name, for example {0}", "$\{workspaceFolder\}" + "/" + executable).replace(/\"/g, "\\\"")}",` : ""]);
}
// Builds the body of a remote attach configuration, picking the process on the
// remote end via the pickRemoteProcess command.
function createRemoteAttachString(name: string, type: string, executable: string): string {
    return `
"name": "${name}",
"type": "${type}",
"request": "attach",
"program": "${localize("enter.program.name", "enter program name, for example {0}", "$\{workspaceFolder\}" + "/" + executable).replace(/\"/g, "\\\"")}",
"processId": "$\{command:pickRemoteProcess\}"
`;
}
// Builds the "pipeTransport" JSON fragment. The debugger binary is assumed to
// live in /usr/bin on the remote side; pipeArgs is serialized as a JSON array.
function createPipeTransportString(pipeProgram: string, debuggerProgram: string, pipeArgs: string[] = []): string {
    const fragmentLines: string[] = [
        '',
        '"pipeTransport": {',
        `\t"debuggerPath": "/usr/bin/${debuggerProgram}",`,
        `\t"pipeProgram": "${pipeProgram}",`,
        `\t"pipeArgs": ${JSON.stringify(pipeArgs)},`,
        '\t"pipeCwd": ""',
        '}',
    ];
    return fragmentLines.join('\n');
}
// Provider of launch/attach configuration snippets for one debugger flavor.
export interface IConfiguration {
    // Snippet for launching a new process under the debugger.
    GetLaunchConfiguration(): IConfigurationSnippet;
    // Snippet for attaching to an existing process.
    GetAttachConfiguration(): IConfigurationSnippet;
}
// Common state shared by all snippet providers; subclasses supply the actual
// launch/attach snippet bodies.
abstract class Configuration implements IConfiguration {
    public miDebugger = "cppdbg";
    public windowsDebugger = "cppvsdbg";

    // Parameter properties declare and assign the per-debugger settings.
    constructor(
        public MIMode: string,
        public executable: string,
        public pipeProgram: string,
        public additionalProperties: string = "") {
    }

    abstract GetLaunchConfiguration(): IConfigurationSnippet;
    abstract GetAttachConfiguration(): IConfigurationSnippet;
}
// Snippets for plain local MI (gdb/lldb) launch and attach configurations.
export class MIConfigurations extends Configuration {

    /** Snippet launching a program locally under the MI debugger. */
    public GetLaunchConfiguration(): IConfigurationSnippet {
        const name: string = `(${this.MIMode}) ${localize("launch.string", "Launch").replace(/\"/g, "\\\"")}`;
        // Only suggest a miDebuggerPath for cppdbg on Windows.
        const miDebuggerPathEntry: string = this.miDebugger === "cppdbg" && os.platform() === "win32" ? `,${os.EOL}\t"miDebuggerPath": "/path/to/gdb"` : "";
        const extraProperties: string = this.additionalProperties ? `,${os.EOL}\t${indentJsonString(this.additionalProperties)}` : "";
        const body: string = formatString(`{
\t${indentJsonString(createLaunchString(name, this.miDebugger, this.executable))},
\t"MIMode": "${this.MIMode}"{0}{1}
}`, [miDebuggerPathEntry, extraProperties]);
        return {
            label: configPrefix + name,
            description: localize("launch.with", "Launch with {0}.", this.MIMode).replace(/\"/g, "\\\""),
            bodyText: body.trim(),
            isInitialConfiguration: true,
            debuggerType: DebuggerType.cppdbg
        };
    }

    /** Snippet attaching to a local process via the MI debugger. */
    public GetAttachConfiguration(): IConfigurationSnippet {
        const name: string = `(${this.MIMode}) ${localize("attach.string", "Attach").replace(/\"/g, "\\\"")}`;
        const miDebuggerPathEntry: string = this.miDebugger === "cppdbg" && os.platform() === "win32" ? `,${os.EOL}\t"miDebuggerPath": "/path/to/gdb"` : "";
        const extraProperties: string = this.additionalProperties ? `,${os.EOL}\t${indentJsonString(this.additionalProperties)}` : "";
        const body: string = formatString(`{
\t${indentJsonString(createAttachString(name, this.miDebugger, this.executable))},
\t"MIMode": "${this.MIMode}"{0}{1}
}`, [miDebuggerPathEntry, extraProperties]);
        return {
            label: configPrefix + name,
            description: localize("attach.with", "Attach with {0}.", this.MIMode).replace(/\"/g, "\\\""),
            bodyText: body.trim(),
            debuggerType: DebuggerType.cppdbg
        };
    }
}
// Snippets for launch/attach configurations that tunnel through a pipe program.
export class PipeTransportConfigurations extends Configuration {

    /** Snippet launching the program through a pipe transport. */
    public GetLaunchConfiguration(): IConfigurationSnippet {
        const name: string = `(${this.MIMode}) ${localize("pipe.launch", "Pipe Launch").replace(/\"/g, "\\\"")}`;
        const extraProperties: string = this.additionalProperties ? `,${os.EOL}\t${indentJsonString(this.additionalProperties)}` : "";
        const body: string = formatString(`
{
\t${indentJsonString(createLaunchString(name, this.miDebugger, this.executable))},
\t${indentJsonString(createPipeTransportString(this.pipeProgram, this.MIMode))},
\t"MIMode": "${this.MIMode}"{0}
}`, [extraProperties]);
        return {
            label: configPrefix + name,
            description: localize("pipe.launch.with", "Pipe Launch with {0}.", this.MIMode).replace(/\"/g, "\\\""),
            bodyText: body.trim(),
            debuggerType: DebuggerType.cppdbg
        };
    }

    /** Snippet attaching to a remote process through a pipe transport. */
    public GetAttachConfiguration(): IConfigurationSnippet {
        const name: string = `(${this.MIMode}) ${localize("pipe.attach", "Pipe Attach").replace(/\"/g, "\\\"")}`;
        const extraProperties: string = this.additionalProperties ? `,${os.EOL}\t${indentJsonString(this.additionalProperties)}` : "";
        const body: string = formatString(`
{
\t${indentJsonString(createRemoteAttachString(name, this.miDebugger, this.executable))},
\t${indentJsonString(createPipeTransportString(this.pipeProgram, this.MIMode))},
\t"MIMode": "${this.MIMode}"{0}
}`, [extraProperties]);
        return {
            label: configPrefix + name,
            description: localize("pipe.attach.with", "Pipe Attach with {0}.", this.MIMode).replace(/\"/g, "\\\""),
            bodyText: body.trim(),
            debuggerType: DebuggerType.cppdbg
        };
    }
}
// Snippets for the Visual Studio (cppvsdbg) debugger on Windows.
export class WindowsConfigurations extends Configuration {

    /** Snippet launching the program with the Visual Studio C/C++ debugger. */
    public GetLaunchConfiguration(): IConfigurationSnippet {
        const name: string = `(Windows) ${localize("launch.string", "Launch").replace(/\"/g, "\\\"")}`;
        const launchBody: string = indentJsonString(createLaunchString(name, this.windowsDebugger, this.executable));
        const body: string = `
{
\t${launchBody}
}`;
        return {
            label: configPrefix + name,
            description: localize("launch.with.vs.debugger", "Launch with the Visual Studio C/C++ debugger.").replace(/\"/g, "\\\""),
            bodyText: body.trim(),
            isInitialConfiguration: true,
            debuggerType: DebuggerType.cppvsdbg
        };
    }

    /** Snippet attaching to a process with the Visual Studio C/C++ debugger. */
    public GetAttachConfiguration(): IConfigurationSnippet {
        const name: string = `(Windows) ${localize("attach.string", "Attach").replace(/\"/g, "\\\"")}`;
        const attachBody: string = indentJsonString(createAttachString(name, this.windowsDebugger, this.executable));
        const body: string = `
{
\t${attachBody}
}`;
        return {
            label: configPrefix + name,
            description: localize("attach.with.vs.debugger", "Attach to a process with the Visual Studio C/C++ debugger.").replace(/\"/g, "\\\""),
            bodyText: body.trim(),
            debuggerType: DebuggerType.cppvsdbg
        };
    }
}
export class WSLConfigurations extends Configuration {
// Detects if the current VSCode is 32-bit and uses the correct bash.exe
public bashPipeProgram = process.arch === 'ia32' ? "${env:windir}\\\\sysnative\\\\bash.exe" : "${env:windir}\\\\system32\\\\bash.exe";
public GetLaunchConfiguration(): IConfigurationSnippet {
const name: string = `(${this.MIMode}) ${localize("bash.on.windows.launch", "Bash on Windows Launch").replace(/\"/g, "\\\"")}`;
const body: string = formatString(`
{
\t${indentJsonString(createLaunchString(name, this.miDebugger, this.executable))},
\t${indentJsonString(createPipeTransportString(this.bashPipeProgram, this.MIMode, ["-c"]))}{0}
}`, [this.additionalProperties ? `,${os.EOL}\t${indentJsonString(this.additionalProperties)}` : ""]);
return {
"label": configPrefix + name,
"description": localize("launch.bash.windows", "Launch in Bash on Windows using {0}.", this.MIMode).replace(/\"/g, "\\\""),
"bodyText": body.trim(),
"debuggerType": DebuggerType.cppdbg
};
}
public GetAttachConfiguration(): IConfigurationSnippet {
const name: string = `(${this.MIMode}) ${localize("bash.on.windows.attach", "Bash on Windows Attach").replace(/\"/g, "\\\"")}`;
const body: string = formatString(`
{
\t${indentJsonString(createRemoteAttachString(name, this.miDebugger, this.executable))},
\t${indentJsonString(createPipeTransportString(this.bashPipeProgram, this.MIMode, ["-c"]))}{0}
}`, [this.additionalProperties ? `,${os.EOL}\t${indentJsonString(this.additionalProperties)}` : ""]);
return {
"label": configPrefix + name,
"description": localize("remote.attach.bash.windows", "Attach to a remote process running in Bash on Windows using {0}.", this.MIMode).replace(/\"/g, "\\\""),
"bodyText": body.trim(),
"debuggerType": DebuggerType.cppdbg
};
}
} | the_stack |
import type { Atom, WritableAtom } from './atom'
import {
cancelSuspensePromise,
createSuspensePromise,
isEqualSuspensePromise,
isSuspensePromise,
isSuspensePromiseAlreadyCancelled,
} from './suspensePromise'
import type { SuspensePromise } from './suspensePromise'
// Unwraps a Promise type: ResolveType<Promise<V>> is V; otherwise identity.
type ResolveType<T> = T extends Promise<infer V> ? V : T
// Convenience aliases used throughout the store implementation.
type AnyAtom = Atom<unknown>
type AnyWritableAtom = WritableAtom<unknown, unknown, void | Promise<void>>
// Cleanup callback (see Mounted['u']).
type OnUnmount = () => void
// Getter/setter types as seen by a writable atom's write function.
type WriteGetter = Parameters<WritableAtom<unknown, unknown>['write']>[0]
type Setter = Parameters<WritableAtom<unknown, unknown>['write']>[1]
const hasInitialValue = <T extends Atom<unknown>>(
  atom: T
): atom is T & (T extends Atom<infer Value> ? { init: Value } : never) => {
  // Atoms carrying an initial value expose it via an `init` property.
  return 'init' in atom
}
type ReadError = unknown
type Revision = number
type InvalidatedRevision = number
// Maps each dependency atom to the revision it had when it was last read.
type ReadDependencies = Map<AnyAtom, Revision>
// immutable atom state
export type AtomState<Value = unknown> = {
  r: Revision // "r"evision, incremented whenever the value changes
  i?: InvalidatedRevision // "i"nvalidated revision, marks the state as stale
  d: ReadDependencies // "d"ependencies recorded for this state
} & ({ e: ReadError } | { p: SuspensePromise } | { v: ResolveType<Value> }) // one of: "e"rror, pending "p"romise, or resolved "v"alue
export type VersionObject = { p?: VersionObject } // "p"arent version
type Listeners = Set<(version?: VersionObject) => void>
type Dependents = Set<AnyAtom>
// Bookkeeping for a mounted atom.
type Mounted = {
  l: Listeners // "l"isteners subscribed to this atom
  d: Dependents // "d"ependent atoms
  u?: OnUnmount // "u"nmount callback, if any
}
// for debugging purpose only
type StateListener = (updatedAtom: AnyAtom, isNewAtom: boolean) => void
type MountedAtoms = Set<AnyAtom>
// store methods
export const READ_ATOM = 'r'
export const WRITE_ATOM = 'w'
export const COMMIT_ATOM = 'c'
export const SUBSCRIBE_ATOM = 's'
export const RESTORE_ATOMS = 'h'
// store dev methods (these are tentative and subject to change)
export const DEV_SUBSCRIBE_STATE = 'n'
export const DEV_GET_MOUNTED_ATOMS = 'l'
export const DEV_GET_ATOM_STATE = 'a'
export const DEV_GET_MOUNTED = 'm'
export const createStore = (
initialValues?: Iterable<readonly [AnyAtom, unknown]>
) => {
const committedAtomStateMap = new WeakMap<AnyAtom, AtomState>()
const mountedMap = new WeakMap<AnyAtom, Mounted>()
const pendingMap = new Map<
AnyAtom,
AtomState /* prevAtomState */ | undefined
>()
let stateListeners: Set<StateListener>
let mountedAtoms: MountedAtoms
if (typeof process === 'object' && process.env.NODE_ENV !== 'production') {
stateListeners = new Set()
mountedAtoms = new Set()
}
if (initialValues) {
for (const [atom, value] of initialValues) {
const atomState: AtomState = { v: value, r: 0, d: new Map() }
if (
typeof process === 'object' &&
process.env.NODE_ENV !== 'production'
) {
Object.freeze(atomState)
if (!hasInitialValue(atom)) {
console.warn(
'Found initial value for derived atom which can cause unexpected behavior',
atom
)
}
}
committedAtomStateMap.set(atom, atomState)
}
}
type SuspensePromiseCache = Map<VersionObject | undefined, SuspensePromise>
const suspensePromiseCacheMap = new WeakMap<AnyAtom, SuspensePromiseCache>()
const addSuspensePromiseToCache = (
version: VersionObject | undefined,
atom: AnyAtom,
suspensePromise: SuspensePromise
): void => {
let cache = suspensePromiseCacheMap.get(atom)
if (!cache) {
cache = new Map()
suspensePromiseCacheMap.set(atom, cache)
}
suspensePromise.then(() => {
if ((cache as SuspensePromiseCache).get(version) === suspensePromise) {
;(cache as SuspensePromiseCache).delete(version)
if (!(cache as SuspensePromiseCache).size) {
suspensePromiseCacheMap.delete(atom)
}
}
})
cache.set(version, suspensePromise)
}
const cancelAllSuspensePromiseInCache = (
atom: AnyAtom
): Set<VersionObject | undefined> => {
const versionSet = new Set<VersionObject | undefined>()
const cache = suspensePromiseCacheMap.get(atom)
if (cache) {
suspensePromiseCacheMap.delete(atom)
cache.forEach((suspensePromise, version) => {
cancelSuspensePromise(suspensePromise)
versionSet.add(version)
})
}
return versionSet
}
const versionedAtomStateMapMap = new WeakMap<
VersionObject,
Map<AnyAtom, AtomState>
>()
const getVersionedAtomStateMap = (version: VersionObject) => {
let versionedAtomStateMap = versionedAtomStateMapMap.get(version)
if (!versionedAtomStateMap) {
versionedAtomStateMap = new Map()
versionedAtomStateMapMap.set(version, versionedAtomStateMap)
}
return versionedAtomStateMap
}
const getAtomState = <Value>(
version: VersionObject | undefined,
atom: Atom<Value>
): AtomState<Value> | undefined => {
if (version) {
const versionedAtomStateMap = getVersionedAtomStateMap(version)
let atomState = versionedAtomStateMap.get(atom) as
| AtomState<Value>
| undefined
if (!atomState) {
atomState = getAtomState(version.p, atom)
if (atomState) {
if ('p' in atomState) {
atomState.p.then(() => versionedAtomStateMap.delete(atom))
}
versionedAtomStateMap.set(atom, atomState)
}
}
return atomState
}
return committedAtomStateMap.get(atom) as AtomState<Value> | undefined
}
const setAtomState = <Value>(
version: VersionObject | undefined,
atom: Atom<Value>,
atomState: AtomState<Value>
): void => {
if (typeof process === 'object' && process.env.NODE_ENV !== 'production') {
Object.freeze(atomState)
}
if (version) {
const versionedAtomStateMap = getVersionedAtomStateMap(version)
versionedAtomStateMap.set(atom, atomState)
} else {
const prevAtomState = committedAtomStateMap.get(atom)
committedAtomStateMap.set(atom, atomState)
if (!pendingMap.has(atom)) {
pendingMap.set(atom, prevAtomState)
}
}
}
const getReadDependencies = (
version: VersionObject | undefined,
dependencies: Set<AnyAtom>
): ReadDependencies =>
new Map(
Array.from(dependencies).map((a) => [a, getAtomState(version, a)?.r || 0])
)
const setAtomValue = <Value>(
version: VersionObject | undefined,
atom: Atom<Value>,
value: ResolveType<Value>,
dependencies?: Set<AnyAtom>,
suspensePromise?: SuspensePromise
): AtomState<Value> => {
const atomState = getAtomState(version, atom)
if (atomState) {
if (
suspensePromise &&
(!('p' in atomState) ||
!isEqualSuspensePromise(atomState.p, suspensePromise))
) {
// newer async read is running, not updating
return atomState
}
if ('p' in atomState) {
cancelSuspensePromise(atomState.p)
}
}
const nextAtomState: AtomState<Value> = {
v: value,
r: atomState?.r || 0,
d: dependencies
? getReadDependencies(version, dependencies)
: atomState?.d || new Map(),
}
if (
!atomState ||
!('v' in atomState) || // new value, or
!Object.is(atomState.v, value) // different value
) {
++nextAtomState.r // increment revision
if (nextAtomState.d.has(atom)) {
nextAtomState.d.set(atom, nextAtomState.r)
}
}
setAtomState(version, atom, nextAtomState)
return nextAtomState
}
const setAtomReadError = <Value>(
version: VersionObject | undefined,
atom: Atom<Value>,
error: ReadError,
dependencies?: Set<AnyAtom>,
suspensePromise?: SuspensePromise
): AtomState<Value> => {
const atomState = getAtomState(version, atom)
if (atomState) {
if (
suspensePromise &&
(!('p' in atomState) ||
!isEqualSuspensePromise(atomState.p, suspensePromise))
) {
// newer async read is running, not updating
return atomState
}
if ('p' in atomState) {
cancelSuspensePromise(atomState.p)
}
}
const nextAtomState: AtomState<Value> = {
e: error, // set read error
r: atomState?.r || 0,
d: dependencies
? getReadDependencies(version, dependencies)
: atomState?.d || new Map(),
}
setAtomState(version, atom, nextAtomState)
return nextAtomState
}
// Record a pending suspense promise for `atom`, cancelling any previous,
// different pending promise and registering the new one in the cache.
const setAtomSuspensePromise = <Value>(
  version: VersionObject | undefined,
  atom: Atom<Value>,
  suspensePromise: SuspensePromise,
  dependencies?: Set<AnyAtom>
): AtomState<Value> => {
  const atomState = getAtomState(version, atom)
  if (atomState && 'p' in atomState) {
    if (isEqualSuspensePromise(atomState.p, suspensePromise)) {
      // the same promise, not updating
      return atomState
    }
    cancelSuspensePromise(atomState.p)
  }
  addSuspensePromiseToCache(version, atom, suspensePromise)
  const nextAtomState: AtomState<Value> = {
    p: suspensePromise,
    r: atomState?.r || 0,
    d: dependencies
      ? getReadDependencies(version, dependencies)
      : atomState?.d || new Map(),
  }
  setAtomState(version, atom, nextAtomState)
  return nextAtomState
}
// Store either a plain value or a promise for `atom`. A promise is wrapped
// in a suspense promise whose settlement writes the resolved value (or a
// read error) back into state and flushes pending listeners.
const setAtomPromiseOrValue = <Value>(
  version: VersionObject | undefined,
  atom: Atom<Value>,
  promiseOrValue: Value,
  dependencies?: Set<AnyAtom>
): AtomState<Value> => {
  if (promiseOrValue instanceof Promise) {
    const suspensePromise = createSuspensePromise(
      promiseOrValue
        .then((value: ResolveType<Value>) => {
          setAtomValue(version, atom, value, dependencies, suspensePromise)
          flushPending(version)
        })
        .catch((e) => {
          // a rejection with a promise means the read itself suspended
          if (e instanceof Promise) {
            if (
              isSuspensePromise(e) &&
              isSuspensePromiseAlreadyCancelled(e)
            ) {
              // schedule another read later
              // FIXME not 100% confident with this code
              e.then(() => readAtomState(version, atom, true))
            }
            return e
          }
          setAtomReadError(version, atom, e, dependencies, suspensePromise)
          flushPending(version)
        })
    )
    return setAtomSuspensePromise(
      version,
      atom,
      suspensePromise,
      dependencies
    )
  }
  return setAtomValue(
    version,
    atom,
    promiseOrValue as ResolveType<Value>,
    dependencies
  )
}
const setAtomInvalidated = <Value>(
version: VersionObject | undefined,
atom: Atom<Value>
): void => {
const atomState = getAtomState(version, atom)
if (atomState) {
const nextAtomState: AtomState<Value> = {
...atomState, // copy everything
i: atomState.r, // set invalidated revision
}
setAtomState(version, atom, nextAtomState)
} else if (
typeof process === 'object' &&
process.env.NODE_ENV !== 'production'
) {
console.warn('[Bug] could not invalidate non existing atom', atom)
}
}
// Compute (or reuse) the state for `atom`, tracking read dependencies.
// Without `force`, the cached state is reused when every dependency is
// clean (no error, no pending promise, unchanged revision).
const readAtomState = <Value>(
  version: VersionObject | undefined,
  atom: Atom<Value>,
  force?: boolean
): AtomState<Value> => {
  if (!force) {
    const atomState = getAtomState(version, atom)
    if (atomState) {
      // refresh dependencies that are unmounted or invalidated first,
      // so the freshness check below sees their latest revisions
      atomState.d.forEach((_, a) => {
        if (a !== atom) {
          if (!mountedMap.has(a)) {
            // not mounted
            readAtomState(version, a)
          } else {
            const aState = getAtomState(version, a)
            if (
              aState &&
              aState.r === aState.i // revision is invalidated
            ) {
              readAtomState(version, a)
            }
          }
        }
      })
      // reuse the cached state when all dependencies are clean
      if (
        Array.from(atomState.d.entries()).every(([a, r]) => {
          const aState = getAtomState(version, a)
          return (
            aState &&
            !('e' in aState) && // no read error
            !('p' in aState) && // no suspense promise
            aState.r === r // revision is equal to the last one
          )
        })
      ) {
        return atomState
      }
    }
  }
  // cache miss: run the atom's read function, recording every dependency
  // the getter touches
  const dependencies = new Set<AnyAtom>()
  try {
    const promiseOrValue = atom.read(<V>(a: Atom<V>) => {
      dependencies.add(a)
      const aState =
        (a as AnyAtom) === atom
          ? getAtomState(version, a)
          : readAtomState(version, a)
      if (aState) {
        if ('e' in aState) {
          throw aState.e // read error
        }
        if ('p' in aState) {
          throw aState.p // suspense promise
        }
        return aState.v as ResolveType<V> // value
      }
      if (hasInitialValue(a)) {
        return a.init
      }
      // NOTE invalid derived atoms can reach here
      throw new Error('no atom init')
    })
    return setAtomPromiseOrValue(version, atom, promiseOrValue, dependencies)
  } catch (errorOrPromise) {
    // a thrown promise suspends; anything else becomes a read error
    if (errorOrPromise instanceof Promise) {
      const suspensePromise = createSuspensePromise(errorOrPromise)
      return setAtomSuspensePromise(
        version,
        atom,
        suspensePromise,
        dependencies
      )
    }
    return setAtomReadError(version, atom, errorOrPromise, dependencies)
  }
}
const readAtom = <Value>(
readingAtom: Atom<Value>,
version?: VersionObject
): AtomState<Value> => {
const atomState = readAtomState(version, readingAtom)
return atomState
}
const addAtom = (addingAtom: AnyAtom): Mounted => {
let mounted = mountedMap.get(addingAtom)
if (!mounted) {
mounted = mountAtom(addingAtom)
}
return mounted
}
// FIXME doesn't work with mutally dependent atoms
// An atom can be unmounted when nothing listens to it and no other atom
// depends on it (a lone self-dependency is allowed).
const canUnmountAtom = (atom: AnyAtom, mounted: Mounted) => {
  if (mounted.l.size) {
    return false
  }
  if (!mounted.d.size) {
    return true
  }
  return mounted.d.size === 1 && mounted.d.has(atom)
}
// Unmount `deletingAtom` if it is mounted and no longer needed.
const delAtom = (deletingAtom: AnyAtom): void => {
  const mounted = mountedMap.get(deletingAtom)
  if (!mounted) {
    return
  }
  if (canUnmountAtom(deletingAtom, mounted)) {
    unmountAtom(deletingAtom)
  }
}
const invalidateDependents = <Value>(
version: VersionObject | undefined,
atom: Atom<Value>
): void => {
const mounted = mountedMap.get(atom)
mounted?.d.forEach((dependent) => {
if (dependent !== atom) {
setAtomInvalidated(version, dependent)
invalidateDependents(version, dependent)
}
})
}
// Run `atom.write` with a getter/setter pair. `isSync` tracks whether the
// write function has returned: setter calls made after that point must
// flush their own pending updates, and `version` is cleared once the
// synchronous phase ends.
const writeAtomState = <Value, Update, Result extends void | Promise<void>>(
  version: VersionObject | undefined,
  atom: WritableAtom<Value, Update, Result>,
  update: Update
): void | Promise<void> => {
  let isSync = true
  const writeGetter: WriteGetter = <V>(
    a: Atom<V>,
    options?: {
      unstable_promise: boolean
    }
  ) => {
    // backward compatibility: `options` used to be a bare boolean
    if (typeof options === 'boolean') {
      console.warn('[DEPRECATED] Please use { unstable_promise: true }')
      options = { unstable_promise: options }
    }
    const aState = readAtomState(version, a)
    if ('e' in aState) {
      throw aState.e // read error
    }
    if ('p' in aState) {
      if (options?.unstable_promise) {
        // opt-in: return a promise that retries once the pending read settles
        return aState.p.then(() =>
          writeGetter(a as unknown as Atom<Promise<unknown>>, options as any)
        ) as Promise<ResolveType<V>> // FIXME proper typing
      }
      if (
        typeof process === 'object' &&
        process.env.NODE_ENV !== 'production'
      ) {
        console.info(
          'Reading pending atom state in write operation. We throw a promise for now.',
          a
        )
      }
      throw aState.p // suspense promise
    }
    if ('v' in aState) {
      return aState.v as ResolveType<V> // value
    }
    if (
      typeof process === 'object' &&
      process.env.NODE_ENV !== 'production'
    ) {
      console.warn(
        '[Bug] no value found while reading atom in write operation. This is probably a bug.',
        a
      )
    }
    throw new Error('no value found')
  }
  const setter: Setter = <V, U, R extends void | Promise<void>>(
    a: WritableAtom<V, U, R>,
    v?: V
  ) => {
    let promiseOrVoid: void | Promise<void>
    if ((a as AnyWritableAtom) === atom) {
      // writing self: store the value directly (requires an initial value)
      if (!hasInitialValue(a)) {
        // NOTE technically possible but restricted as it may cause bugs
        throw new Error('atom not writable')
      }
      // cancel in-flight reads of this atom across versions, and mirror
      // the new value into those other versions as well
      const versionSet = cancelAllSuspensePromiseInCache(a)
      versionSet.forEach((cancelledVersion) => {
        if (cancelledVersion !== version) {
          setAtomPromiseOrValue(cancelledVersion, a, v)
        }
      })
      setAtomPromiseOrValue(version, a, v)
      invalidateDependents(version, a)
    } else {
      // writing another atom: recurse into its write function
      promiseOrVoid = writeAtomState(version, a as AnyWritableAtom, v)
    }
    if (!isSync) {
      // async setter call: flush here since the outer write already returned
      flushPending(version)
    }
    return promiseOrVoid
  }
  const promiseOrVoid = atom.write(writeGetter, setter, update)
  isSync = false
  version = undefined
  return promiseOrVoid
}
// Public write entry point: run the atom's write function, then flush
// pending listener notifications for this version.
const writeAtom = <Value, Update, Result extends void | Promise<void>>(
  writingAtom: WritableAtom<Value, Update, Result>,
  update: Update,
  version?: VersionObject
): void | Promise<void> => {
  const result = writeAtomState(version, writingAtom, update)
  flushPending(version)
  return result
}
// Runtime type guard: writable atoms carry a `write` function.
const isActuallyWritableAtom = (atom: AnyAtom): atom is AnyWritableAtom =>
  Boolean((atom as AnyWritableAtom).write)
// Mount `atom`: create its Mounted record, recursively mount its read
// dependencies (registering this atom as their dependent), then invoke
// onMount for writable atoms, keeping any returned unmount callback.
const mountAtom = <Value>(
  atom: Atom<Value>,
  initialDependent?: AnyAtom
): Mounted => {
  // mount self
  const mounted: Mounted = {
    d: new Set(initialDependent && [initialDependent]),
    l: new Set(),
  }
  mountedMap.set(atom, mounted)
  if (typeof process === 'object' && process.env.NODE_ENV !== 'production') {
    mountedAtoms.add(atom)
  }
  // mount read dependencies before onMount
  const atomState = readAtomState(undefined, atom)
  atomState.d.forEach((_, a) => {
    if (a !== atom) {
      const aMounted = mountedMap.get(a)
      if (aMounted) {
        aMounted.d.add(atom) // add dependent
      } else {
        mountAtom(a, atom)
      }
    }
  })
  // onMount
  if (isActuallyWritableAtom(atom) && atom.onMount) {
    const setAtom = (update: unknown) => writeAtom(atom, update)
    const onUnmount = atom.onMount(setAtom)
    if (onUnmount) {
      mounted.u = onUnmount
    }
  }
  return mounted
}
// Unmount `atom`: run its onUnmount callback, drop its Mounted record, then
// detach it from each dependency's dependent set, unmounting dependencies
// that become unused in the process.
const unmountAtom = <Value>(atom: Atom<Value>): void => {
  // unmount self
  const onUnmount = mountedMap.get(atom)?.u
  if (onUnmount) {
    onUnmount()
  }
  mountedMap.delete(atom)
  if (typeof process === 'object' && process.env.NODE_ENV !== 'production') {
    mountedAtoms.delete(atom)
  }
  // unmount read dependencies afterward
  const atomState = getAtomState(undefined, atom)
  if (atomState) {
    atomState.d.forEach((_, a) => {
      if (a !== atom) {
        const mounted = mountedMap.get(a)
        if (mounted) {
          mounted.d.delete(atom)
          if (canUnmountAtom(a, mounted)) {
            unmountAtom(a)
          }
        }
      }
    })
  } else if (
    typeof process === 'object' &&
    process.env.NODE_ENV !== 'production'
  ) {
    console.warn('[Bug] could not find atom state to unmount', atom)
  }
}
// Reconcile mounted dependents after an atom's read dependencies changed:
// detach from dependencies that were dropped and attach to new ones.
const mountDependencies = <Value>(
  atom: Atom<Value>,
  atomState: AtomState<Value>,
  prevReadDependencies: ReadDependencies
): void => {
  const addedDeps = new Set(atomState.d.keys())
  for (const prevDep of prevReadDependencies.keys()) {
    if (addedDeps.has(prevDep)) {
      // dependency kept: nothing to do, and it is not newly added
      addedDeps.delete(prevDep)
      continue
    }
    // dependency dropped: remove `atom` from its dependents, unmount if idle
    const mounted = mountedMap.get(prevDep)
    if (mounted) {
      mounted.d.delete(atom)
      if (canUnmountAtom(prevDep, mounted)) {
        unmountAtom(prevDep)
      }
    }
  }
  // everything left in addedDeps is a brand-new dependency
  for (const newDep of addedDeps) {
    const mounted = mountedMap.get(newDep)
    if (mounted) {
      mounted.d.add(atom) // add to dependents
    } else {
      mountAtom(newDep, atom)
    }
  }
}
// Notify listeners of state changes. For a versioned flush, listeners fire
// for atoms whose versioned state differs from the committed state. For the
// committed (undefined) case, drain pendingMap, reconciling mounted
// dependencies where the dependency map object changed.
const flushPending = (version: VersionObject | undefined): void => {
  if (version) {
    const versionedAtomStateMap = getVersionedAtomStateMap(version)
    versionedAtomStateMap.forEach((atomState, atom) => {
      if (atomState !== committedAtomStateMap.get(atom)) {
        const mounted = mountedMap.get(atom)
        mounted?.l.forEach((listener) => listener(version))
      }
    })
    return
  }
  // snapshot first: listeners may add new pending entries while we iterate
  const pending = Array.from(pendingMap)
  pendingMap.clear()
  pending.forEach(([atom, prevAtomState]) => {
    const atomState = getAtomState(undefined, atom)
    if (atomState && atomState.d !== prevAtomState?.d) {
      mountDependencies(atom, atomState, prevAtomState?.d || new Map())
    }
    const mounted = mountedMap.get(atom)
    mounted?.l.forEach((listener) => listener())
    if (
      typeof process === 'object' &&
      process.env.NODE_ENV !== 'production'
    ) {
      // dev-only: second arg marks atoms that are newly created
      stateListeners.forEach((l) => l(atom, !prevAtomState))
    }
  })
}
// Fold a version's atom states into the committed map, keeping only states
// with a newer revision, and reconcile mounted dependencies on change.
const commitVersionedAtomStateMap = (version: VersionObject) => {
  const versionedAtomStateMap = getVersionedAtomStateMap(version)
  versionedAtomStateMap.forEach((atomState, atom) => {
    const committed = committedAtomStateMap.get(atom)
    const committedRevision = committed?.r || 0
    if (atomState.r <= committedRevision) {
      return // committed state is as new or newer; skip
    }
    committedAtomStateMap.set(atom, atomState)
    if (atomState.d !== committed?.d) {
      mountDependencies(atom, atomState, committed?.d || new Map())
    }
  })
}
// Commit a version's state (if any) into the committed map, then notify
// pending listeners. `_atom` is unused but kept for the store interface.
const commitAtom = (_atom: AnyAtom | null, version?: VersionObject) => {
  if (version !== undefined) {
    commitVersionedAtomStateMap(version)
  }
  flushPending(undefined)
}
// Mount `atom` (if needed) and register `callback` as a listener.
// Returns an unsubscribe function that also unmounts the atom when idle.
const subscribeAtom = (
  atom: AnyAtom,
  callback: (version?: VersionObject) => void
) => {
  const mounted = addAtom(atom)
  mounted.l.add(callback)
  return () => {
    mounted.l.delete(callback)
    delAtom(atom)
  }
}
// Bulk-restore atom values (e.g. devtools time travel): set each atom that
// has an initial value and invalidate its dependents, then flush once.
const restoreAtoms = (
  values: Iterable<readonly [AnyAtom, unknown]>,
  version?: VersionObject
): void => {
  for (const [atom, value] of values) {
    if (!hasInitialValue(atom)) {
      continue // derived atoms cannot be restored directly
    }
    setAtomPromiseOrValue(version, atom, value)
    invalidateDependents(version, atom)
  }
  flushPending(version)
}
// Expose the store interface; dev builds additionally expose hooks used by
// devtools (state subscription, mounted-atom inspection).
if (typeof process === 'object' && process.env.NODE_ENV !== 'production') {
  return {
    [READ_ATOM]: readAtom,
    [WRITE_ATOM]: writeAtom,
    [COMMIT_ATOM]: commitAtom,
    [SUBSCRIBE_ATOM]: subscribeAtom,
    [RESTORE_ATOMS]: restoreAtoms,
    [DEV_SUBSCRIBE_STATE]: (l: StateListener) => {
      stateListeners.add(l)
      return () => {
        stateListeners.delete(l)
      }
    },
    [DEV_GET_MOUNTED_ATOMS]: () => mountedAtoms.values(),
    [DEV_GET_ATOM_STATE]: (a: AnyAtom) => committedAtomStateMap.get(a),
    [DEV_GET_MOUNTED]: (a: AnyAtom) => mountedMap.get(a),
  }
}
return {
  [READ_ATOM]: readAtom,
  [WRITE_ATOM]: writeAtom,
  [COMMIT_ATOM]: commitAtom,
  [SUBSCRIBE_ATOM]: subscribeAtom,
  [RESTORE_ATOMS]: restoreAtoms,
}
}
// The public store type is derived from the factory's return value.
export type Store = ReturnType<typeof createStore>
import React from "react";
import { connect, ConnectedProps } from "react-redux";
import clsx from "clsx";
import { BrowserRouter as Router, Switch, Route } from "react-router-dom";
import { makeStyles, Theme, ThemeProvider } from "@material-ui/core/styles";
import AppBar from "@material-ui/core/AppBar";
import Drawer from "@material-ui/core/Drawer";
import Toolbar from "@material-ui/core/Toolbar";
import List from "@material-ui/core/List";
import ListItem from "@material-ui/core/ListItem";
import ListItemIcon from "@material-ui/core/ListItemIcon";
import ListItemText from "@material-ui/core/ListItemText";
import Snackbar from "@material-ui/core/Snackbar";
import SnackbarContent from "@material-ui/core/SnackbarContent";
import Typography from "@material-ui/core/Typography";
import IconButton from "@material-ui/core/IconButton";
import Slide from "@material-ui/core/Slide";
import { TransitionProps } from "@material-ui/core/transitions";
import MenuIcon from "@material-ui/icons/Menu";
import BarChartIcon from "@material-ui/icons/BarChart";
import LayersIcon from "@material-ui/icons/Layers";
import SettingsIcon from "@material-ui/icons/Settings";
import ScheduleIcon from "@material-ui/icons/Schedule";
import FeedbackIcon from "@material-ui/icons/Feedback";
import DoubleArrowIcon from "@material-ui/icons/DoubleArrow";
import CloseIcon from "@material-ui/icons/Close";
import { AppState } from "./store";
import { paths } from "./paths";
import { isDarkTheme, useTheme } from "./theme";
import { closeSnackbar } from "./actions/snackbarActions";
import { toggleDrawer } from "./actions/settingsActions";
import ListItemLink from "./components/ListItemLink";
import SchedulersView from "./views/SchedulersView";
import DashboardView from "./views/DashboardView";
import TasksView from "./views/TasksView";
import TaskDetailsView from "./views/TaskDetailsView";
import SettingsView from "./views/SettingsView";
import ServersView from "./views/ServersView";
import RedisInfoView from "./views/RedisInfoView";
import PageNotFoundView from "./views/PageNotFoundView";
// Width (px) of the expanded navigation drawer, shared by style rules below.
const drawerWidth = 220;
// FIXME: For some reason, the following code does not work:
// makeStyles(theme => ({ /* use theme here */}));
// Using closure to work around this problem.
const useStyles = (theme: Theme) =>
  makeStyles({
    root: {
      display: "flex",
    },
    toolbar: {
      paddingRight: 24, // keep right padding when drawer closed
    },
    toolbarIcon: {
      display: "flex",
      alignItems: "center",
      justifyContent: "flex-end",
      padding: "0 8px",
      ...theme.mixins.toolbar,
    },
    appBar: {
      backgroundColor: theme.palette.background.paper,
      zIndex: theme.zIndex.drawer + 1, // keep the app bar above the drawer
    },
    menuButton: {
      marginRight: theme.spacing(1),
      color: isDarkTheme(theme)
        ? theme.palette.grey[100]
        : theme.palette.grey[700],
    },
    menuButtonHidden: {
      display: "none",
    },
    title: {
      flexGrow: 1,
    },
    drawerPaper: {
      position: "relative",
      whiteSpace: "nowrap",
      width: drawerWidth,
      transition: theme.transitions.create("width", {
        easing: theme.transitions.easing.sharp,
        duration: theme.transitions.duration.enteringScreen,
      }),
      border: "none",
    },
    // collapsed drawer: shrinks to icon-only width with a leave transition
    drawerPaperClose: {
      overflowX: "hidden",
      transition: theme.transitions.create("width", {
        easing: theme.transitions.easing.sharp,
        duration: theme.transitions.duration.leavingScreen,
      }),
      width: theme.spacing(7),
      [theme.breakpoints.up("sm")]: {
        width: theme.spacing(9),
      },
    },
    snackbar: {
      background: theme.palette.grey["A400"],
      color: "#ffffff",
    },
    snackbarCloseIcon: {
      color: theme.palette.grey[400],
    },
    appBarSpacer: theme.mixins.toolbar,
    mainContainer: {
      display: "flex",
      width: "100vw",
    },
    content: {
      flex: 1,
      height: "100vh",
      overflow: "hidden",
      background: theme.palette.background.paper,
    },
    contentWrapper: {
      height: "100%",
      display: "flex",
      paddingTop: "64px", // app-bar height
      overflow: "scroll",
    },
    sidebarContainer: {
      display: "flex",
      justifyContent: "space-between",
      height: "100%",
      flexDirection: "column",
    },
    listItem: {
      borderTopRightRadius: "24px",
      borderBottomRightRadius: "24px",
    },
  });
// Select the slices of redux state the App component renders from.
function mapStateToProps(state: AppState) {
  const { snackbar, settings } = state;
  return {
    snackbar,
    themePreference: settings.themePreference,
    isDrawerOpen: settings.isDrawerOpen,
  };
}
// Action creators bound to dispatch and injected as props.
const mapDispatchToProps = {
  closeSnackbar,
  toggleDrawer,
};
// Connects App to the redux store; ConnectedProps derives the prop types.
const connector = connect(mapStateToProps, mapDispatchToProps);
// Slide-from-bottom transition used by the snackbar. `direction` is applied
// after the incoming props, matching the original precedence.
function SlideUpTransition(props: TransitionProps) {
  const slideProps = { ...props, direction: "up" as const };
  return <Slide {...slideProps} />;
}
// Root application component: theme provider + router shell with a fixed
// app bar, collapsible navigation drawer, global snackbar, and the routed
// main content area.
function App(props: ConnectedProps<typeof connector>) {
  const theme = useTheme(props.themePreference);
  const classes = useStyles(theme)();
  return (
    <ThemeProvider theme={theme}>
      <Router>
        <div className={classes.root}>
          <AppBar
            position="absolute"
            className={classes.appBar}
            variant="outlined"
          >
            <Toolbar className={classes.toolbar}>
              <IconButton
                edge="start"
                color="inherit"
                aria-label="open drawer"
                onClick={props.toggleDrawer}
                className={classes.menuButton}
              >
                <MenuIcon />
              </IconButton>
              <Typography
                component="h1"
                variant="h6"
                noWrap
                className={classes.title}
                color="textPrimary"
              >
                Asynq Monitoring
              </Typography>
            </Toolbar>
          </AppBar>
          <div className={classes.mainContainer}>
            <Drawer
              variant="permanent"
              classes={{
                paper: clsx(
                  classes.drawerPaper,
                  !props.isDrawerOpen && classes.drawerPaperClose
                ),
              }}
              open={props.isDrawerOpen}
            >
              {/* Global snackbar. NOTE(review): rendered inside the Drawer;
                  Snackbar is portal-based so this works, but confirm the
                  placement is intentional. */}
              <Snackbar
                anchorOrigin={{ vertical: "bottom", horizontal: "left" }}
                open={props.snackbar.isOpen}
                autoHideDuration={6000}
                onClose={props.closeSnackbar}
                TransitionComponent={SlideUpTransition}
              >
                <SnackbarContent
                  message={props.snackbar.message}
                  className={classes.snackbar}
                  action={
                    <IconButton
                      size="small"
                      aria-label="close"
                      color="inherit"
                      onClick={props.closeSnackbar}
                    >
                      <CloseIcon
                        className={classes.snackbarCloseIcon}
                        fontSize="small"
                      />
                    </IconButton>
                  }
                />
              </Snackbar>
              <div className={classes.appBarSpacer} />
              <div className={classes.sidebarContainer}>
                {/* primary navigation */}
                <List>
                  <div>
                    <ListItemLink
                      to={paths.HOME}
                      primary="Queues"
                      icon={<BarChartIcon />}
                    />
                    <ListItemLink
                      to={paths.SERVERS}
                      primary="Servers"
                      icon={<DoubleArrowIcon />}
                    />
                    <ListItemLink
                      to={paths.SCHEDULERS}
                      primary="Schedulers"
                      icon={<ScheduleIcon />}
                    />
                    <ListItemLink
                      to={paths.REDIS}
                      primary="Redis"
                      icon={<LayersIcon />}
                    />
                  </div>
                </List>
                {/* secondary navigation pinned to the drawer bottom */}
                <List>
                  <ListItemLink
                    to={paths.SETTINGS}
                    primary="Settings"
                    icon={<SettingsIcon />}
                  />
                  <ListItem
                    button
                    component="a"
                    className={classes.listItem}
                    href="https://github.com/hibiken/asynqmon/issues"
                    target="_blank"
                  >
                    <ListItemIcon>
                      <FeedbackIcon />
                    </ListItemIcon>
                    <ListItemText primary="Send Feedback" />
                  </ListItem>
                </List>
              </div>
            </Drawer>
            <main className={classes.content}>
              <div className={classes.contentWrapper}>
                {/* specific routes first; HOME and the 404 catch-all last */}
                <Switch>
                  <Route exact path={paths.TASK_DETAILS}>
                    <TaskDetailsView />
                  </Route>
                  <Route exact path={paths.QUEUE_DETAILS}>
                    <TasksView />
                  </Route>
                  <Route exact path={paths.SCHEDULERS}>
                    <SchedulersView />
                  </Route>
                  <Route exact path={paths.SERVERS}>
                    <ServersView />
                  </Route>
                  <Route exact path={paths.REDIS}>
                    <RedisInfoView />
                  </Route>
                  <Route exact path={paths.SETTINGS}>
                    <SettingsView />
                  </Route>
                  <Route exact path={paths.HOME}>
                    <DashboardView />
                  </Route>
                  <Route path="*">
                    <PageNotFoundView />
                  </Route>
                </Switch>
              </div>
            </main>
          </div>
        </div>
      </Router>
    </ThemeProvider>
  );
}
export default connector(App);
import { logger } from "firebase-functions";
import { storage } from "../models/firebase";
import {
Collection,
Collections,
CollectionType,
Conflict,
Conflicts,
ConflictResolutionType,
ImageComposite,
ImageComposites,
ImageLayer,
ImageLayers,
OrderedImageLayer,
RarityValue,
Trait,
Traits,
TraitSets,
TraitValue,
TraitValuePair,
TraitValues,
TraitSet,
} from "../models/models";
// Node builtins loaded via require to match the Functions runtime style.
const path = require("path");
const os = require("os");
const fs = require("fs");
// Scratch directory root for downloaded layers and generated composites.
const tempDir = os.tmpdir();
// Decimal places of precision used when sampling rarity-weighted randomness.
const TRAITVALUES_RARITY_MAX_PRECISION: number = 4;
/**
 * Generates composite artwork images for one batch of a collection run.
 * Each instance processes items [startIndex, endIndex) for one trait set.
 */
export class ArtworkGenerator {
  projectId: string;
  collectionId: string;
  // Identifies the output group this batch's composites belong to.
  compositeGroupId: string;
  // null means "no specific trait set" (callers encode this as "-1").
  traitSetId: string | null;
  startIndex: number;
  endIndex: number;
  batchSize: number;
  // True for the first batch of a trait set; triggers layer pre-download.
  isFirstBatchInTraitSet: boolean;
  /**
   * @param traitSetId "-1" is treated as "no specific trait set" (null).
   * @param isFirstBatchInTraitSet when true, generate() pre-downloads all
   *        image layers for the trait set before compositing.
   */
  constructor(
    projectId: string,
    collectionId: string,
    compositeGroupId: string,
    traitSetId: string,
    startIndex: number,
    endIndex: number,
    batchSize: number,
    isFirstBatchInTraitSet: boolean
  ) {
    this.projectId = projectId;
    this.collectionId = collectionId;
    this.compositeGroupId = compositeGroupId;
    this.traitSetId = traitSetId == "-1" ? null : traitSetId;
    this.startIndex = startIndex;
    this.endIndex = endIndex;
    this.batchSize = batchSize;
    this.isFirstBatchInTraitSet = isFirstBatchInTraitSet;
  }
async generate(): Promise<(ImageComposite | null)[]> {
const collection = await Collections.withId(
this.collectionId,
this.projectId
);
const traits = await Traits.all(
this.projectId,
this.collectionId,
this.traitSetId
);
const imageLayers = await ImageLayers.all(
this.projectId,
this.collectionId,
this.traitSetId
);
const traitSet = this.traitSetId
? await TraitSets.withId(
this.traitSetId,
this.projectId,
this.collectionId
)
: null;
let conflicts: Conflict[] = [];
const traitValueIdToImageLayers: { [traitValueId: string]: ImageLayer } =
{};
if (collection.type == CollectionType.Generative) {
conflicts = await Conflicts.all(
this.projectId,
this.collectionId,
this.traitSetId
);
}
logger.info(
"Generate Artwork for project: " +
this.projectId +
" collection: " +
collection.name +
"(" +
this.collectionId +
", type: " +
collection.type +
")"
);
imageLayers.forEach((imageLayer) => {
if (imageLayer.traitValueId) {
traitValueIdToImageLayers[imageLayer.traitValueId] = imageLayer;
}
});
const valuesWithImagesInTraitSet = Object.keys(traitValueIdToImageLayers);
let traitValues: { [traitId: string]: TraitValue[] } = {};
// prefetch all trait values
for (let i = 0; i < traits.length; i++) {
const trait = traits[i];
traitValues[trait.id] = await TraitValues.all(
this.projectId,
this.collectionId,
this.compositeGroupId,
trait,
valuesWithImagesInTraitSet
);
}
const projectDownloadPath = this.projectDownloadPath();
// setup only necessary at the beginning of a run,
// so only do this for batchNum = 0
if (this.startIndex == 0) {
// create download directory for all images
await fs.promises.mkdir(
projectDownloadPath,
{ recursive: true },
(err: Error) => {
if (err) {
logger.error("error creating project directory");
logger.error(err);
}
}
);
const layerDownloadPath = this.layerDownloadPath();
// create download directory for all artwork
await fs.promises.mkdir(
layerDownloadPath,
{ recursive: true },
(err: Error) => {
if (err) {
logger.error("error creating layers directory");
logger.error(err);
}
}
);
}
if (this.isFirstBatchInTraitSet) {
// predownload all uncomposited artwork
await Promise.all(
imageLayers.map((imageLayer) => this.downloadImageFile(imageLayer))
);
}
// generate artwork for each item in the collection supply
let composites: (ImageComposite | null)[] = [];
logger.info("Generating: " + this.startIndex + " - " + this.endIndex);
logger.info("Trait Set (" + this.traitSetId + "): " + traitSet?.name);
logger.info("Matching Traits: " + traits.length);
logger.info("Matching Trait Values: " + Object.values(traitValues).length);
logger.info("Matching Image Layers: " + imageLayers.length);
if (collection.type != CollectionType.Prerendered) {
if (traits.length == 0) {
logger.info("no matching traits");
return [];
}
if (Object.values(traitValues).length == 0) {
logger.info("no matching trait values");
return [];
}
}
if (imageLayers.length == 0) {
logger.info("no matching image layers");
return [];
}
let continuousFailures = 0;
let i = this.startIndex;
while (i < this.endIndex) {
let compositeData: ImageComposite | null = null;
switch (collection.type) {
case CollectionType.Generative:
compositeData = await this.layeredArtworkForItem(
i,
collection,
traitSet,
traits,
traitValues,
traitValueIdToImageLayers,
imageLayers,
conflicts,
projectDownloadPath
);
break;
case CollectionType.Prerendered:
compositeData = await this.prerenderedArtworkForItem(
i,
collection,
imageLayers,
projectDownloadPath
);
break;
}
if (compositeData) {
continuousFailures = 0;
const composite = await ImageComposites.create(
compositeData,
this.projectId,
this.collectionId,
this.compositeGroupId
);
composites.push(composite);
// remove any possible values for always unique traits
// so that they can only be used once
traitValues = this.removeUsedAlwaysUniqueTraitValues(
traits,
traitValues,
composite
);
} else {
continuousFailures++;
console.error("no composite data");
if (continuousFailures > 10) {
break;
}
continue;
}
i++;
}
// only do cleanup if we just finished the last batch of the run
if (this.endIndex == collection.supply) {
// delete all downloaded images and composites
await fs.promises.rmdir(
projectDownloadPath,
{ recursive: true, force: true },
(err: Error) => {
if (err) {
logger.error("directory cleanup failed");
logger.error(err.message);
}
}
);
}
return composites;
}
  /**
   * Build one generative item: roll an unused trait/value combination,
   * resolve conflicts, composite the matching image layers into a PNG,
   * upload it, and return the ImageComposite data (or null on failure).
   */
  async layeredArtworkForItem(
    itemIndex: number,
    collection: Collection,
    traitSet: TraitSet | null,
    traits: Trait[],
    traitValues: { [traitId: string]: TraitValue[] },
    traitValueIdToImageLayers: { [traitValueId: string]: ImageLayer },
    imageLayers: ImageLayer[],
    conflicts: Conflict[],
    projectDownloadPath: string
  ): Promise<ImageComposite | null> {
    let traitValuePairs: TraitValuePair[] = [];
    let hasUnusedTraitValuePair = false;
    const numRetries = 20;
    let retriesRemaining = numRetries;
    let failedToFindUnusedTraitPair = false;
    let hash: string = "";
    // keep rolling until the traits hash is unique within the group,
    // giving up after numRetries attempts
    while (!hasUnusedTraitValuePair) {
      // generate a pair mapping trait to a random trait value
      traitValuePairs = await this.randomTraitValues(traits, traitValues);
      hash = ImageComposites.traitsHash(traitValuePairs);
      hasUnusedTraitValuePair = await ImageComposites.isUniqueTraitsHash(
        hash,
        this.projectId,
        this.collectionId,
        this.compositeGroupId
      );
      retriesRemaining--;
      if (retriesRemaining == 0) {
        failedToFindUnusedTraitPair = true;
        console.error(
          "Unable to find unused trait pair after " + numRetries + " retries."
        );
        console.log(
          "generated trait value pairs: " +
            traitValuePairs
              .map((pair) => {
                return pair.trait.name + ": " + pair.traitValue?.name;
              })
              .join(", ")
        );
        break;
      }
    }
    if (failedToFindUnusedTraitPair) {
      return null;
    }
    // deal with any pairs that conflict / we dont want to happen
    traitValuePairs = await this.resolveConflicts(
      traitValuePairs,
      conflicts,
      traitValues
    );
    // for all trait value pairs, fetch the artwork representing random value
    const traitValueImagePairs = traitValuePairs.map((traitValuePair) => {
      const traitValueId = traitValuePair.traitValue?.id;
      const imageLayer = traitValueId
        ? // needs to be null not undefined for firestore
          traitValueIdToImageLayers[traitValueId] ?? null
        : null;
      traitValuePair.imageLayer = imageLayer;
      return traitValuePair;
    });
    // composite all of the images representing trait values together into one image
    const sortedTraitValueImagePairs =
      this.sortTraitValuePairs(traitValueImagePairs);
    // for any image layers with companions, inject them at the right layer level
    const sortedImageLayers = this.sortedImageLayersInjectingCompanions(
      sortedTraitValueImagePairs,
      imageLayers
    );
    const inputFilePaths = sortedImageLayers.map((imageLayer) => {
      return imageLayer ? this.downloadPathForImageLayer(imageLayer) : null;
    });
    const outputFilePath: string = path.join(
      projectDownloadPath,
      itemIndex + ".png"
    );
    let succeeded = await this.compositeLayeredImages(
      inputFilePaths,
      outputFilePath
    );
    if (succeeded) {
      // upload the composite back to the bucket
      const bucket = storage.bucket();
      const uploadFilePath =
        this.projectId +
        "/" +
        collection.id +
        "/generated/" +
        this.compositeGroupId +
        "/" +
        itemIndex +
        ".png";
      const uploadFile = bucket.file(uploadFilePath);
      // upload failures are logged and leave downloadURL undefined
      const downloadURL = await bucket
        .upload(outputFilePath, {
          destination: uploadFilePath,
          metadata: {
            contentType: "image/png",
          },
        })
        .then(() => {
          return uploadFile.publicUrl();
        })
        .catch((err: Error) => {
          logger.error("error uploading file to bucket");
          logger.error(err);
        });
      const imageComposite = {
        externalURL: downloadURL,
        traits: sortedTraitValueImagePairs,
        traitsHash: hash,
      } as ImageComposite;
      return imageComposite;
    } else {
      return null;
    }
  }
  /**
   * Build one prerendered item: the layer at `itemIndex` is passed through
   * the compositor unchanged, uploaded, and returned as an ImageComposite
   * with no traits (traitsHash is just the item index).
   */
  async prerenderedArtworkForItem(
    itemIndex: number,
    collection: Collection,
    imageLayers: ImageLayer[],
    projectDownloadPath: string
  ): Promise<ImageComposite | null> {
    const imageLayer = imageLayers[itemIndex];
    const inputFilePath = imageLayer
      ? this.downloadPathForImageLayer(imageLayer)
      : null;
    const outputFilePath: string = path.join(
      projectDownloadPath,
      itemIndex + ".png"
    );
    let succeeded = await this.compositeLayeredImages(
      [inputFilePath],
      outputFilePath
    );
    if (succeeded) {
      // upload the composite back to the bucket
      const bucket = storage.bucket();
      const uploadFilePath =
        this.projectId +
        "/" +
        collection.id +
        "/generated/" +
        this.compositeGroupId +
        "/" +
        itemIndex +
        ".png";
      const uploadFile = bucket.file(uploadFilePath);
      const downloadURL = await bucket
        .upload(outputFilePath, {
          destination: uploadFilePath,
          // NOTE(review): contentType metadata is disabled here but set in
          // the generative path — confirm this difference is intentional.
          // metadata: {
          //   contentType: "image/png",
          // },
        })
        .then(() => {
          return uploadFile.publicUrl();
        })
        .catch((err: Error) => {
          logger.error("error uploading file to bucket");
          logger.error(err);
        });
      const imageComposite = {
        externalURL: downloadURL,
        traits: [] as TraitValuePair[],
        traitsHash: itemIndex.toString(),
      } as ImageComposite;
      return imageComposite;
    } else {
      return null;
    }
  }
async randomTraitValues(
traits: Trait[],
traitValues: { [traitId: string]: TraitValue[] }
): Promise<TraitValuePair[]> {
// for each trait fetch a randomly chosen value
// based upon the distribution of rarity
const traitValueTasks = traits.map(async (trait) => {
return await this.randomValue<TraitValue>(
traitValues[trait.id],
trait.isAlwaysUnique
).then(
(value) => ({ trait: trait, traitValue: value } as TraitValuePair)
);
});
return await Promise.all(traitValueTasks);
}
async resolveConflicts(
traitValuePairs: TraitValuePair[],
conflicts: Conflict[],
traitValuesDict: { [traitId: string]: TraitValue[] }
): Promise<TraitValuePair[]> {
for (let i = 0; i < conflicts.length; i++) {
const conflict = conflicts[i];
const trait1Index = traitValuePairs.findIndex(
(pair) => pair.trait.id == conflict.trait1Id
);
if (trait1Index == -1) {
continue;
}
const trait2Index = traitValuePairs.findIndex(
(pair) => pair.trait.id == conflict.trait2Id
);
if (trait2Index == -1) {
continue;
}
let trait1Value = traitValuePairs[trait1Index].traitValue;
if (
(conflict.trait1ValueId !== null &&
conflict.trait1ValueId !== trait1Value?.id) ||
trait1Value?.id == null
) {
continue;
}
let trait2Value = traitValuePairs[trait2Index].traitValue;
if (
(conflict.trait2ValueId !== null &&
conflict.trait2ValueId !== trait2Value?.id) ||
trait2Value?.id == null
) {
continue;
}
const trait1Name = traitValuePairs[trait1Index].trait.name;
const trait2Name = traitValuePairs[trait2Index].trait.name;
const trait1ValueName = conflict.trait1ValueId ? trait1Value.name : "Any";
const trait2ValueName = conflict.trait2ValueId ? trait2Value.name : "Any";
let resolution: string;
// all matches means we have a conflict - time to handle resolution:
switch (conflict.resolutionType) {
case ConflictResolutionType.Trait1None:
traitValuePairs[trait1Index].traitValue = null;
resolution = "dropped " + trait1Name;
break;
case ConflictResolutionType.Trait2None:
traitValuePairs[trait2Index].traitValue = null;
resolution = "dropped " + trait2Name;
break;
case ConflictResolutionType.Trait1Random:
const pair1 = traitValuePairs[trait1Index];
const newRandomValue1 = await this.randomValue<TraitValue>(
traitValuesDict[pair1.trait.id],
pair1.trait.isAlwaysUnique,
pair1.traitValue?.id
);
traitValuePairs[trait1Index].traitValue = newRandomValue1;
resolution = "updated " + trait1Name + " to ";
break;
case ConflictResolutionType.Trait2Random:
const pair2 = traitValuePairs[trait2Index];
const newRandomValue2 = await this.randomValue<TraitValue>(
traitValuesDict[pair2.trait.id],
pair2.trait.isAlwaysUnique,
pair2.traitValue?.id
);
traitValuePairs[trait2Index].traitValue = newRandomValue2;
resolution = "updated " + trait2Name + " to ";
break;
}
console.log(
"resolved conflict for " +
trait1Name +
":" +
trait1ValueName +
" (" +
trait1Value.name +
") and " +
trait2Name +
":" +
trait2ValueName +
" (" +
trait2Value.name +
") " +
resolution
);
}
return traitValuePairs;
}
removeUsedAlwaysUniqueTraitValues(
traits: Trait[],
traitValues: { [traitId: string]: TraitValue[] },
composite: ImageComposite
): { [traitId: string]: TraitValue[] } {
for (let i = 0; i < traits.length; i++) {
const trait = traits[i];
if (trait.isAlwaysUnique) {
let values = traitValues[trait.id];
const compositeTraitPair = composite.traits.find((traitPair) => {
return traitPair.trait.id == trait.id;
});
const compositeValue = compositeTraitPair?.traitValue;
const matchingValueIndex = values.findIndex((value) => {
return value.id == compositeValue?.id;
});
if (matchingValueIndex > -1) {
values.splice(matchingValueIndex, 1);
traitValues[trait.id] = values;
}
}
}
return traitValues;
}
/**
* picturing a trait with 5 values (A-E) on a bar from 0 to 1
* where each value's rarity covers some percentage of the bar
* min 0 [--A--|-----B-----|-C-|--D--|-----E-----] max 1
*
* we walk through the segments until our random number
* between 0 and 1 lands within one of the segments
* @param values array of possible trait values each with specified % rarity
* @returns a secure pseudorandom value from the array factoring in rarity
*/
async randomValue<T extends RarityValue>(
values: T[],
isTraitAlwaysUnique: boolean,
excludeValueId: string | null = null
): Promise<T | null> {
if (isTraitAlwaysUnique) {
const randomIndex = Math.floor(Math.random() * values.length);
const randomValue = values[randomIndex];
return randomValue;
}
const precision = TRAITVALUES_RARITY_MAX_PRECISION;
let value: T | null;
const maxAttempts = 10;
let attempts = 0;
do {
if (attempts == maxAttempts) {
return null;
}
value = await this.randomNumber(precision).then((randomNumber) => {
let totalRarityRangeMax = 0;
let segment = 0;
while (segment < values.length) {
const value = values[segment];
totalRarityRangeMax += value.rarity;
if (randomNumber <= totalRarityRangeMax) {
return value;
}
segment++;
}
return null;
});
attempts++;
} while (excludeValueId != null && value?.id === excludeValueId);
return value;
}
/**
* generate a secure random number from 0.0 -> 1.0
* with specified digits of precision using the
* random-number-csprng library
*
* @param digitsPrecision number of decimal places of precision
* @returns a secure pseudorandom number
*/
async randomNumber(digitsPrecision: number): Promise<number> {
const rand = require("random-number-csprng");
const max = Math.pow(10, digitsPrecision);
const result: Promise<number> = rand(0, max).then((random: number) => {
return random / max;
});
return result;
}
downloadPathForImageLayer(imageLayer: ImageLayer): string {
return path.join(this.layerDownloadPath(), imageLayer.id + ".png");
}
async downloadImageFile(imageLayer: ImageLayer): Promise<string> {
const bucket = storage.bucket();
const file = bucket.file(
this.projectId + "/" + this.collectionId + "/" + imageLayer.bucketFilename
);
const tempFilePath = this.downloadPathForImageLayer(imageLayer);
// TODO: why does validation always fail if I don't disable it?
return file
.download({ destination: tempFilePath, validation: false })
.then(() => {
return tempFilePath;
})
.catch(() => {
logger.error("failed to download to " + tempFilePath);
logger.error(file.name);
return tempFilePath;
});
}
sortTraitValuePairs(pairs: TraitValuePair[]): TraitValuePair[] {
return pairs.sort((a, b) => {
const zIndexA = a.trait.zIndex;
const zIndexB = b.trait.zIndex;
if (zIndexA == zIndexB) return 0;
return zIndexA < zIndexB ? -1 : 1;
});
}
sortedImageLayersInjectingCompanions(
sortedTraitValueImagePairs: TraitValuePair[],
imageLayers: ImageLayer[]
): ImageLayer[] {
const imageLayerPairs: OrderedImageLayer[] = [];
sortedTraitValueImagePairs.forEach((pair) => {
if (pair.imageLayer) {
imageLayerPairs.push({
imageLayer: pair.imageLayer,
zIndex: pair.trait.zIndex,
} as OrderedImageLayer);
}
const companionId = pair.imageLayer?.companionLayerId;
const companionZIndex = pair.imageLayer?.companionLayerZIndex;
if (companionId != null && companionZIndex != null) {
const companionImageLayer = imageLayers.find((imageLayer) => {
return imageLayer.id == companionId;
});
if (companionImageLayer) {
imageLayerPairs.push({
imageLayer: companionImageLayer,
zIndex: companionZIndex,
} as OrderedImageLayer);
}
}
});
const orderedImageLayerPairs = imageLayerPairs.sort((a, b) => {
const zIndexA = a.zIndex;
const zIndexB = b.zIndex;
if (zIndexA == zIndexB) return 0;
return zIndexA < zIndexB ? -1 : 1;
});
const orderedImageLayers = orderedImageLayerPairs.map((a) => {
return a.imageLayer;
});
return orderedImageLayers;
}
compositeLayeredImages(
optInputFilePaths: (string | null)[],
outputFilePath: string
): Promise<boolean> {
const inputFilePaths = optInputFilePaths.filter((f) => f);
if (inputFilePaths.length == 0) {
return Promise.resolve(false);
}
const sharp = require("sharp");
const firstPath = inputFilePaths.shift();
if (inputFilePaths.length == 0) {
return sharp(firstPath).png().toFile(outputFilePath);
}
const inputs = inputFilePaths.map((inputFilePath) => {
return {
input: inputFilePath,
};
});
return sharp(firstPath)
.composite(inputs)
.png()
.toFile(outputFilePath)
.then((_: any) => {
return true;
})
.catch((err: Error) => {
logger.error("error compositing");
logger.error("first path: " + firstPath);
logger.error(inputs);
logger.error(err);
return false;
});
}
projectDownloadPath(): string {
return path.join(tempDir, "treattoolbox", this.projectId);
}
layerDownloadPath(): string {
return path.join(this.projectDownloadPath(), "layered-images");
}
} | the_stack |
import { ComponentItemConfig, ItemConfig } from '../config/config';
import { ResolvedComponentItemConfig, ResolvedHeaderedItemConfig, ResolvedItemConfig, ResolvedStackItemConfig } from '../config/resolved-config';
import { Header } from '../controls/header';
import { AssertError, UnexpectedNullError, UnexpectedUndefinedError } from '../errors/internal-error';
import { LayoutManager } from '../layout-manager';
import { DomConstants } from '../utils/dom-constants';
import { DragListener } from '../utils/drag-listener';
import { EventEmitter } from '../utils/event-emitter';
import { getJQueryOffset } from '../utils/jquery-legacy';
import { AreaLinkedRect, ItemType, JsonValue, Side, WidthAndHeight, WidthOrHeightPropertyName } from '../utils/types';
import {
getElementHeight,
getElementWidth,
getElementWidthAndHeight,
numberToPixels,
setElementDisplayVisibility
} from '../utils/utils';
import { ComponentItem } from './component-item';
import { ComponentParentableItem } from './component-parentable-item';
import { ContentItem } from './content-item';
/** @public */
export class Stack extends ComponentParentableItem {
/** @internal */
private readonly _headerConfig: ResolvedHeaderedItemConfig.Header | undefined;
/** @internal */
private readonly _header: Header;
/** @internal */
private readonly _childElementContainer: HTMLElement;
/** @internal */
private readonly _maximisedEnabled: boolean;
/** @internal */
private _activeComponentItem: ComponentItem | undefined;
/** @internal */
private _dropSegment: Stack.Segment;
/** @internal */
private _dropIndex: number;
/** @internal */
private _contentAreaDimensions: Stack.ContentAreaDimensions;
/** @internal */
private _headerSideChanged = false;
/** @internal */
private readonly _initialWantMaximise: boolean;
/** @internal */
private _initialActiveItemIndex: number;
/** @internal */
private _resizeListener = () => this.handleResize();
/** @internal */
private _maximisedListener = () => this.handleMaximised();
/** @internal */
private _minimisedListener = () => this.handleMinimised();
get childElementContainer(): HTMLElement { return this._childElementContainer; }
get header(): Header { return this._header; }
get headerShow(): boolean { return this._header.show; }
get headerSide(): Side { return this._header.side; }
get headerLeftRightSided(): boolean { return this._header.leftRightSided; }
/** @internal */
get contentAreaDimensions(): Stack.ContentAreaDimensions | undefined { return this._contentAreaDimensions; }
/** @internal */
get initialWantMaximise(): boolean { return this._initialWantMaximise; }
get isMaximised(): boolean { return this === this.layoutManager.maximisedStack; }
get stackParent(): ContentItem {
if (!this.parent) {
throw new Error('Stack should always have a parent');
}
return this.parent;
}
/** @internal */
constructor(layoutManager: LayoutManager, config: ResolvedStackItemConfig, parent: ContentItem) {
super(layoutManager, config, parent, Stack.createElement(document));
this._headerConfig = config.header;
const layoutHeaderConfig = layoutManager.layoutConfig.header;
const configContent = config.content;
// If stack has only one component, then we can also check this for header settings
let componentHeaderConfig: ResolvedHeaderedItemConfig.Header | undefined;
if (configContent.length !== 1) {
componentHeaderConfig = undefined;
} else {
const firstChildItemConfig = configContent[0];
componentHeaderConfig = (firstChildItemConfig as ResolvedHeaderedItemConfig).header; // will be undefined if not component (and wont be stack)
}
this._initialWantMaximise = config.maximised;
this._initialActiveItemIndex = config.activeItemIndex ?? 0; // make sure defined
// check for defined value for each item in order of Stack (this Item), Component (first child), Manager.
const show = this._headerConfig?.show ?? componentHeaderConfig?.show ?? layoutHeaderConfig.show;
const popout = this._headerConfig?.popout ?? componentHeaderConfig?.popout ?? layoutHeaderConfig.popout;
const maximise = this._headerConfig?.maximise ?? componentHeaderConfig?.maximise ?? layoutHeaderConfig.maximise;
const close = this._headerConfig?.close ?? componentHeaderConfig?.close ?? layoutHeaderConfig.close;
const minimise = this._headerConfig?.minimise ?? componentHeaderConfig?.minimise ?? layoutHeaderConfig.minimise;
const tabDropdown = this._headerConfig?.tabDropdown ?? componentHeaderConfig?.tabDropdown ?? layoutHeaderConfig.tabDropdown;
this._maximisedEnabled = maximise !== false;
const headerSettings: Header.Settings = {
show: show !== false,
side: show === false ? Side.top : show,
popoutEnabled: popout !== false,
popoutLabel: popout === false ? '' : popout,
maximiseEnabled: this._maximisedEnabled,
maximiseLabel: maximise === false ? '' : maximise,
closeEnabled: close !== false,
closeLabel: close === false ? '' : close,
minimiseEnabled: true,
minimiseLabel: minimise,
tabDropdownEnabled: tabDropdown !== false,
tabDropdownLabel: tabDropdown === false ? '' : tabDropdown,
};
this._header = new Header(layoutManager,
this, headerSettings,
config.isClosable && close !== false,
() => this.getActiveComponentItem(),
() => this.remove(),
() => this.handlePopoutEvent(),
() => this.toggleMaximise(),
(ev) => this.handleHeaderClickEvent(ev),
(ev) => this.handleHeaderTouchStartEvent(ev),
(item) => this.handleHeaderComponentRemoveEvent(item),
(item) => this.handleHeaderComponentFocusEvent(item),
(x, y, dragListener, item) => this.handleHeaderComponentStartDragEvent(x, y, dragListener, item),
);
// this._dropZones = {};
this.isStack = true;
this._childElementContainer = document.createElement('section');
this._childElementContainer.classList.add(DomConstants.ClassName.Items);
this.on('resize', this._resizeListener);
if (this._maximisedEnabled) {
this.on('maximised', this._maximisedListener);
this.on('minimised', this._minimisedListener);
}
this.element.appendChild(this._header.element);
this.element.appendChild(this._childElementContainer);
this.setupHeaderPosition();
this._header.updateClosability();
}
/** @internal */
override updateSize(force: boolean): void {
this.layoutManager.beginVirtualSizedContainerAdding();
try {
this.updateNodeSize();
this.updateContentItemsSize(force);
} finally {
this.layoutManager.endVirtualSizedContainerAdding();
}
}
/** @internal */
override init(): void {
if (this.isInitialised === true) return;
this.updateNodeSize();
for (let i = 0; i < this.contentItems.length; i++) {
this._childElementContainer.appendChild(this.contentItems[i].element);
}
super.init();
const contentItems = this.contentItems;
const contentItemCount = contentItems.length;
if (contentItemCount > 0) { // contentItemCount will be 0 on drag drop
if (this._initialActiveItemIndex < 0 || this._initialActiveItemIndex >= contentItemCount) {
throw new Error(`ActiveItemIndex out of range: ${this._initialActiveItemIndex} id: ${this.id}`);
} else {
for (let i = 0; i < contentItemCount; i++) {
const contentItem = contentItems[i];
if (!(contentItem instanceof ComponentItem)) {
throw new Error(`Stack Content Item is not of type ComponentItem: ${i} id: ${this.id}`);
} else {
this._header.createTab(contentItem, i);
contentItem.hide();
contentItem.container.setBaseLogicalZIndex();
}
}
this.setActiveComponentItem(contentItems[this._initialActiveItemIndex] as ComponentItem, false);
this._header.updateTabSizes();
}
}
this._header.updateClosability();
this.initContentItems();
}
/** @deprecated Use {@link (Stack:class).setActiveComponentItem} */
setActiveContentItem(item: ContentItem): void {
if (!ContentItem.isComponentItem(item)) {
throw new Error('Stack.setActiveContentItem: item is not a ComponentItem');
} else {
this.setActiveComponentItem(item, false);
}
}
setActiveComponentItem(componentItem: ComponentItem, focus: boolean, suppressFocusEvent = false): void {
if (this._activeComponentItem !== componentItem) {
if (this.contentItems.indexOf(componentItem) === -1) {
throw new Error('componentItem is not a child of this stack');
} else {
this.layoutManager.beginSizeInvalidation();
try {
if (this._activeComponentItem !== undefined) {
this._activeComponentItem.hide();
}
this._activeComponentItem = componentItem;
this._header.processActiveComponentChanged(componentItem);
componentItem.show();
} finally {
this.layoutManager.endSizeInvalidation();
}
this.emit('activeContentItemChanged', componentItem);
this.layoutManager.emit('activeContentItemChanged', componentItem);
this.emitStateChangedEvent();
}
}
if (this.focused || focus) {
this.layoutManager.setFocusedComponentItem(componentItem, suppressFocusEvent);
}
}
/** @deprecated Use {@link (Stack:class).getActiveComponentItem} */
getActiveContentItem(): ContentItem | null {
return this.getActiveComponentItem() ?? null;
}
getActiveComponentItem(): ComponentItem | undefined {
return this._activeComponentItem;
}
/** @internal */
focusActiveContentItem(): void {
this._activeComponentItem?.focus();
}
/** @internal */
override setFocusedValue(value: boolean): void {
this._header.applyFocusedValue(value);
super.setFocusedValue(value);
}
/** @internal */
setRowColumnClosable(value: boolean): void {
this._header.setRowColumnClosable(value);
}
newComponent(componentType: JsonValue, componentState?: JsonValue, title?: string, index?: number): ComponentItem {
const itemConfig: ComponentItemConfig = {
type: 'component',
componentType,
componentState,
title,
};
return this.newItem(itemConfig, index) as ComponentItem;
}
addComponent(componentType: JsonValue, componentState?: JsonValue, title?: string, index?: number): number {
const itemConfig: ComponentItemConfig = {
type: 'component',
componentType,
componentState,
title,
};
return this.addItem(itemConfig, index);
}
newItem(itemConfig: ComponentItemConfig, index?: number): ContentItem {
index = this.addItem(itemConfig, index);
return this.contentItems[index];
}
addItem(itemConfig: ComponentItemConfig, index?: number): number {
this.layoutManager.checkMinimiseMaximisedStack();
const resolvedItemConfig = ItemConfig.resolve(itemConfig);
const contentItem = this.layoutManager.createAndInitContentItem(resolvedItemConfig, this);
return this.addChild(contentItem, index);
}
override addChild(contentItem: ContentItem, index?: number, focus = false): number {
if(index !== undefined && index > this.contentItems.length){
index -= 1;
throw new AssertError('SAC99728'); // undisplayChild() removed so this condition should no longer occur
}
if (!(contentItem instanceof ComponentItem)) {
throw new AssertError('SACC88532'); // Stacks can only have Component children
} else {
index = super.addChild(contentItem, index);
this._childElementContainer.appendChild(contentItem.element);
this._header.createTab(contentItem, index);
this.setActiveComponentItem(contentItem, focus);
this._header.updateTabSizes();
this.updateSize(false);
contentItem.container.setBaseLogicalZIndex();
this._header.updateClosability();
this.emitStateChangedEvent();
return index;
}
}
override removeChild(contentItem: ContentItem, keepChild: boolean): void {
const componentItem = contentItem as ComponentItem;
const index = this.contentItems.indexOf(componentItem);
const stackWillBeDeleted = this.contentItems.length === 1;
if (this._activeComponentItem === componentItem) {
if (componentItem.focused) {
componentItem.blur();
}
if (!stackWillBeDeleted) {
// At this point we're already sure we have at least one content item left *after*
// removing contentItem, so we can safely assume index 1 is a valid one if
// the index of contentItem is 0, otherwise we just use the previous content item.
const newActiveComponentIdx = index === 0 ? 1 : index - 1;
this.setActiveComponentItem(this.contentItems[newActiveComponentIdx] as ComponentItem, false);
}
}
this._header.removeTab(componentItem);
super.removeChild(componentItem, keepChild);
if (!stackWillBeDeleted) {
this._header.updateClosability();
}
this.emitStateChangedEvent();
}
/**
* Maximises the Item or minimises it if it is already maximised
*/
toggleMaximise(): void {
if (this.isMaximised) {
this.minimise();
} else {
this.maximise();
}
}
maximise(): void {
if (!this.isMaximised) {
this.layoutManager.setMaximisedStack(this);
const contentItems = this.contentItems;
const contentItemCount = contentItems.length;
for (let i = 0; i < contentItemCount; i++) {
const contentItem = contentItems[i];
if (contentItem instanceof ComponentItem) {
contentItem.enterStackMaximised();
} else {
throw new AssertError('SMAXI87773');
}
}
this.emitStateChangedEvent();
}
}
minimise(): void {
if (this.isMaximised) {
this.layoutManager.setMaximisedStack(undefined);
const contentItems = this.contentItems;
const contentItemCount = contentItems.length;
for (let i = 0; i < contentItemCount; i++) {
const contentItem = contentItems[i];
if (contentItem instanceof ComponentItem) {
contentItem.exitStackMaximised();
} else {
throw new AssertError('SMINI87773');
}
}
this.emitStateChangedEvent();
}
}
/** @internal */
override destroy(): void {
if (this._activeComponentItem?.focused) {
this._activeComponentItem.blur();
}
super.destroy();
this.off('resize', this._resizeListener);
if (this._maximisedEnabled) {
this.off('maximised', this._maximisedListener);
this.off('minimised', this._minimisedListener);
}
this._header.destroy();
}
toConfig(): ResolvedStackItemConfig {
let activeItemIndex: number | undefined;
if (this._activeComponentItem) {
activeItemIndex = this.contentItems.indexOf(this._activeComponentItem);
if (activeItemIndex < 0) {
throw new Error('active component item not found in stack');
}
}
if (this.contentItems.length > 0 && activeItemIndex === undefined) {
throw new Error('expected non-empty stack to have an active component item');
} else {
const result: ResolvedStackItemConfig = {
type: 'stack',
content: this.calculateConfigContent() as ResolvedComponentItemConfig[],
width: this.width,
minWidth: this.minWidth,
height: this.height,
minHeight: this.minHeight,
id: this.id,
isClosable: this.isClosable,
maximised: this.isMaximised,
header: this.createHeaderConfig(),
activeItemIndex,
}
return result;
}
}
/**
* Ok, this one is going to be the tricky one: The user has dropped a {@link (ContentItem:class)} onto this stack.
*
* It was dropped on either the stacks header or the top, right, bottom or left bit of the content area
* (which one of those is stored in this._dropSegment). Now, if the user has dropped on the header the case
* is relatively clear: We add the item to the existing stack... job done (might be good to have
* tab reordering at some point, but lets not sweat it right now)
*
* If the item was dropped on the content part things are a bit more complicated. If it was dropped on either the
* top or bottom region we need to create a new column and place the items accordingly.
* Unless, of course if the stack is already within a column... in which case we want
* to add the newly created item to the existing column...
* either prepend or append it, depending on wether its top or bottom.
*
* Same thing for rows and left / right drop segments... so in total there are 9 things that can potentially happen
* (left, top, right, bottom) * is child of the right parent (row, column) + header drop
*
* @internal
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
override onDrop(contentItem: ContentItem, area: ContentItem.Area): void {
/*
* The item was dropped on the header area. Just add it as a child of this stack and
* get the hell out of this logic
*/
if (this._dropSegment === Stack.Segment.Header) {
this.resetHeaderDropZone();
if (this._dropIndex === undefined) {
throw new UnexpectedUndefinedError('SODDI68990');
} else {
this.addChild(contentItem, this._dropIndex);
return;
}
}
/*
* The stack is empty. Let's just add the element.
*/
if (this._dropSegment === Stack.Segment.Body) {
this.addChild(contentItem, 0, true);
return;
}
/*
* The item was dropped on the top-, left-, bottom- or right- part of the content. Let's
* aggregate some conditions to make the if statements later on more readable
*/
const isVertical = this._dropSegment === Stack.Segment.Top || this._dropSegment === Stack.Segment.Bottom;
const isHorizontal = this._dropSegment === Stack.Segment.Left || this._dropSegment === Stack.Segment.Right;
const insertBefore = this._dropSegment === Stack.Segment.Top || this._dropSegment === Stack.Segment.Left;
const hasCorrectParent = (isVertical && this.stackParent.isColumn) || (isHorizontal && this.stackParent.isRow);
const dimension = isVertical ? 'height' : 'width';
/*
* The content item can be either a component or a stack. If it is a component, wrap it into a stack
*/
if (contentItem.isComponent) {
const itemConfig = ResolvedStackItemConfig.createDefault();
itemConfig.header = this.createHeaderConfig();
const stack = this.layoutManager.createAndInitContentItem(itemConfig, this);
stack.addChild(contentItem);
contentItem = stack;
}
/*
* If the contentItem that's being dropped is not dropped on a Stack (cases which just passed above and
* which would wrap the contentItem in a Stack) we need to check whether contentItem is a RowOrColumn.
* If it is, we need to re-wrap it in a Stack like it was when it was dragged by its Tab (it was dragged!).
*/
if(contentItem.type === ItemType.row || contentItem.type === ItemType.column){
const itemConfig = ResolvedStackItemConfig.createDefault();
itemConfig.header = this.createHeaderConfig();
const stack = this.layoutManager.createContentItem(itemConfig, this);
stack.addChild(contentItem)
contentItem = stack
}
/*
* If the item is dropped on top or bottom of a column or left and right of a row, it's already
* layd out in the correct way. Just add it as a child
*/
if (hasCorrectParent) {
const index = this.stackParent.contentItems.indexOf(this);
this.stackParent.addChild(contentItem, insertBefore ? index : index + 1, true);
this[dimension] *= 0.5;
contentItem[dimension] = this[dimension];
this.stackParent.updateSize(false);
/*
* This handles items that are dropped on top or bottom of a row or left / right of a column. We need
* to create the appropriate contentItem for them to live in
*/
} else {
const type = isVertical ? ItemType.column : ItemType.row;
const itemConfig = ResolvedItemConfig.createDefault(type) as ResolvedItemConfig;
const rowOrColumn = this.layoutManager.createContentItem(itemConfig, this);
this.stackParent.replaceChild(this, rowOrColumn);
rowOrColumn.addChild(contentItem, insertBefore ? 0 : undefined, true);
rowOrColumn.addChild(this, insertBefore ? undefined : 0, true);
this[dimension] = 50;
contentItem[dimension] = 50;
rowOrColumn.updateSize(false);
}
}
/**
* If the user hovers above the header part of the stack, indicate drop positions for tabs.
* otherwise indicate which segment of the body the dragged item would be dropped on
*
* @param x - Absolute Screen X
* @param y - Absolute Screen Y
* @internal
*/
override highlightDropZone(x: number, y: number): void {
for (const key in this._contentAreaDimensions) {
const segment = key as Stack.Segment;
const area = this._contentAreaDimensions[segment].hoverArea;
if (area.x1 < x && area.x2 > x && area.y1 < y && area.y2 > y) {
if (segment === Stack.Segment.Header) {
this._dropSegment = Stack.Segment.Header;
this.highlightHeaderDropZone(this._header.leftRightSided ? y : x);
} else {
this.resetHeaderDropZone();
this.highlightBodyDropZone(segment);
}
return;
}
}
}
/** @internal */
getArea(): ContentItem.Area | null {
if (this.element.style.display === 'none') {
return null;
}
const headerArea = super.getElementArea(this._header.element);
const contentArea = super.getElementArea(this._childElementContainer);
if (headerArea === null || contentArea === null) {
throw new UnexpectedNullError('SGAHC13086');
}
const contentWidth = contentArea.x2 - contentArea.x1;
const contentHeight = contentArea.y2 - contentArea.y1;
this._contentAreaDimensions = {
header: {
hoverArea: {
x1: headerArea.x1,
y1: headerArea.y1,
x2: headerArea.x2,
y2: headerArea.y2
},
highlightArea: {
x1: headerArea.x1,
y1: headerArea.y1,
x2: headerArea.x2,
y2: headerArea.y2
}
}
};
/**
* Highlight the entire body if the stack is empty
*/
if (this.contentItems.length === 0) {
this._contentAreaDimensions.body = {
hoverArea: {
x1: contentArea.x1,
y1: contentArea.y1,
x2: contentArea.x2,
y2: contentArea.y2
},
highlightArea: {
x1: contentArea.x1,
y1: contentArea.y1,
x2: contentArea.x2,
y2: contentArea.y2
}
};
return super.getElementArea(this.element);
} else {
this._contentAreaDimensions.left = {
hoverArea: {
x1: contentArea.x1,
y1: contentArea.y1,
x2: contentArea.x1 + contentWidth * 0.25,
y2: contentArea.y2
},
highlightArea: {
x1: contentArea.x1,
y1: contentArea.y1,
x2: contentArea.x1 + contentWidth * 0.5,
y2: contentArea.y2
}
};
this._contentAreaDimensions.top = {
hoverArea: {
x1: contentArea.x1 + contentWidth * 0.25,
y1: contentArea.y1,
x2: contentArea.x1 + contentWidth * 0.75,
y2: contentArea.y1 + contentHeight * 0.5
},
highlightArea: {
x1: contentArea.x1,
y1: contentArea.y1,
x2: contentArea.x2,
y2: contentArea.y1 + contentHeight * 0.5
}
};
this._contentAreaDimensions.right = {
hoverArea: {
x1: contentArea.x1 + contentWidth * 0.75,
y1: contentArea.y1,
x2: contentArea.x2,
y2: contentArea.y2
},
highlightArea: {
x1: contentArea.x1 + contentWidth * 0.5,
y1: contentArea.y1,
x2: contentArea.x2,
y2: contentArea.y2
}
};
this._contentAreaDimensions.bottom = {
hoverArea: {
x1: contentArea.x1 + contentWidth * 0.25,
y1: contentArea.y1 + contentHeight * 0.5,
x2: contentArea.x1 + contentWidth * 0.75,
y2: contentArea.y2
},
highlightArea: {
x1: contentArea.x1,
y1: contentArea.y1 + contentHeight * 0.5,
x2: contentArea.x2,
y2: contentArea.y2
}
};
return super.getElementArea(this.element);
}
}
/**
* Programmatically operate with header position.
*
* @param position -
*
* @returns previous header position
* @internal
*/
positionHeader(position: Side): void {
if (this._header.side !== position) {
this._header.setSide(position);
this._headerSideChanged = true;
this.setupHeaderPosition();
}
}
/** @internal */
private updateNodeSize(): void {
if (this.element.style.display !== 'none') {
const content: WidthAndHeight = getElementWidthAndHeight(this.element);
if (this._header.show) {
const dimension = this._header.leftRightSided ? WidthOrHeightPropertyName.width : WidthOrHeightPropertyName.height;
content[dimension] -= this.layoutManager.layoutConfig.dimensions.headerHeight;
}
this._childElementContainer.style.width = numberToPixels(content.width);
this._childElementContainer.style.height = numberToPixels(content.height);
for (let i = 0; i < this.contentItems.length; i++) {
this.contentItems[i].element.style.width = numberToPixels(content.width);
this.contentItems[i].element.style.height = numberToPixels(content.height);
}
this.emit('resize');
this.emitStateChangedEvent();
}
}
/** @internal */
private highlightHeaderDropZone(x: number): void {
// Only walk over the visible tabs
const tabsLength = this._header.lastVisibleTabIndex + 1;
const dropTargetIndicator = this.layoutManager.dropTargetIndicator;
if (dropTargetIndicator === null) {
throw new UnexpectedNullError('SHHDZDTI97110');
}
let area: AreaLinkedRect;
// Empty stack
if (tabsLength === 0) {
const headerOffset = getJQueryOffset(this._header.element);
const elementHeight = getElementHeight(this._header.element);
area = {
x1: headerOffset.left,
x2: headerOffset.left + 100,
y1: headerOffset.top + elementHeight - 20,
y2: headerOffset.top + elementHeight,
};
this._dropIndex = 0;
} else {
let tabIndex = 0;
// This indicates whether our cursor is exactly over a tab
let isAboveTab = false;
let tabTop: number;
let tabLeft: number;
let tabWidth: number;
let tabElement: HTMLElement;
do {
tabElement = this._header.tabs[tabIndex].element;
const offset = getJQueryOffset(tabElement);
if (this._header.leftRightSided) {
tabLeft = offset.top;
tabTop = offset.left;
tabWidth = getElementHeight(tabElement);
} else {
tabLeft = offset.left;
tabTop = offset.top;
tabWidth = getElementWidth(tabElement);
}
if (x >= tabLeft && x < tabLeft + tabWidth) {
isAboveTab = true;
} else {
tabIndex++;
}
} while (tabIndex < tabsLength && !isAboveTab);
// If we're not above any tabs, or to the right of any tab, we are out of the area, so give up
if (isAboveTab === false && x < tabLeft) {
return;
}
const halfX = tabLeft + tabWidth / 2;
if (x < halfX) {
this._dropIndex = tabIndex;
tabElement.insertAdjacentElement('beforebegin', this.layoutManager.tabDropPlaceholder);
} else {
this._dropIndex = Math.min(tabIndex + 1, tabsLength);
tabElement.insertAdjacentElement('afterend', this.layoutManager.tabDropPlaceholder);
}
const tabDropPlaceholderOffset = getJQueryOffset(this.layoutManager.tabDropPlaceholder);
const tabDropPlaceholderWidth = getElementWidth(this.layoutManager.tabDropPlaceholder)
if (this._header.leftRightSided) {
const placeHolderTop = tabDropPlaceholderOffset.top;
area = {
x1: tabTop,
x2: tabTop + tabElement.clientHeight,
y1: placeHolderTop,
y2: placeHolderTop + tabDropPlaceholderWidth,
};
} else {
const placeHolderLeft = tabDropPlaceholderOffset.left;
area = {
x1: placeHolderLeft,
x2: placeHolderLeft + tabDropPlaceholderWidth,
y1: tabTop,
y2: tabTop + tabElement.clientHeight,
};
}
}
dropTargetIndicator.highlightArea(area);
return;
}
/** @internal */
private resetHeaderDropZone() {
this.layoutManager.tabDropPlaceholder.remove();
}
/** @internal */
private setupHeaderPosition() {
setElementDisplayVisibility(this._header.element, this._header.show);
this.element.classList.remove(DomConstants.ClassName.Left, DomConstants.ClassName.Right, DomConstants.ClassName.Bottom);
if (this._header.leftRightSided) {
this.element.classList.add('lm_' + this._header.side);
}
//if ([Side.right, Side.bottom].includes(this._header.side)) {
// // move the header behind the content.
// this.element.appendChild(this._header.element);
//}
this.updateSize(false);
}
/** @internal */
private highlightBodyDropZone(segment: Stack.Segment): void {
if (this._contentAreaDimensions === undefined) {
throw new UnexpectedUndefinedError('SHBDZC82265');
} else {
const highlightArea = this._contentAreaDimensions[segment].highlightArea;
const dropTargetIndicator = this.layoutManager.dropTargetIndicator;
if (dropTargetIndicator === null) {
throw new UnexpectedNullError('SHBDZD96110');
} else {
dropTargetIndicator.highlightArea(highlightArea);
this._dropSegment = segment;
}
}
}
/** @internal */
private handleResize() {
this._header.updateTabSizes()
}
/** @internal */
private handleMaximised() {
this._header.processMaximised();
}
/** @internal */
private handleMinimised() {
this._header.processMinimised();
}
/** @internal */
private handlePopoutEvent() {
this.popout();
}
/**
 * Re-emits a mouse click on the header as a bubbling header-click event.
 * @internal
 */
private handleHeaderClickEvent(ev: MouseEvent) {
    const name = EventEmitter.headerClickEventName;
    this.emit(name, new EventEmitter.ClickBubblingEvent(name, this, ev));
}
/**
 * Re-emits a touch start on the header as a bubbling header-touch-start event.
 * @internal
 */
private handleHeaderTouchStartEvent(ev: TouchEvent) {
    const name = EventEmitter.headerTouchStartEventName;
    this.emit(name, new EventEmitter.TouchStartBubblingEvent(name, this, ev));
}
/** Removes the given component item in response to a header remove request (second argument `false`: see removeChild — TODO confirm its semantics). @internal */
private handleHeaderComponentRemoveEvent(item: ComponentItem) {
this.removeChild(item, false);
}
/** Makes the given component item the active item in response to a header focus request. @internal */
private handleHeaderComponentFocusEvent(item: ComponentItem) {
this.setActiveComponentItem(item, true);
}
/**
 * Begins dragging a component item out of this stack. A maximised stack is
 * restored first so the drag operates at normal size, then the layout
 * manager takes over the drag.
 * @internal
 */
private handleHeaderComponentStartDragEvent(x: number, y: number, dragListener: DragListener, componentItem: ComponentItem) {
    if (this.isMaximised) {
        this.toggleMaximise();
    }
    this.layoutManager.startComponentDrag(x, y, dragListener, componentItem, this);
}
/**
 * Produces the header config to persist for this stack.
 * When the header side was never changed at runtime the stored config is
 * copied verbatim; otherwise the copy reflects the current side (or `false`
 * when the header is hidden), falling back to a minimal config object if
 * there was nothing to copy.
 * @internal
 */
private createHeaderConfig() {
    if (!this._headerSideChanged) {
        return ResolvedHeaderedItemConfig.Header.createCopy(this._headerConfig);
    }
    const show = this._header.show ? this._header.side : false;
    const copied = ResolvedHeaderedItemConfig.Header.createCopy(this._headerConfig, show);
    if (copied !== undefined) {
        return copied;
    }
    return {
        show,
        popout: undefined,
        maximise: undefined,
        close: undefined,
        minimise: undefined,
        tabDropdown: undefined,
    };
}
/** Emits the bubbling 'stateChanged' event for this stack. @internal */
private emitStateChangedEvent() {
this.emitBaseBubblingEvent('stateChanged');
}
}
/** @public */
export namespace Stack {
/** @internal */
export const enum Segment {
Header = 'header',
Body = 'body',
Left = 'left',
Right = 'right',
Top = 'top',
Bottom = 'bottom',
}
/** @internal */
export interface ContentAreaDimension {
hoverArea: AreaLinkedRect;
highlightArea: AreaLinkedRect;
}
/** @internal */
export type ContentAreaDimensions = {
[segment: string]: ContentAreaDimension;
};
/** @internal */
export function createElement(document: Document): HTMLDivElement {
const element = document.createElement('div');
element.classList.add(DomConstants.ClassName.Item);
element.classList.add(DomConstants.ClassName.Stack);
return element;
}
} | the_stack |
import * as d3 from "d3";
import { assert } from "chai";
import * as sinon from "sinon";
import * as Plottable from "../../src";
import * as TestMethods from "../testMethods";
describe("Plots", () => {
describe("LinePlot", () => {
// Core LinePlot behavior: rendering, NaN/undefined skipping, class/attr handling.
describe("Basic Usage", () => {
let div: d3.Selection<HTMLDivElement, any, any, any>;
let xScale: Plottable.Scales.Linear;
let yScale: Plottable.Scales.Linear;
let linePlot: Plottable.Plots.Line<number>;
const data = [{x: 0, y: 0}, {x: 1, y: 1}];
let dataset: Plottable.Dataset;
beforeEach(() => {
div = TestMethods.generateDiv();
xScale = new Plottable.Scales.Linear();
yScale = new Plottable.Scales.Linear();
linePlot = new Plottable.Plots.Line<number>();
linePlot.x((d: any) => d.x, xScale);
linePlot.y((d: any) => d.y, yScale);
dataset = new Plottable.Dataset(data);
});
it("does not throw error when given NaN values", () => {
const dataWithNaN = [
{ x: 0.0, y: 0.0 },
{ x: 0.2, y: 0.2 },
{ x: 0.4, y: NaN },
{ x: 0.6, y: 0.6 },
{ x: 0.8, y: 0.8 },
];
linePlot.addDataset(new Plottable.Dataset(dataWithNaN));
assert.doesNotThrow(() => linePlot.renderTo(div), Error, "does not throw error with NaN data");
const entities = linePlot.entities();
// The NaN point is dropped, so one fewer entity than data points.
const expectedLength = dataWithNaN.length - 1;
assert.lengthOf(entities, expectedLength, "NaN data was not returned");
div.remove();
});
it("does not throw error when rendering without data", () => {
assert.doesNotThrow(() => linePlot.renderTo(div), Error, "does not throw error rendering without data");
div.remove();
});
it("retains original classes when setting class with attr", () => {
const cssClass = "pink";
linePlot.attr("class", cssClass);
// NOTE(review): renderTo is called before addDataset here — presumably
// addDataset triggers a re-render; confirm this ordering is intentional.
linePlot.renderTo(div);
linePlot.addDataset(dataset);
const linePath = linePlot.content().select(".line");
assert.isTrue(linePath.classed(cssClass), "custom class is applied");
assert.isTrue(linePath.classed("line"), "default class is retained");
div.remove();
});
it("draws a line with correct data points and fill and stroke settings", () => {
linePlot.addDataset(dataset);
linePlot.attr("stroke", (d, i, m) => d3.rgb(d.x, d.y, i).toString());
linePlot.renderTo(div);
const linePath = linePlot.content().select(".line");
TestMethods.assertPathEqualToDataPoints(linePath.attr("d"), data, xScale, yScale);
assert.strictEqual(linePath.style("fill"), "none", "line fill renders as \"none\"");
assert.strictEqual(linePath.attr("stroke"), "rgb(0, 0, 0)", "stroke set correctly");
div.remove();
});
it("can set attributes and render accordingly", () => {
linePlot.addDataset(dataset);
linePlot.renderTo(div);
const newColor = "pink";
linePlot.attr("stroke", newColor);
linePlot.render();
const linePath = linePlot.content().select(".line");
assert.strictEqual(linePath.attr("stroke"), newColor, "stroke changed correctly");
div.remove();
});
// Each variant replaces the middle point with NaN/undefined in x or y and
// expects the rendered path to contain only the four remaining valid points.
it("skips NaN and undefined x and y values", () => {
const lineData = [
{ x: 0.0, y: 0.0 },
{ x: 0.2, y: 0.2 },
{ x: 0.4, y: 0.4 },
{ x: 0.6, y: 0.6 },
{ x: 0.8, y: 0.8 },
];
const mutableDataset = new Plottable.Dataset(lineData);
linePlot.addDataset(mutableDataset);
linePlot.renderTo(div);
const linePath = linePlot.content().select(".line");
const validPoints = lineData.slice(0, 2).concat(lineData.slice(3, 5));
const dataWithNaN = lineData.slice();
dataWithNaN[2] = { x: 0.4, y: NaN };
mutableDataset.data(dataWithNaN);
linePlot.render();
TestMethods.assertPathEqualToDataPoints(linePath.attr("d"), validPoints, xScale, yScale);
dataWithNaN[2] = { x: NaN, y: 0.4 };
mutableDataset.data(dataWithNaN);
TestMethods.assertPathEqualToDataPoints(linePath.attr("d"), validPoints, xScale, yScale);
const dataWithUndefined = lineData.slice();
dataWithUndefined[2] = { x: 0.4, y: undefined };
mutableDataset.data(dataWithUndefined);
TestMethods.assertPathEqualToDataPoints(linePath.attr("d"), validPoints, xScale, yScale);
dataWithUndefined[2] = { x: undefined, y: 0.4 };
mutableDataset.data(dataWithUndefined);
TestMethods.assertPathEqualToDataPoints(linePath.attr("d"), validPoints, xScale, yScale);
div.remove();
});
});
// Verifies the dense-line collapsing optimization: points bucketed by x are
// reduced so the rendered path stays small.
describe("Collapse Dense Vertical Lines", () => {
const collapsableData = [
// start
[0, 10],
// bucket 1
[1, 7],
[1.1, 5],
[1, 7],
[1.1, 5],
[1, 7],
[1.1, 5],
[1, 7],
[1.1, 5],
// bucket 2
[4, 8],
[4.1, 2],
[4, 8],
[4.1, 2],
[4, 8],
[4.1, 2],
// end
[10, 10],
]; // data that should be collapsed
it("can set line collapse flag", () => {
const linePlot = new Plottable.Plots.Line<number>();
linePlot.collapseDenseLinesEnabled(true);
assert.isTrue(linePlot.collapseDenseLinesEnabled());
linePlot.collapseDenseLinesEnabled(false);
assert.isFalse(linePlot.collapseDenseLinesEnabled());
});
it("collapses lines when enabled", () => {
const div = TestMethods.generateDiv();
const linePlot = new Plottable.Plots.Line<number>();
linePlot.x((d: any) => d[0]);
linePlot.y((d: any) => d[1]);
linePlot.addDataset(new Plottable.Dataset(collapsableData));
linePlot.collapseDenseLinesEnabled(true);
// Spy on the private bucketing method to observe the collapsed output.
const spy = sinon.spy(linePlot, "_bucketByX");
linePlot.renderTo(div);
div.remove();
assert.equal(spy.callCount, 1, "called once");
assert.isDefined(spy.returnValues, "has returns");
assert.isDefined(spy.returnValues[0], "returned valid");
assert.equal(spy.returnValues[0].length, 8, "line was collapsed");
});
});
// Tests the curve/interpolation mode of the line plot.
describe("interpolation", () => {
    let div: d3.Selection<HTMLDivElement, any, any, any>;
    let xScale: Plottable.Scales.Linear;
    let yScale: Plottable.Scales.Linear;
    let linePlot: Plottable.Plots.Line<number>;
    beforeEach(() => {
        div = TestMethods.generateDiv();
        xScale = new Plottable.Scales.Linear();
        yScale = new Plottable.Scales.Linear();
        linePlot = new Plottable.Plots.Line<number>();
        linePlot.x((d: any) => d.x, xScale);
        linePlot.y((d: any) => d.y, yScale);
    });
    // Fixed typo in test title ("interploation" -> "interpolation").
    it("sets interpolation correctly", () => {
        assert.strictEqual(linePlot.curve(), "linear", "the default interpolation mode is linear");
        assert.strictEqual(linePlot.curve("step"), linePlot, "setting an interpolation mode returns the plot");
        assert.strictEqual(linePlot.curve(), "step", "setting an interpolation mode works");
        div.remove();
    });
    it("draws step function when interpolator is set to step", () => {
        const data = [
            {x: 0.0, y: 0},
            {x: 0.8, y: 0.717},
            {x: 1.6, y: 0.999},
            {x: 2.4, y: 0.675},
            {x: 3.2, y: -0.058},
            {x: 4.0, y: -0.756},
            {x: 4.8, y: -0.996},
            {x: 5.6, y: -0.631},
        ];
        linePlot.addDataset(new Plottable.Dataset(data));
        linePlot.renderTo(div);
        let svgPath = linePlot.content().select("path").attr("d");
        TestMethods.assertPathEqualToDataPoints(svgPath, data, xScale, yScale);
        linePlot.curve("step");
        svgPath = linePlot.content().select("path").attr("d");
        // A step curve inserts two points at each midpoint: one at the previous
        // y and one at the next y, producing the staircase shape.
        const stepPoints = [data[0]];
        for (let i = 1; i < data.length; i ++ ) {
            const p1 = data[i - 1];
            const p2 = data[i];
            stepPoints.push({ x: (p1.x + p2.x) / 2, y: p1.y });
            stepPoints.push({ x: (p1.x + p2.x) / 2, y: p2.y });
        }
        stepPoints.push({ x: data[data.length - 1].x, y: data[data.length - 1].y });
        TestMethods.assertPathEqualToDataPoints(svgPath, stepPoints, xScale, yScale);
        div.remove();
    });
});
// Tests entity/selection retrieval (entitiesIn, selections, entityNearest).
describe("selections", () => {
    let div: d3.Selection<HTMLDivElement, any, any, any>;
    let xScale: Plottable.Scales.Linear;
    let yScale: Plottable.Scales.Linear;
    const data = [{ x: 0, y: 0.75 }, { x: 1, y: 0.25 }];
    const data2 = [{ x: 0, y: 1 }, { x: 1, y: 0.95 }];
    let dataset: Plottable.Dataset;
    let dataset2: Plottable.Dataset;
    let linePlot: Plottable.Plots.Line<number>;
    beforeEach(() => {
        div = TestMethods.generateDiv();
        xScale = new Plottable.Scales.Linear();
        yScale = new Plottable.Scales.Linear();
        dataset = new Plottable.Dataset(data);
        dataset2 = new Plottable.Dataset(data2);
        linePlot = new Plottable.Plots.Line<number>();
        linePlot.x((d) => d.x, xScale);
        linePlot.y((d) => d.y, yScale);
        linePlot.addDataset(dataset);
        linePlot.renderTo(div);
    });
    afterEach(() => {
        div.remove();
    });
    it("can retrieve entities in a certain range", () => {
        linePlot.addDataset(dataset2);
        const entities = linePlot.entitiesIn({
            min: xScale.scale(0),
            max: xScale.scale(0),
        }, {
            min: yScale.scale(1),
            max: yScale.scale(0),
        });
        assert.lengthOf(entities, 2, "only two Entities have been retrieved");
        assert.deepEqual(entities[0].datum, { x: 0, y: 0.75 }, "correct datum has been retrieved");
        assert.deepEqual(entities[1].datum, { x: 0, y: 1 }, "correct datum has been retrieved");
    });
    it("can retrieve Entities in a certain bounds", () => {
        linePlot.addDataset(dataset2);
        const entities = linePlot.entitiesIn({
            topLeft: {
                x: xScale.scale(0),
                y: yScale.scale(1),
            },
            bottomRight: {
                x: xScale.scale(0),
                y: yScale.scale(0),
            },
        });
        assert.lengthOf(entities, 2, "only two Entities have been retrieved");
        assert.deepEqual(entities[0].datum, { x: 0, y: 0.75 }, "correct datum has been retrieved");
        assert.deepEqual(entities[1].datum, { x: 0, y: 1 }, "correct datum has been retrieved");
    });
    it("doesn't return entities outside of the bounds", () => {
        const entities = linePlot.entitiesIn({
            topLeft: {
                x: xScale.scale(0.01),
                y: yScale.scale(1),
            },
            bottomRight: {
                x: xScale.scale(0.01),
                y: yScale.scale(0),
            },
        });
        assert.lengthOf(entities, 0, "no entities have been retrieved");
    });
    it("retrieves all dataset selections with no args", () => {
        linePlot.addDataset(dataset2);
        const allLines = linePlot.selections();
        assert.strictEqual(allLines.size(), 2, "all lines retrieved");
        // Removed a redundant div.remove(): afterEach already disposes of the div.
    });
    it("retrieves selections for selected dataset", () => {
        linePlot.addDataset(dataset2);
        const allLines = linePlot.selections([dataset]);
        assert.strictEqual(allLines.size(), 1, "all lines retrieved");
        const selectionData = allLines.data()[0];
        // Fixed copy-pasted message: this asserts on the first dataset, not a third.
        assert.deepEqual(selectionData, dataset.data(), "first dataset data in selection data");
    });
    it("skips invalid Dataset", () => {
        const dummyDataset = new Plottable.Dataset([]);
        const allLines = linePlot.selections([dataset, dummyDataset]);
        assert.strictEqual(allLines.size(), 1, "all lines retrieved");
        const selectionData = allLines.data()[0];
        // Fixed copy-pasted message: this asserts on the first dataset, not a third.
        assert.deepEqual(selectionData, dataset.data(), "first dataset data in selection data");
    });
    it("can retrieve the nearest Entity", () => {
        let px = xScale.scale(data[0].x);
        let py = yScale.scale(data[0].y);
        let closest = linePlot.entityNearest({x: px, y: py + 1});
        assert.strictEqual(closest.datum, data[0], "it retrieves the closest point from above");
        closest = linePlot.entityNearest({x: px, y: py - 1});
        assert.strictEqual(closest.datum, data[0], "it retrieves the closest point from below");
        px = xScale.scale(data[1].x);
        py = yScale.scale(data[1].y);
        closest = linePlot.entityNearest({x: px + 1, y: py});
        assert.strictEqual(closest.datum, data[1], "it retrieves the closest point from the right");
        closest = linePlot.entityNearest({x: px - 1, y: py});
        assert.strictEqual(closest.datum, data[1], "it retrieves the closest point from the left");
    });
    it("considers only in-view points for the nearest Entity", () => {
        xScale.domain([0.25, 1]);
        const closest = linePlot.entityNearest({ x: xScale.scale(data[0].x), y: yScale.scale(data[0].y) });
        assert.strictEqual(closest.datum, data[1], "it retrieves the closest point in-view");
    });
    it("returns undefined if no Entities are visible", () => {
        dataset.data([]);
        const closest = linePlot.entityNearest({ x: 0, y: 0 });
        assert.isUndefined(closest, "returns undefined if no Entity can be found");
    });
});
// Tests autorangeSmooth: the dependent scale's domain is extended so the line
// meets the viewport edges, regardless of when the option is toggled relative
// to accessors, autorangeMode, and rendering.
describe("smooth autoranging", () => {
    let div: d3.Selection<HTMLDivElement, any, any, any>;
    let xScale: Plottable.Scales.Linear;
    let yScale: Plottable.Scales.Linear;
    const data = [
        {x: 0.0, y: -1},
        {x: 1.8, y: -2},
    ];
    let dataset: Plottable.Dataset;
    let line: Plottable.Plots.Line<number>;
    beforeEach(() => {
        div = TestMethods.generateDiv();
        xScale = new Plottable.Scales.Linear();
        yScale = new Plottable.Scales.Linear();
        xScale.domain([0.1, 1.1]);
        line = new Plottable.Plots.Line<number>();
        line.x(function(d) { return d.x; }, xScale);
        line.y(function(d) { return d.y; }, yScale);
        dataset = new Plottable.Dataset(data);
    });
    // Fixed trailing space in test title.
    it("handles autoranging smoothly", () => {
        line.addDataset(dataset);
        line.autorangeMode("y");
        xScale.padProportion(0);
        yScale.padProportion(0);
        line.renderTo(div);
        assert.deepEqual(yScale.domain(), [0, 1], "when there are no visible points in the view, the y-scale domain defaults to [0, 1]");
        line.autorangeSmooth(true);
        // Linear interpolation of the segment to the x-domain edges gives the
        // expected y-domain bounds.
        const base = data[0].y;
        let x1 = xScale.domain()[1] - data[0].x;
        const x2 = data[1].x - data[0].x;
        const y2 = data[1].y - data[0].y;
        const expectedBottom = base + y2 * x1 / x2;
        x1 = xScale.domain()[0] - data[0].x;
        const expectedTop = base + y2 * x1 / x2;
        assert.closeTo(yScale.domain()[0], expectedBottom, 0.001, "smooth autoranging forces the domain to include the line (left)");
        assert.closeTo(yScale.domain()[1], expectedTop, 0.001, "smooth autoranging forces the domain to include the line (right)");
        line.autorangeSmooth(false);
        assert.deepEqual(yScale.domain(), [0, 1], "resetting the smooth autorange works");
        xScale.domain([data[0].x, data[1].x]);
        // Fixed typo in assertion message ("poitns" -> "points").
        assert.deepEqual(yScale.domain(), [-2, -1], "no changes for autoranging smooth with same edge points (no smooth)");
        line.autorangeSmooth(true);
        assert.deepEqual(yScale.domain(), [-2, -1], "no changes for autoranging smooth with same edge points (smooth)");
        div.remove();
    });
    it("handles autoranging smoothly when autorangeSmooth set before accessors", () => {
        line = new Plottable.Plots.Line<number>();
        line.autorangeSmooth(true);
        line.x(function(d) { return d.x; }, xScale);
        line.y(function(d) { return d.y; }, yScale);
        line.addDataset(dataset);
        line.autorangeMode("y");
        line.renderTo(div);
        const base = data[0].y;
        let x1 = (xScale.domain()[1] - data[0].x) - (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const x2 = data[1].x - data[0].x;
        const y2 = data[1].y - data[0].y;
        const expectedTop = base + y2 * x1 / x2;
        x1 = (xScale.domain()[0] - data[0].x) + (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const expectedBottom = base + y2 * x1 / x2;
        assert.closeTo(yScale.domain()[0], expectedBottom, 0.001, "smooth autoranging forces the domain to include the line (left)");
        assert.closeTo(yScale.domain()[1], expectedTop, 0.001, "smooth autoranging forces the domain to include the line (right)");
        div.remove();
    });
    it("handles autoranging smoothly when autorangeSmooth set before autorangeMode", () => {
        line.addDataset(dataset);
        line.autorangeSmooth(true);
        line.autorangeMode("y");
        line.renderTo(div);
        const base = data[0].y;
        let x1 = (xScale.domain()[1] - data[0].x) - (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const x2 = data[1].x - data[0].x;
        const y2 = data[1].y - data[0].y;
        const expectedTop = base + y2 * x1 / x2;
        x1 = (xScale.domain()[0] - data[0].x) + (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const expectedBottom = base + y2 * x1 / x2;
        assert.closeTo(yScale.domain()[0], expectedBottom, 0.001, "smooth autoranging forces the domain to include the line (left)");
        assert.closeTo(yScale.domain()[1], expectedTop, 0.001, "smooth autoranging forces the domain to include the line (right)");
        div.remove();
    });
    it("handles autoranging smoothly when autorangeSmooth set before rendering", () => {
        line.addDataset(dataset);
        line.autorangeMode("y");
        line.autorangeSmooth(true);
        line.renderTo(div);
        const base = data[0].y;
        let x1 = (xScale.domain()[1] - data[0].x) - (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const x2 = data[1].x - data[0].x;
        const y2 = data[1].y - data[0].y;
        const expectedTop = base + y2 * x1 / x2;
        x1 = (xScale.domain()[0] - data[0].x) + (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const expectedBottom = base + y2 * x1 / x2;
        assert.closeTo(yScale.domain()[0], expectedBottom, 0.001, "smooth autoranging forces the domain to include the line (left)");
        assert.closeTo(yScale.domain()[1], expectedTop, 0.001, "smooth autoranging forces the domain to include the line (right)");
        div.remove();
    });
    it("handles autoranging smoothly when autorangeSmooth set after rendering", () => {
        line.addDataset(dataset);
        line.autorangeMode("y");
        line.renderTo(div);
        line.autorangeSmooth(true);
        const base = data[0].y;
        let x1 = (xScale.domain()[1] - data[0].x) - (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const x2 = data[1].x - data[0].x;
        const y2 = data[1].y - data[0].y;
        const expectedTop = base + y2 * x1 / x2;
        x1 = (xScale.domain()[0] - data[0].x) + (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const expectedBottom = base + y2 * x1 / x2;
        assert.closeTo(yScale.domain()[0], expectedBottom, 0.001, "smooth autoranging forces the domain to include the line (left)");
        assert.closeTo(yScale.domain()[1], expectedTop, 0.001, "smooth autoranging forces the domain to include the line (right)");
        div.remove();
    });
    it("handles autoranging smoothly when autorangeSmooth set after rendering, before autorangeMode", () => {
        line.addDataset(dataset);
        line.renderTo(div);
        line.autorangeSmooth(true);
        line.autorangeMode("y");
        const base = data[0].y;
        let x1 = (xScale.domain()[1] - data[0].x) - (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const x2 = data[1].x - data[0].x;
        const y2 = data[1].y - data[0].y;
        const expectedTop = base + y2 * x1 / x2;
        x1 = (xScale.domain()[0] - data[0].x) + (xScale.domain()[1] - xScale.domain()[0]) * (1 + yScale.padProportion() / 2);
        const expectedBottom = base + y2 * x1 / x2;
        assert.closeTo(yScale.domain()[0], expectedBottom, 0.001, "smooth autoranging forces the domain to include the line (left)");
        assert.closeTo(yScale.domain()[1], expectedTop, 0.001, "smooth autoranging forces the domain to include the line (right)");
        div.remove();
    });
    // Fixed typo in test title ("correcly" -> "correctly").
    it("handles autoDomain correctly with and without autorangeSmooth", () => {
        const expectedDomain = [-0.2, 2];
        xScale.domain([-0.2, 2]);
        line.addDataset(dataset);
        line.autorangeMode("y");
        line.autorangeSmooth(true);
        xScale.autoDomain();
        line.renderTo(div);
        assert.deepEqual(xScale.domain(), expectedDomain, "autoDomain works even when autoranging is done smoothly");
        line.autorangeSmooth(false);
        assert.deepEqual(xScale.domain(), expectedDomain, "autoDomain works when smooth autoranging is disabled back");
        div.remove();
    });
    // Fixed typo in test title ("correcly" -> "correctly").
    it("handles autoDomain correctly with smooth autoranging set after rendering", () => {
        const expectedDomain = [-0.2, 2];
        xScale.domain([-0.2, 2]);
        line.addDataset(dataset);
        line.renderTo(div);
        line.autorangeSmooth(true);
        xScale.autoDomain();
        assert.deepEqual(xScale.domain(), expectedDomain, "autoDomain works even when autoranging is done smoothly");
        line.autorangeSmooth(false);
        assert.deepEqual(xScale.domain(), expectedDomain, "autoDomain works when smooth autoranging is disabled back");
        div.remove();
    });
    it("handles autorange smoothly for vertical lines", () => {
        yScale.domain([0.1, 1.1]);
        const verticalLinesData = [
            {x: -2, y: 1.8},
            {x: -1, y: 0.0},
        ];
        line.addDataset(new Plottable.Dataset(verticalLinesData));
        line.autorangeMode("x");
        xScale.padProportion(0);
        yScale.padProportion(0);
        line.renderTo(div);
        assert.deepEqual(xScale.domain(), [0, 1], "when there are no visible points in the view, the x-scale domain defaults to [0, 1]");
        line.autorangeSmooth(true);
        const base = verticalLinesData[0].x;
        let x1 = (yScale.domain()[1] - verticalLinesData[0].y);
        const x2 = verticalLinesData[1].y - verticalLinesData[0].y;
        const y2 = verticalLinesData[1].x - verticalLinesData[0].x;
        const expectedTop = base + y2 * x1 / x2;
        x1 = (yScale.domain()[0] - verticalLinesData[0].y);
        const expectedBottom = base + y2 * x1 / x2;
        assert.closeTo(xScale.domain()[0], expectedTop, 0.001, "smooth autoranging forces the domain to include the line (left)");
        assert.closeTo(xScale.domain()[1], expectedBottom, 0.001, "smooth autoranging forces the domain to include the line (right)");
        line.autorangeSmooth(false);
        assert.deepEqual(xScale.domain(), [0, 1], "resetting the smooth autorange works");
        yScale.domain([verticalLinesData[0].y, verticalLinesData[1].y]);
        // Fixed typo in assertion message ("poitns" -> "points").
        assert.deepEqual(xScale.domain(), [-2, -1], "no changes for autoranging smooth with same edge points (no smooth)");
        line.autorangeSmooth(true);
        assert.deepEqual(xScale.domain(), [-2, -1], "no changes for autoranging smooth with same edge points (smooth)");
        div.remove();
    });
});
// Tests the cropped-rendering optimization: points outside the viewport
// (plus one neighbor on each side, so the line enters/exits correctly)
// are excluded from the rendered path.
describe("Cropped Rendering Performance", () => {
let div: d3.Selection<HTMLDivElement, any, any, any>;
let plot: Plottable.Plots.Line<number>;
let xScale: Plottable.Scales.Linear;
let yScale: Plottable.Scales.Linear;
beforeEach(() => {
div = TestMethods.generateDiv();
xScale = new Plottable.Scales.Linear();
yScale = new Plottable.Scales.Linear();
plot = new Plottable.Plots.Line<number>();
plot.x((d) => d.x, xScale).y((d) => d.y, yScale);
});
it("can set the croppedRendering option", () => {
plot.renderTo(div);
assert.isTrue(plot.croppedRenderingEnabled(), "croppedRendering is enabled by default");
assert.strictEqual(plot.croppedRenderingEnabled(false), plot, "setting the croppedRendering option returns the plot");
assert.isFalse(plot.croppedRenderingEnabled(), "can disable the croppedRendering option");
assert.strictEqual(plot.croppedRenderingEnabled(true), plot, "setting the croppedRendering option returns the plot");
assert.isTrue(plot.croppedRenderingEnabled(), "can enable the croppedRendering option");
div.remove();
});
it("does not render lines that are outside the viewport", () => {
const data = [
{x: 1, y: 1},
{x: 2, y: 2},
{x: 3, y: 1},
{x: 4, y: 2},
{x: 5, y: 1},
];
plot.addDataset(new Plottable.Dataset(data));
// Only middle point is in viewport
xScale.domain([2.5, 3.5]);
plot.croppedRenderingEnabled(true);
plot.renderTo(div);
const path = plot.content().select("path.line").attr("d");
// Indices 1..3: the in-view point plus one neighbor on either side.
const expectedRenderedData = [1, 2, 3].map((d) => data[d]);
TestMethods.assertPathEqualToDataPoints(path, expectedRenderedData, xScale, yScale);
div.remove();
});
it("works when the performance option is set after rendering to svg", () => {
const data = [
{x: 1, y: 1},
{x: 2, y: 2},
{x: 3, y: 1},
{x: 4, y: 2},
{x: 5, y: 1},
];
plot.addDataset(new Plottable.Dataset(data));
// Only middle point is in viewport
xScale.domain([2.5, 3.5]);
plot.renderTo(div);
plot.croppedRenderingEnabled(true);
const path = plot.content().select("path.line").attr("d");
const expectedRenderedData = [1, 2, 3].map((d) => data[d]);
TestMethods.assertPathEqualToDataPoints(path, expectedRenderedData, xScale, yScale);
div.remove();
});
it("works for vertical line plots", () => {
const data = [
{x: 1, y: 1},
{x: 2, y: 2},
{x: 1, y: 3},
{x: 2, y: 4},
{x: 1, y: 5},
];
plot.addDataset(new Plottable.Dataset(data));
xScale.padProportion(0);
// Only middle point is in viewport
yScale.domain([2.5, 3.5]);
plot.croppedRenderingEnabled(true);
plot.renderTo(div);
const path = plot.content().select("path.line").attr("d");
const expectedRenderedData = [1, 2, 3].map((d) => data[d]);
TestMethods.assertPathEqualToDataPoints(path, expectedRenderedData, xScale, yScale);
div.remove();
});
it("adapts to scale changes", () => {
const data = [
{x: 1, y: 1},
{x: 2, y: 2},
{x: 3, y: 1},
{x: 4, y: 2},
{x: 5, y: 1},
];
plot.addDataset(new Plottable.Dataset(data));
plot.croppedRenderingEnabled(true);
plot.renderTo(div);
let path = plot.content().select("path.line").attr("d");
TestMethods.assertPathEqualToDataPoints(path, [0, 1, 2, 3, 4].map((d) => data[d]), xScale, yScale);
// Only middle point is in viewport
xScale.domain([2.5, 3.5]);
path = plot.content().select("path.line").attr("d");
TestMethods.assertPathEqualToDataPoints(path, [1, 2, 3].map((d) => data[d]), xScale, yScale);
// Only first point is in viewport
xScale.domain([-0.5, 1.5]);
path = plot.content().select("path.line").attr("d");
TestMethods.assertPathEqualToDataPoints(path, [0, 1].map((d) => data[d]), xScale, yScale);
div.remove();
});
});
// Tests the downsampling optimization: within a pixel-sized bucket only the
// first, last, minimum and maximum points are kept.
describe("Downsampling Performance", () => {
    let div: d3.Selection<HTMLDivElement, any, any, any>;
    let plot: Plottable.Plots.Line<number>;
    let xScale: Plottable.Scales.Linear;
    let yScale: Plottable.Scales.Linear;
    beforeEach(() => {
        div = TestMethods.generateDiv(50, 50);
        xScale = new Plottable.Scales.Linear();
        yScale = new Plottable.Scales.Linear();
        plot = new Plottable.Plots.Line<number>();
        plot.x((d) => d.x, xScale).y((d) => d.y, yScale);
    });
    it("can set the downsampling option", () => {
        plot.renderTo(div);
        assert.isFalse(plot.downsamplingEnabled(), "downsampling is not enabled by default");
        assert.strictEqual(plot.downsamplingEnabled(true), plot, "enabling the downsampling option returns the plot");
        assert.isTrue(plot.downsamplingEnabled(), "can enable the downsampling option");
        plot.downsamplingEnabled(false);
        assert.isFalse(plot.downsamplingEnabled(), "can disable the downsampling option");
        div.remove();
    });
    it("does not render points that should be removed in downsampling in horizontal line plots" , () => {
        const data = [
            {x: -100, y: -1}, // last element in previous bucket
            {x: 0, y: 2}, // first element in current bucket
            {x: 0.5, y: 1.5}, // the point to be removed
            {x: 1, y: 1}, // minimum y in current bucket
            {x: 2, y: 4}, // maximum y in current bucket
            {x: 3, y: 3}, // last element in current bucket
            {x: 100, y: 2}, // first element in next bucket
        ];
        plot.addDataset(new Plottable.Dataset(data));
        xScale.domain([-100, 100]);
        plot.downsamplingEnabled(true);
        plot.renderTo(div);
        // Sanity check: only data[1..5] collapse to the same scaled x pixel.
        const lineScaledXValue = Math.floor(xScale.scale(data[1].x));
        assert.notStrictEqual(Math.floor(xScale.scale(data[0].x)), lineScaledXValue,
            `point(${data[0].x},${data[0].y}) should not have the same scaled x value as the horizontal line`);
        data.slice(1, 6).forEach((d, i) => {
            // Fixed malformed message: added the missing ")" after the y coordinate.
            assert.strictEqual(Math.floor(xScale.scale(d.x)), lineScaledXValue,
                `point(${d.x},${d.y}) should have the same scaled x value as the horizontal line`);
        });
        assert.notStrictEqual(Math.floor(xScale.scale(data[6].x)), lineScaledXValue,
            `point(${data[6].x},${data[6].y}) should not have the same scaled x value as the horizontal line`);
        const path = plot.content().select("path.line").attr("d");
        const expectedRenderedData = [0, 1, 4, 3, 5, 6].map((d) => data[d]);
        TestMethods.assertPathEqualToDataPoints(path, expectedRenderedData, xScale, yScale);
        div.remove();
    });
    it("does not render points that should be removed in downsampling in vertical line plots", () => {
        const data = [
            {x: -1, y: -50}, // last element in previous bucket
            {x: 2, y: 1}, // first element in current bucket
            {x: 1.5, y: 1.5}, // the point to be removed
            {x: 1, y: 2}, // minimum x in current bucket
            {x: 4, y: 3}, // maximum x in current bucket
            {x: 3, y: 4}, // last element in current bucket
            {x: 2, y: 100}, // first element in next bucket
        ];
        plot.addDataset(new Plottable.Dataset(data));
        yScale.domain([-200, 200]);
        plot.downsamplingEnabled(true);
        plot.renderTo(div);
        const lineScaledYValue = Math.floor(yScale.scale(data[1].y));
        assert.notStrictEqual(Math.floor(yScale.scale(data[0].y)), lineScaledYValue,
            `point(${data[0].x},${data[0].y}) should not have the same scaled y value as the vertical line`);
        data.slice(1, 6).forEach((d, i) => {
            assert.strictEqual(Math.floor(yScale.scale(d.y)), lineScaledYValue,
                `point(${d.x},${d.y}) should have the same scaled y value as the vertical line`);
        });
        assert.notStrictEqual(Math.floor(yScale.scale(data[6].y)), lineScaledYValue,
            `point(${data[6].x},${data[6].y}) should not have the same scaled y value as the vertical line`);
        const path = plot.content().select("path.line").attr("d");
        const expectedRenderedData = [0, 1, 3, 4, 5, 6].map((d) => data[d]);
        TestMethods.assertPathEqualToDataPoints(path, expectedRenderedData, xScale, yScale);
        div.remove();
    });
    it("does not render points that are on the same line except for the first, the last, the largest and the smallest points", () => {
        const data = [
            {x: 3, y: 1}, // last element in previous bucket
            {x: 2, y: 2}, // first element in the bucket
            {x: 1, y: 1}, // minimum element in the bucket
            {x: 10, y: 10}, // maximum element in the bucket
            {x: 2.5, y: 2.5}, // the point to be removed
            {x: 3, y: 3}, // last element in the bucket
            {x: 3, y: 1}, // first element in next bucket
        ];
        plot.addDataset(new Plottable.Dataset(data));
        const expectedYValue = (p1: any, p2: any, slope: number) => {
            return p1.y + (p2.x - p1.x) * slope;
        };
        const lineCurrentSlope = (data[2].y - data[1].y) / (data[2].x - data[1].x);
        assert.notStrictEqual(Math.floor(expectedYValue(data[1], data[0], lineCurrentSlope)), Math.floor(data[0].y),
            `point(${data[0].x},${data[0].y}) is not on the line with slope ${lineCurrentSlope}`);
        data.slice(1, 6).forEach((d, i) => {
            assert.strictEqual(Math.floor(expectedYValue(data[1], d, lineCurrentSlope)), Math.floor(d.y),
                `point(${d.x},${d.y}) is on the line with slope ${lineCurrentSlope}`);
        });
        assert.notStrictEqual(Math.floor(expectedYValue(data[1], data[6], lineCurrentSlope)), Math.floor(data[6].y),
            `point(${data[6].x},${data[6].y}) is not on the line with slope ${lineCurrentSlope}`);
        plot.downsamplingEnabled(true);
        plot.renderTo(div);
        const path = plot.content().select("path.line").attr("d");
        const expectedRenderedData = [0, 1, 2, 3, 5, 6].map((d) => data[d]);
        TestMethods.assertPathEqualToDataPoints(path, expectedRenderedData, xScale, yScale);
        div.remove();
    });
});
// Tests the canvas renderer path of the line plot.
describe("canvas rendering", () => {
let div: d3.Selection<HTMLDivElement, any, any, any>;
let plot: Plottable.Plots.Line<number>;
let xScale: Plottable.Scales.Linear;
let yScale: Plottable.Scales.Linear;
beforeEach(() => {
div = TestMethods.generateDiv();
xScale = new Plottable.Scales.Linear();
yScale = new Plottable.Scales.Linear();
plot = new Plottable.Plots.Line<number>();
plot.x((d) => d.x, xScale).y((d) => d.y, yScale);
plot.renderer("canvas");
});
afterEach(() => {
div.remove();
});
it("passes the right datum to the accessors", () => {
const datum = {x: 1, y: 2};
const dataset = new Plottable.Dataset([datum]);
const attrSpy = sinon.spy();
plot.datasets([dataset]);
plot.attr("stroke", attrSpy);
plot.renderTo(div);
// Accessor contract: (datum, index, dataset).
assert.isTrue(attrSpy.calledWith(datum, 0, dataset), "attr is passed individual datum");
});
});
});
}); | the_stack |
import { expect } from "chai";
import { from as rxjsFrom } from "rxjs/internal/observable/from";
import * as sinon from "sinon";
import * as moq from "typemoq";
import { CheckBoxState } from "@itwin/core-react";
import { SelectionMode } from "../../../components-react/common/selection/SelectionModes";
import { RangeSelection, TreeSelectionManager } from "../../../components-react/tree/controlled/internal/TreeSelectionManager";
import { from } from "../../../components-react/tree/controlled/Observable";
import { TreeEventDispatcher } from "../../../components-react/tree/controlled/TreeEventDispatcher";
import {
TreeCheckboxStateChangeEventArgs, TreeEvents, TreeSelectionModificationEventArgs, TreeSelectionReplacementEventArgs,
} from "../../../components-react/tree/controlled/TreeEvents";
import {
isTreeModelNode, isTreeModelRootNode, MutableTreeModelNode, TreeModel, TreeModelNodePlaceholder, VisibleTreeNodes,
} from "../../../components-react/tree/controlled/TreeModel";
import { ITreeNodeLoader } from "../../../components-react/tree/controlled/TreeNodeLoader";
import { extractSequence } from "../../common/ObservableTestHelpers";
import { createRandomMutableTreeModelNode, createRandomMutableTreeModelNodes } from "./RandomTreeNodesHelpers";
// Unit tests for TreeEventDispatcher. The dispatcher translates low-level UI
// interactions (mouse clicks, checkbox toggles, expand/collapse, keyboard
// events) into TreeEvents callbacks and delegates selection bookkeeping to an
// internal TreeSelectionManager. All collaborators (TreeEvents,
// ITreeNodeLoader, VisibleTreeNodes, TreeModel) are typemoq mocks.
describe("TreeEventDispatcher", () => {
  let dispatcher: TreeEventDispatcher;
  const treeEventsMock = moq.Mock.ofType<TreeEvents>();
  const treeNodeLoaderMock = moq.Mock.ofType<ITreeNodeLoader>();
  const visibleNodesMock = moq.Mock.ofType<VisibleTreeNodes>();
  const modelMock = moq.Mock.ofType<TreeModel>();
  let selectionManager: TreeSelectionManager;
  // Fixture: four pre-selected and four pre-deselected visible nodes.
  let selectedNodes: MutableTreeModelNode[];
  let deselectedNodes: MutableTreeModelNode[];
  // Placeholders stand in for not-yet-loaded nodes; `loadedNode` /
  // `loadedChildNode` are what the mocked node loader resolves them to.
  let placeholderNode: TreeModelNodePlaceholder;
  let placeholderChildNode: TreeModelNodePlaceholder;
  let loadedNode: MutableTreeModelNode;
  let loadedChildNode: MutableTreeModelNode;
  let testNodes: MutableTreeModelNode[];
  beforeEach(() => {
    treeEventsMock.reset();
    treeNodeLoaderMock.reset();
    dispatcher = new TreeEventDispatcher(treeEventsMock.object, treeNodeLoaderMock.object, SelectionMode.Extended, () => visibleNodesMock.object);
    // Reach into the dispatcher's private selection manager so tests can emit
    // selection events directly on it.
    selectionManager = (dispatcher as any)._selectionManager;
    mockVisibleNodes();
  });
  // (Re)configures the visible-nodes / model / node-loader mocks. The visible
  // nodes iterate in the order: selected nodes, optional child placeholder,
  // optional root-level placeholder, deselected nodes. The node loader is set
  // up to resolve the root-level placeholder to `loadedNode` and the child
  // placeholder (parented under selectedNodes[3]) to `loadedChildNode`.
  const mockVisibleNodes = (addRootLevelPlaceholderNode = false, addChildPlaceholderNode = false) => {
    modelMock.reset();
    visibleNodesMock.reset();
    treeNodeLoaderMock.reset();
    selectedNodes = createRandomMutableTreeModelNodes(4).map((node) => ({ ...node, isSelected: true }));
    deselectedNodes = createRandomMutableTreeModelNodes(4).map((node) => ({ ...node, isSelected: false }));
    placeholderNode = { childIndex: 0, depth: 0 };
    placeholderChildNode = { childIndex: 0, depth: 1, parentId: selectedNodes[3].id };
    loadedNode = createRandomMutableTreeModelNode();
    loadedChildNode = createRandomMutableTreeModelNode(selectedNodes[3].id);
    testNodes = [...selectedNodes, ...deselectedNodes];
    const iterator = function* () {
      for (const node of selectedNodes)
        yield node;
      if (addChildPlaceholderNode)
        yield placeholderChildNode;
      if (addRootLevelPlaceholderNode)
        yield placeholderNode;
      for (const node of deselectedNodes)
        yield node;
    };
    visibleNodesMock.setup((x) => x.getModel()).returns(() => modelMock.object);
    // +2 accounts for the two optional placeholder entries.
    visibleNodesMock.setup((x) => x.getNumNodes()).returns(() => testNodes.length + 2);
    visibleNodesMock.setup((x) => x[Symbol.iterator]).returns(() => iterator);
    for (const node of testNodes) {
      modelMock.setup((x) => x.getNode(node.id)).returns(() => node);
    }
    modelMock.setup((x) => x.getRootNode()).returns(() => ({ depth: -1, id: undefined, numChildren: undefined }));
    modelMock.setup((x) => x.getNode(loadedNode.id)).returns(() => loadedNode);
    modelMock.setup((x) => x.getNode(loadedChildNode.id)).returns(() => loadedChildNode);
    modelMock.setup((x) => x.getNode(selectedNodes[3].id)).returns(() => selectedNodes[3]);
    treeNodeLoaderMock.setup((x) => x.loadNode(moq.It.is((parent) => isTreeModelRootNode(parent)), 0)).returns(() => from([{ loadedNodes: [loadedNode.item] }]));
    treeNodeLoaderMock.setup((x) => x.loadNode(moq.It.is((parent) => isTreeModelNode(parent) && parent.id === selectedNodes[3].id), 0))
      .returns(() => from([{ loadedNodes: [loadedChildNode.item] }]));
  };
  describe("constructor", () => {
    // Verifies the subscriptions the constructor sets up on the internal
    // selection manager's events.
    describe("onDragSelection handler", () => {
      it("selects range of nodes", async () => {
        const rangeSelection: RangeSelection = {
          from: deselectedNodes[0].id,
          to: deselectedNodes[deselectedNodes.length - 1].id,
        };
        const expectedSelectedNodeItems = deselectedNodes.map((node) => node.item);
        const spy = sinon.spy();
        treeEventsMock.setup((x) => x.onSelectionModified).returns(() => spy);
        selectionManager.onDragSelection.emit({ selectionChanges: from([{ selectedNodes: rangeSelection, deselectedNodes: [] }]) });
        expect(spy).to.be.called;
        // The event payload carries an observable of modifications; drain it
        // to inspect the emitted selection change.
        const spyArgs = spy.args[0][0] as TreeSelectionModificationEventArgs;
        const results = await extractSequence(rxjsFrom(spyArgs.modifications));
        expect(results).to.not.be.empty;
        const selectionChange = results[0];
        expect(selectionChange.selectedNodeItems).to.be.deep.eq(expectedSelectedNodeItems);
        expect(selectionChange.deselectedNodeItems).to.be.empty;
      });
      it("selects range of nodes and loads unloaded nodes", async () => {
        // Insert a child placeholder inside the selected range; the dispatcher
        // should load it via the node loader and include the loaded item.
        mockVisibleNodes(false, true);
        const rangeSelection = {
          from: selectedNodes[3].id,
          to: deselectedNodes[0].id,
        };
        const expectedSelectedNodeIds = [selectedNodes[3].item, deselectedNodes[0].item, loadedChildNode.item];
        const spy = sinon.spy();
        treeEventsMock.setup((x) => x.onSelectionModified).returns(() => spy);
        selectionManager.onDragSelection.emit({ selectionChanges: from([{ selectedNodes: rangeSelection, deselectedNodes: [] }]) });
        expect(spy).to.be.called;
        const spyArgs = spy.args[0][0] as TreeSelectionModificationEventArgs;
        const results = await extractSequence(rxjsFrom(spyArgs.modifications));
        expect(results).to.not.be.empty;
        const selectionChange = results[0];
        expect(selectionChange.selectedNodeItems).to.be.deep.eq(expectedSelectedNodeIds);
        expect(selectionChange.deselectedNodeItems).to.be.empty;
      });
      it("selects range of nodes and loads unloaded nodes hierarchy", async () => {
        // Root-level placeholder inside the range; the loader here returns a
        // two-node hierarchy in a single load result.
        mockVisibleNodes(true);
        const rangeSelection = {
          from: selectedNodes[3].id,
          to: deselectedNodes[0].id,
        };
        treeNodeLoaderMock.reset();
        treeNodeLoaderMock.setup((x) => x.loadNode(moq.It.is((parent) => isTreeModelRootNode(parent)), 0))
          .returns(() => from([{ loadedNodes: [loadedNode.item, loadedChildNode.item] }]));
        const expectedSelectedNodeItems = [selectedNodes[3].item, deselectedNodes[0].item, loadedNode.item, loadedChildNode.item];
        const spy = sinon.spy();
        treeEventsMock.setup((x) => x.onSelectionModified).returns(() => spy);
        selectionManager.onDragSelection.emit({ selectionChanges: from([{ selectedNodes: rangeSelection, deselectedNodes: [] }]) });
        expect(spy).to.be.called;
        const spyArgs = spy.args[0][0] as TreeSelectionModificationEventArgs;
        const results = await extractSequence(rxjsFrom(spyArgs.modifications));
        expect(results).to.not.be.empty;
        const selectionChange = results[0];
        expect(selectionChange.selectedNodeItems).to.be.deep.eq(expectedSelectedNodeItems);
        expect(selectionChange.deselectedNodeItems).to.be.empty;
      });
      it("does not select nodes if visible nodes are not set", async () => {
        dispatcher.setVisibleNodes(undefined!);
        const rangeSelection: RangeSelection = {
          from: deselectedNodes[0].id,
          to: deselectedNodes[deselectedNodes.length - 1].id,
        };
        const spy = sinon.spy();
        treeEventsMock.setup((x) => x.onSelectionModified).returns(() => spy);
        selectionManager.onDragSelection.emit({ selectionChanges: from([{ selectedNodes: rangeSelection, deselectedNodes: [] }]) });
        // The event still fires, but with an empty modification.
        expect(spy).to.be.called;
        const spyArgs = spy.args[0][0] as TreeSelectionModificationEventArgs;
        const results = await extractSequence(rxjsFrom(spyArgs.modifications));
        expect(results).to.not.be.empty;
        expect(results[0].selectedNodeItems).to.be.empty;
        expect(results[0].deselectedNodeItems).to.be.empty;
      });
    });
    describe("onSelectionChanged handler", () => {
      it("selects and deselects nodes", async () => {
        // Swap selection: previously selected nodes get deselected and vice versa.
        const deselectedNodeIds = selectedNodes.map((node) => node.id);
        const selectedNodeIds = deselectedNodes.map((node) => node.id);
        const spy = sinon.spy();
        treeEventsMock.setup((x) => x.onSelectionModified).returns(() => spy);
        selectionManager.onSelectionChanged.emit({ selectedNodes: selectedNodeIds, deselectedNodes: deselectedNodeIds });
        expect(spy).to.be.called;
        const selectedNodeItems = deselectedNodes.map((node) => node.item);
        const deselectedNodeItems = selectedNodes.map((node) => node.item);
        const spyArgs = spy.args[0][0] as TreeSelectionModificationEventArgs;
        const results = await extractSequence(rxjsFrom(spyArgs.modifications));
        expect(results).to.not.be.empty;
        const selectionChange = results[0];
        expect(selectionChange.selectedNodeItems).to.be.deep.eq(selectedNodeItems);
        expect(selectionChange.deselectedNodeItems).to.be.deep.eq(deselectedNodeItems);
      });
    });
    describe("onSelectionReplaced handler", () => {
      it("replaces selected nodes", async () => {
        const selectedNodeIds = deselectedNodes.map((node) => node.id);
        const selectedNodeItems = deselectedNodes.map((node) => node.item);
        const spy = sinon.spy();
        treeEventsMock.setup((x) => x.onSelectionReplaced).returns(() => spy);
        selectionManager.onSelectionReplaced.emit({ selectedNodeIds });
        expect(spy).to.be.called;
        const spyArgs = spy.args[0][0] as TreeSelectionReplacementEventArgs;
        const results = await extractSequence(rxjsFrom(spyArgs.replacements));
        expect(results).to.not.be.empty;
        const selectionChange = results[0];
        expect(selectionChange.selectedNodeItems).to.be.deep.eq(selectedNodeItems);
      });
      it("replaces selected nodes using range selection from one node", async () => {
        // Degenerate range (from === to) should resolve to that single node.
        const selection = {
          from: deselectedNodes[0].id,
          to: deselectedNodes[0].id,
        };
        const spy = sinon.spy();
        treeEventsMock.setup((x) => x.onSelectionReplaced).returns(() => spy);
        selectionManager.onSelectionReplaced.emit({ selectedNodeIds: selection });
        expect(spy).to.be.called;
        const spyArgs = spy.args[0][0] as TreeSelectionReplacementEventArgs;
        const results = await extractSequence(rxjsFrom(spyArgs.replacements));
        expect(results).to.not.be.empty;
        const selectionChange = results[0];
        expect(selectionChange.selectedNodeItems).to.be.deep.eq([deselectedNodes[0].item]);
      });
    });
  });
  describe("onNodeCheckboxClicked", () => {
    it("changes state for clicked node", async () => {
      // Clicking an unselected node's checkbox affects only that node.
      const expectedAffectedNodeItems = [deselectedNodes[0].item];
      const spy = sinon.spy();
      treeEventsMock.setup((x) => x.onCheckboxStateChanged).returns(() => spy);
      dispatcher.onNodeCheckboxClicked(deselectedNodes[0].id, CheckBoxState.On);
      expect(spy).to.be.calledOnce;
      const changes = spy.args[0][0] as TreeCheckboxStateChangeEventArgs;
      const results = await extractSequence(rxjsFrom(changes.stateChanges));
      expect(results).to.not.be.empty;
      const affectedNodeItems = results[0].map((change) => change.nodeItem);
      expect(affectedNodeItems).to.be.deep.eq(expectedAffectedNodeItems);
    });
    it("changes state for all selected nodes", async () => {
      // Clicking a selected node's checkbox applies the state to the whole
      // selection.
      const expectedAffectedNodeItems = [...selectedNodes.map((node) => node.item)];
      const spy = sinon.spy();
      treeEventsMock.setup((x) => x.onCheckboxStateChanged).returns(() => spy);
      dispatcher.onNodeCheckboxClicked(selectedNodes[0].id, CheckBoxState.On);
      const changes = spy.args[0][0] as TreeCheckboxStateChangeEventArgs;
      const results = await extractSequence(rxjsFrom(changes.stateChanges));
      expect(results).to.not.be.empty;
      const affectedItems = results[0].map((change) => change.nodeItem);
      expect(affectedItems).to.be.deep.eq(expectedAffectedNodeItems);
    });
    it("changes state for all selected nodes including pending selection", async () => {
      // simulate selection event in progress
      // if selection modified event is still in progress, dispatcher saves ongoing event data in _activeSelections set
      (dispatcher as any)._activeSelections.add(from([{ selectedNodeItems: [deselectedNodes[0].item], deselectedNodeItems: [] }]));
      const expectedAffectedItems = [...selectedNodes.map((node) => node.item), deselectedNodes[0].item];
      const spy = sinon.spy();
      treeEventsMock.setup((x) => x.onCheckboxStateChanged).returns(() => spy);
      dispatcher.onNodeCheckboxClicked(selectedNodes[0].id, CheckBoxState.On);
      const checkboxChanges = spy.args[0][0] as TreeCheckboxStateChangeEventArgs;
      const results = await extractSequence(rxjsFrom(checkboxChanges.stateChanges));
      expect(results).to.not.be.empty;
      // The state changes may arrive in several batches; flatten them before
      // comparing against the expected affected items.
      const affectedItems = results
        .reduce((acc, el) => acc.concat(el), [])
        .map((change) => change.nodeItem);
      expect(affectedItems).to.be.deep.eq(expectedAffectedItems);
    });
    it("does not dispatch event if visibleNodes are not set", async () => {
      dispatcher.setVisibleNodes(undefined!);
      const spy = sinon.spy();
      treeEventsMock.setup((x) => x.onCheckboxStateChanged).returns(() => spy);
      dispatcher.onNodeCheckboxClicked(selectedNodes[0].id, CheckBoxState.On);
      expect(spy).to.not.be.called;
    });
    it("does not dispatch event if clicked node is not found", async () => {
      const spy = sinon.spy();
      treeEventsMock.setup((x) => x.onCheckboxStateChanged).returns(() => spy);
      modelMock.setup((x) => x.getNode("NoNode")).returns(() => undefined);
      dispatcher.onNodeCheckboxClicked("NoNode", CheckBoxState.On);
      expect(spy).to.not.be.called;
    });
  });
  describe("onNodeExpanded", () => {
    it("emits tree event", () => {
      treeEventsMock.setup((x) => x.onNodeExpanded!({ nodeId: testNodes[0].id })).verifiable(moq.Times.once());
      dispatcher.onNodeExpanded(testNodes[0].id);
      treeEventsMock.verifyAll();
    });
  });
  describe("onNodeCollapsed", () => {
    it("emits tree event", () => {
      treeEventsMock.setup((x) => x.onNodeCollapsed!({ nodeId: testNodes[0].id })).verifiable(moq.Times.once());
      dispatcher.onNodeCollapsed(testNodes[0].id);
      treeEventsMock.verifyAll();
    });
  });
  describe("onNodeClicked", () => {
    it("calls selection manager onNodeClicked", () => {
      const eventMock = moq.Mock.ofType<React.MouseEvent<Element, MouseEvent>>();
      const spy = sinon.spy(selectionManager, "onNodeClicked");
      dispatcher.onNodeClicked(testNodes[0].id, eventMock.object);
      expect(spy).to.be.calledWith(testNodes[0].id, eventMock.object);
    });
    it("calls tree events onDelayedNodeClick if node is selected", () => {
      const eventMock = moq.Mock.ofType<React.MouseEvent<Element, MouseEvent>>();
      testNodes[0].isSelected = true;
      dispatcher.onNodeClicked(testNodes[0].id, eventMock.object);
      // NOTE(review): a single click is expected to invoke onDelayedNodeClick
      // exactly twice — presumably a dispatcher implementation detail; confirm
      // this count is intentional rather than a copy of a stale expectation.
      treeEventsMock.verify((x) => x.onDelayedNodeClick!({ nodeId: testNodes[0].id }), moq.Times.exactly(2));
    });
    it("does not call tree events onDelayedNodeClick if node is not selected", () => {
      const eventMock = moq.Mock.ofType<React.MouseEvent<Element, MouseEvent>>();
      testNodes[0].isSelected = false;
      dispatcher.onNodeClicked(testNodes[0].id, eventMock.object);
      treeEventsMock.verify((x) => x.onDelayedNodeClick!({ nodeId: testNodes[0].id }), moq.Times.never());
    });
    it("does not call tree events onDelayedNodeClick if node does not exist", () => {
      // Resetting the model mock makes getNode() return undefined for any id.
      modelMock.reset();
      const eventMock = moq.Mock.ofType<React.MouseEvent<Element, MouseEvent>>();
      testNodes[0].isSelected = false;
      dispatcher.onNodeClicked(testNodes[0].id, eventMock.object);
      treeEventsMock.verify((x) => x.onDelayedNodeClick!({ nodeId: testNodes[0].id }), moq.Times.never());
    });
    it("does not call tree events onDelayedNodeClick if visible nodes are not set", () => {
      dispatcher.setVisibleNodes(undefined!);
      const eventMock = moq.Mock.ofType<React.MouseEvent<Element, MouseEvent>>();
      testNodes[0].isSelected = false;
      dispatcher.onNodeClicked(testNodes[0].id, eventMock.object);
      treeEventsMock.verify((x) => x.onDelayedNodeClick!({ nodeId: testNodes[0].id }), moq.Times.never());
    });
  });
  describe("onNodeMouseDown", () => {
    it("calls selection manager onNodeMouseDown", () => {
      const spy = sinon.spy(selectionManager, "onNodeMouseDown");
      dispatcher.onNodeMouseDown(testNodes[0].id);
      expect(spy).to.be.calledWith(testNodes[0].id);
    });
  });
  describe("onNodeMouseMove", () => {
    it("calls selection manager onNodeMouseMove", () => {
      const spy = sinon.spy(selectionManager, "onNodeMouseMove");
      dispatcher.onNodeMouseMove(testNodes[0].id);
      expect(spy).to.be.calledWith(testNodes[0].id);
    });
  });
  describe("Keyboard Events", () => {
    const keyEventMock = moq.Mock.ofType<React.KeyboardEvent>();
    beforeEach(() => {
      keyEventMock.reset();
    });
    it("calls selection manager onTreeKeyDown", () => {
      const spy = sinon.spy(selectionManager, "onTreeKeyDown");
      dispatcher.onTreeKeyDown(keyEventMock.object);
      expect(spy).to.be.called;
    });
    it("calls selection manager onTreeKeyUp", () => {
      const spy = sinon.spy(selectionManager, "onTreeKeyUp");
      dispatcher.onTreeKeyUp(keyEventMock.object);
      expect(spy).to.be.called;
    });
  });
  describe("onNodeEditorActivated", () => {
    it("calls tree events onNodeEditorActivated if node is selected", () => {
      testNodes[0].isSelected = true;
      dispatcher.onNodeEditorActivated(testNodes[0].id);
      // NOTE(review): like onDelayedNodeClick above, a single activation is
      // expected to invoke the callback exactly twice — TODO confirm this
      // count is intentional.
      treeEventsMock.verify((x) => x.onNodeEditorActivated!({ nodeId: testNodes[0].id }), moq.Times.exactly(2));
    });
    it("does not call tree events onNodeEditorActivated if node is not selected", () => {
      testNodes[0].isSelected = false;
      dispatcher.onNodeEditorActivated(testNodes[0].id);
      treeEventsMock.verify((x) => x.onNodeEditorActivated!({ nodeId: testNodes[0].id }), moq.Times.never());
    });
    it("does not call tree events onNodeEditorActivated if node id is invalid", () => {
      const nodeId = "invalid";
      dispatcher.onNodeEditorActivated(nodeId);
      treeEventsMock.verify((x) => x.onNodeEditorActivated!({ nodeId }), moq.Times.never());
    });
  });
});
import {
NoBundleOrRedirectToError,
NoMatchingEntrypointError,
} from "../../../src/common/errors";
import { htmlWith, test } from "./testUtils";
// Black-box routing tests for the RespondToEndpointRequest usecase. Each
// `test(...)` call declares a set of entrypoints (urlMatcher plus optional
// bundle content, fallback asset, custom headers or redirect target) and a
// list of request/response expectations; the `test` helper from testUtils
// runs every testCase against that configuration. Bundle asset bodies encode
// their own location ("<urlMatcher> + <asset path>") so assertions can tell
// exactly which entrypoint/asset served a response.
describe("usecase RespondToEndpointRequest (routing)", () => {
    /*
     * NoMatchingEntrypointError
     */
    test("NoMatchingEntrypointError when no matching entrypoint is found", {
        entrypoints: [],
        testCases: [
            {
                requestedUrl: "domain.com/asset",
                expectedError: NoMatchingEntrypointError,
            },
        ],
    });
    /*
     * NoBundleOrRedirectToError
     */
    test(
        "NoBundleOrRedirectToError when the matching entrypoint has no linked bundle and no configured redirect",
        {
            entrypoints: [{ urlMatcher: "domain.com/" }],
            testCases: [
                {
                    requestedUrl: "domain.com/asset",
                    expectedError: NoBundleOrRedirectToError,
                },
            ],
        }
    );
    /*
     * 301 responses
     */
    // A request for a path that itself matches an entrypoint's urlMatcher
    // (minus the trailing slash) is permanently redirected to the slashed
    // form — unless the path resolves to an asset of a shorter-matching
    // entrypoint, in which case the asset is served directly.
    test(
        "PermanentRedirect to trailing slash when urlMatcher without trailing slash is requested",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        path: "domain.com/ + /path",
                        pathAsset: "domain.com/ + /pathAsset",
                        fallback: "domain.com + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
                {
                    urlMatcher: "domain.com/path/",
                    bundleContent: {
                        subpath: "domain.com/path/ + /subpath",
                        subpathAsset: "domain.com/path/ + /subpathAsset",
                        fallback: "domain.com/path/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
                {
                    urlMatcher: "domain.com/path/subpath/",
                    bundleContent: {
                        fallback: "domain.com/path/subpath/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
            ],
            testCases: [
                // Cases where redirection is needed
                {
                    requestedUrl: "domain.com/path",
                    expectedStatusCode: 301,
                    expectedLocationHeader: "/path/",
                },
                {
                    requestedUrl: "domain.com/path/subpath",
                    expectedStatusCode: 301,
                    expectedLocationHeader: "/path/subpath/",
                },
                // Cases where redirection is not needed
                {
                    requestedUrl: "domain.com/pathAsset",
                    expectedStatusCode: 200,
                    expectedBody: "domain.com/ + /pathAsset",
                },
                {
                    requestedUrl: "domain.com/path/subpathAsset",
                    expectedStatusCode: 200,
                    expectedBody: "domain.com/path/ + /subpathAsset",
                },
            ],
        }
    );
    // Requests whose path contains extra (non-matching) segments before an
    // existing asset path are permanently redirected to the asset's canonical
    // path under the matching entrypoint.
    test("PermanentRedirect to the canonical path", {
        entrypoints: [
            {
                urlMatcher: "domain.com/",
                bundleContent: {
                    asset: "domain.com/ + /asset",
                    nested: {
                        asset: "domain.com/ + /nested/asset",
                        "index.html": "domain.com/ + /nested/index.html",
                    },
                    fallback: "domain.com/ + /fallback",
                },
                bundleFallbackAssetPath: "/fallback",
            },
            {
                urlMatcher: "domain.com/path/",
                bundleContent: {
                    asset: "domain.com/path/ + /asset",
                    nested: { asset: "domain.com/path/ + /nested/asset" },
                    fallback: "domain.com/path/ + /fallback",
                },
                bundleFallbackAssetPath: "/fallback",
            },
        ],
        testCases: [
            // domain.com/ cases
            {
                requestedUrl: "domain.com/prefix/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/asset",
            },
            {
                requestedUrl: "domain.com/prefix/nested/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/nested/asset",
            },
            {
                requestedUrl: "domain.com/nested/prefix/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/asset",
            },
            {
                requestedUrl: "domain.com/nested/prefix/nested/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/nested/asset",
            },
            // domain.com/path/ cases
            {
                requestedUrl: "domain.com/path/prefix/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/path/asset",
            },
            {
                requestedUrl: "domain.com/path/prefix/nested/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/path/nested/asset",
            },
            {
                requestedUrl: "domain.com/path/nested/prefix/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/path/asset",
            },
            {
                requestedUrl: "domain.com/path/nested/prefix/nested/asset",
                expectedStatusCode: 301,
                expectedLocationHeader: "/path/nested/asset",
            },
        ],
    });
    /*
     * 302 responses
     */
    test("TemporaryRedirect to the entrypoint's redirectTo when specified", {
        entrypoints: [
            {
                urlMatcher: "domain.com/",
                redirectTo: "https://redirect.location",
            },
        ],
        testCases: [
            {
                requestedUrl: "domain.com/",
                expectedStatusCode: 302,
                expectedLocationHeader: "https://redirect.location",
            },
        ],
    });
    /*
     * 200 and fallback responses
     */
    // When several entrypoints match a request, the one with the longest
    // urlMatcher wins.
    test("gives precedence to longer-matching entrypoints", {
        entrypoints: [
            {
                urlMatcher: "domain.com/",
                bundleContent: {
                    path: { asset: "domain.com/ + /path/asset" },
                    fallback: "domain.com/ + fallback",
                },
                bundleFallbackAssetPath: "/fallback",
            },
            {
                urlMatcher: "domain.com/path/",
                bundleContent: {
                    asset: "domain.com/path/ + /asset",
                    subpath: { asset: "domain.com/path/ + /subpath/asset" },
                    fallback: "domain.com/path/ + /fallback",
                },
                bundleFallbackAssetPath: "/fallback",
            },
            {
                urlMatcher: "domain.com/path/subpath/",
                bundleContent: {
                    asset: "domain.com/path/subpath/ + /asset",
                    fallback: "domain.com/path/subpath/ + /fallback",
                },
                bundleFallbackAssetPath: "/fallback",
            },
        ],
        testCases: [
            {
                requestedUrl: "domain.com/path/asset",
                expectedStatusCode: 200,
                expectedBody: "domain.com/path/ + /asset",
            },
            {
                requestedUrl: "domain.com/path/subpath/asset",
                expectedStatusCode: 200,
                expectedBody: "domain.com/path/subpath/ + /asset",
            },
        ],
    });
    test("serves the correct asset when requested by canonical path", {
        entrypoints: [
            {
                urlMatcher: "domain.com/",
                bundleContent: {
                    asset: "domain.com/ + /asset",
                    nested: { asset: "domain.com/ + /nested/asset" },
                    fallback: "domain.com/ + /fallback",
                },
                bundleFallbackAssetPath: "/fallback",
            },
        ],
        testCases: [
            {
                requestedUrl: "domain.com/asset",
                expectedStatusCode: 200,
                expectedBody: "domain.com/ + /asset",
            },
            {
                requestedUrl: "domain.com/nested/asset",
                expectedStatusCode: 200,
                expectedBody: "domain.com/ + /nested/asset",
            },
        ],
    });
    // A hostname with a trailing dot (e.g. "domain.com.") is presumably the
    // absolute/FQDN form of the same host and should route identically.
    test(
        "serves the correct asset when requested with a hostname ending with dot",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        asset: "domain.com/ + /asset",
                        nested: { asset: "domain.com/ + /nested/asset" },
                        fallback: "domain.com/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
            ],
            testCases: [
                {
                    requestedUrl: "domain.com./asset",
                    expectedStatusCode: 200,
                    expectedBody: "domain.com/ + /asset",
                },
                {
                    requestedUrl: "domain.com./nested/asset",
                    expectedStatusCode: 200,
                    expectedBody: "domain.com/ + /nested/asset",
                },
            ],
        }
    );
    // "Clean URL" resolution: /path may be served by /path.html ...
    test(
        "when file requestedPath doesn't exist, but requestPath + .html exists, serves it",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        "path.html": htmlWith("domain.com/ + /path.html"),
                        fallback: "domain.com/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
            ],
            testCases: [
                {
                    requestedUrl: "domain.com/path",
                    expectedStatusCode: 200,
                    expectedBody: htmlWith("domain.com/ + /path.html"),
                },
                {
                    requestedUrl: "domain.com/path/",
                    expectedStatusCode: 200,
                    expectedBody: htmlWith("domain.com/ + /path.html"),
                },
            ],
        }
    );
    // ... or by /path/index.html (directory index).
    test(
        "when file requestedPath doesn't exist, but requestPath + /index.html exists, serves it",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        "index.html": htmlWith("domain.com/ + /index.html"),
                        path: {
                            "index.html": htmlWith(
                                "domain.com/ + /path/index.html"
                            ),
                        },
                        fallback: "domain.com/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
            ],
            testCases: [
                {
                    requestedUrl: "domain.com/",
                    expectedStatusCode: 200,
                    expectedBody: htmlWith("domain.com/ + /index.html"),
                },
                {
                    requestedUrl: "domain.com/path",
                    expectedStatusCode: 200,
                    expectedBody: htmlWith("domain.com/ + /path/index.html"),
                },
                {
                    requestedUrl: "domain.com/path/",
                    expectedStatusCode: 200,
                    expectedBody: htmlWith("domain.com/ + /path/index.html"),
                },
            ],
        }
    );
    // Resolution precedence: exact file > file + ".html" > dir + "/index.html".
    test(
        "when files requestedPath and requestedPath + .html exist, serves the first",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        path: "domain.com/ + /path",
                        "path.html": htmlWith("domain.com/ + /path.html"),
                        fallback: "domain.com/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
            ],
            testCases: [
                {
                    requestedUrl: "domain.com/path",
                    expectedStatusCode: 200,
                    expectedBody: "domain.com/ + /path",
                },
                {
                    requestedUrl: "domain.com/path/",
                    expectedStatusCode: 200,
                    expectedBody: "domain.com/ + /path",
                },
            ],
        }
    );
    test(
        "when file requestedPath doesn't exist, but files requestedPath + .html and requestedPath + /index.html exist, serves the first",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        "path.html": htmlWith("domain.com/ + /path.html"),
                        path: {
                            "index.html": htmlWith(
                                "domain.com/ + /path/index.html"
                            ),
                        },
                        fallback: "domain.com/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                },
            ],
            testCases: [
                {
                    requestedUrl: "domain.com/path",
                    expectedStatusCode: 200,
                    expectedBody: htmlWith("domain.com/ + /path.html"),
                },
                {
                    requestedUrl: "domain.com/path/",
                    expectedStatusCode: 200,
                    expectedBody: htmlWith("domain.com/ + /path.html"),
                },
            ],
        }
    );
    test("serves the fallback asset when no asset matches", {
        entrypoints: [
            {
                urlMatcher: "domain.com/",
                bundleContent: {
                    fallback: "domain.com/ + /fallback",
                },
                bundleFallbackAssetPath: "/fallback",
            },
        ],
        testCases: [
            {
                requestedUrl: "domain.com/non-existing",
                expectedStatusCode: 200,
                expectedBody: "domain.com/ + /fallback",
            },
            {
                requestedUrl: "domain.com/prefix/non-existing",
                expectedStatusCode: 200,
                expectedBody: "domain.com/ + /fallback",
            },
        ],
    });
    // 299 is a deliberately non-standard 2xx status so the test can tell the
    // bundle-configured code apart from the default 200.
    test(
        "when serving fallback asset, uses the status code specified in the bundle",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        path: "domain.com/ + /path",
                        fallback: "domain.com/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                    bundleFallbackStatusCode: 299,
                },
            ],
            testCases: [
                {
                    requestedUrl: "domain.com/path",
                    expectedStatusCode: 200,
                    expectedBody: "domain.com/ + /path",
                },
                {
                    requestedUrl: "domain.com/fallback",
                    expectedStatusCode: 299,
                    expectedBody: "domain.com/ + /fallback",
                },
                {
                    requestedUrl: "domain.com/non-existing",
                    expectedStatusCode: 299,
                    expectedBody: "domain.com/ + /fallback",
                },
                {
                    requestedUrl: "domain.com/prefix/non-existing",
                    expectedStatusCode: 299,
                    expectedBody: "domain.com/ + /fallback",
                },
            ],
        }
    );
    // Header rules are keyed by exact path or glob pattern and merged into the
    // response on top of the auto-detected content-type.
    test("adds custom asset-headers specified in the bundle to the response", {
        entrypoints: [
            {
                urlMatcher: "domain.com/",
                bundleContent: {
                    asset: "domain.com/ + /asset",
                    "asset.html": htmlWith("domain.com/ + /asset.html"),
                    "asset.js": "domain.com/ + /asset.js",
                    fallback: "domain.com/ + /fallback",
                },
                bundleFallbackAssetPath: "/fallback",
                bundleHeaders: {
                    // / (slash) and * can't be used in headers without
                    // escaping, use _ and - instead
                    "/asset": { _asset: "_asset" },
                    "**/*.html": { "--_-.html": "--_-.html" },
                },
            },
        ],
        testCases: [
            {
                requestedUrl: "domain.com/asset",
                expectedStatusCode: 200,
                expectedHeaders: {
                    _asset: "_asset",
                    "content-type": "application/octet-stream",
                },
                expectedBody: "domain.com/ + /asset",
            },
            {
                requestedUrl: "domain.com/asset.html",
                expectedStatusCode: 200,
                expectedHeaders: {
                    "--_-.html": "--_-.html",
                    "content-type": "text/html",
                },
                expectedBody: htmlWith("domain.com/ + /asset.html"),
            },
        ],
    });
    test(
        "allows overriding an asset's content-type header by specifing a custom header for it",
        {
            entrypoints: [
                {
                    urlMatcher: "domain.com/",
                    bundleContent: {
                        asset: "domain.com/ + /asset",
                        "asset.html": htmlWith("domain.com/ + /asset.html"),
                        "asset.js": "domain.com/ + /asset.js",
                        fallback: "domain.com/ + /fallback",
                    },
                    bundleFallbackAssetPath: "/fallback",
                    bundleFallbackStatusCode: 200,
                    bundleHeaders: {
                        "/asset": { "content-type": "custom" },
                        "**/*.html": { "content-type": "custom/html" },
                        "**/*.js": { "content-type": "custom/js" },
                    },
                },
            ],
            testCases: [
                {
                    requestedUrl: "domain.com/asset",
                    expectedStatusCode: 200,
                    expectedHeaders: {
                        "content-type": "custom",
                    },
                    expectedBody: "domain.com/ + /asset",
                },
                {
                    requestedUrl: "domain.com/asset.html",
                    expectedStatusCode: 200,
                    expectedHeaders: {
                        "content-type": "custom/html",
                    },
                    expectedBody: htmlWith("domain.com/ + /asset.html"),
                },
                {
                    requestedUrl: "domain.com/asset.js",
                    expectedStatusCode: 200,
                    expectedHeaders: {
                        "content-type": "custom/js",
                    },
                    expectedBody: "domain.com/ + /asset.js",
                },
            ],
        }
    );
});
import { fakeAsync, TestBed, tick } from '@angular/core/testing';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { Post } from 'app/entities/metis/post.model';
import { Course } from 'app/entities/course.model';
import { MockPostService } from '../../helpers/mocks/service/mock-post.service';
import { MockAnswerPostService } from '../../helpers/mocks/service/mock-answer-post.service';
import { MetisService } from 'app/shared/metis/metis.service';
import { MockAccountService } from '../../helpers/mocks/service/mock-account.service';
import { PostService } from 'app/shared/metis/post.service';
import { AnswerPostService } from 'app/shared/metis/answer-post.service';
import { AccountService } from 'app/core/auth/account.service';
import { AnswerPost } from 'app/entities/metis/answer-post.model';
import { ReactionService } from 'app/shared/metis/reaction.service';
import { MockReactionService } from '../../helpers/mocks/service/mock-reaction.service';
import { Reaction } from 'app/entities/metis/reaction.model';
import { CourseWideContext, DisplayPriority, MetisPostAction, MetisWebsocketChannelPrefix, PageType } from 'app/shared/metis/metis.util';
import { MockTranslateService } from '../../helpers/mocks/service/mock-translate.service';
import { TranslateService } from '@ngx-translate/core';
import { Router } from '@angular/router';
import { MockRouter } from '../../helpers/mocks/mock-router';
import { MockLocalStorageService } from '../../helpers/mocks/service/mock-local-storage.service';
import { LocalStorageService, SessionStorageService } from 'ngx-webstorage';
import { MockProvider } from 'ng-mocks';
import { JhiWebsocketService } from 'app/core/websocket/websocket.service';
import { MetisPostDTO } from 'app/entities/metis/metis-post-dto.model';
import { of } from 'rxjs';
import {
metisCourse,
metisCoursePostsWithCourseWideContext,
metisExercise,
metisExercisePosts,
metisLecture,
metisLecturePosts,
metisPostExerciseUser1,
metisReactionUser2,
metisResolvingAnswerPostUser1,
metisUser1,
metisUser2,
} from '../../helpers/sample/metis-sample-data';
import { ITEMS_PER_PAGE } from 'app/shared/constants/pagination.constants';
// Unit tests for MetisService (course discussion/Q&A service): verifies delegation to
// post/answer-post/reaction services, filter caching, link/query-param helpers, and
// websocket subscription handling.
describe('Metis Service', () => {
    let metisService: MetisService;
    let metisServiceUserStub: jest.SpyInstance;
    let metisServiceGetFilteredPostsSpy: jest.SpyInstance;
    let metisServiceCreateWebsocketSubscriptionSpy: jest.SpyInstance;
    let websocketServiceSubscribeSpy: jest.SpyInstance;
    let websocketServiceReceiveStub: jest.SpyInstance;
    let websocketService: JhiWebsocketService;
    let reactionService: ReactionService;
    let postService: PostService;
    let answerPostService: AnswerPostService;
    let post: Post;
    let answerPost: AnswerPost;
    let reaction: Reaction;
    let course: Course;
    beforeEach(() => {
        TestBed.configureTestingModule({
            imports: [HttpClientTestingModule],
            providers: [
                MockProvider(SessionStorageService),
                { provide: MetisService, useClass: MetisService },
                { provide: ReactionService, useClass: MockReactionService },
                { provide: PostService, useClass: MockPostService },
                { provide: AnswerPostService, useClass: MockAnswerPostService },
                { provide: AccountService, useClass: MockAccountService },
                { provide: TranslateService, useClass: MockTranslateService },
                { provide: Router, useClass: MockRouter },
                { provide: LocalStorageService, useClass: MockLocalStorageService },
            ],
        });
        metisService = TestBed.inject(MetisService);
        websocketService = TestBed.inject(JhiWebsocketService);
        reactionService = TestBed.inject(ReactionService);
        postService = TestBed.inject(PostService);
        answerPostService = TestBed.inject(AnswerPostService);
        // Spies wrap the real service methods so delegation can be asserted without replacing behavior.
        metisServiceGetFilteredPostsSpy = jest.spyOn(metisService, 'getFilteredPosts');
        metisServiceCreateWebsocketSubscriptionSpy = jest.spyOn(metisService, 'createWebsocketSubscription');
        metisServiceUserStub = jest.spyOn(metisService, 'getUser');
        // Shared fixtures from metis-sample-data. NOTE(review): these are module-level objects, so the
        // displayPriority mutation below leaks into other spec files sharing the fixture — confirm intended.
        post = metisPostExerciseUser1;
        post.displayPriority = DisplayPriority.PINNED;
        answerPost = metisResolvingAnswerPostUser1;
        reaction = metisReactionUser2;
        course = metisCourse;
    });
    afterEach(() => {
        jest.restoreAllMocks();
    });
    // Verifies that MetisService delegates CRUD operations to PostService and does not
    // trigger a re-fetch of the filtered post list on its own.
    describe('Invoke post service methods', () => {
        it('should create a post', fakeAsync(() => {
            const postServiceSpy = jest.spyOn(postService, 'create');
            const createdPostSub = metisService.createPost(post).subscribe((createdPost) => {
                expect(createdPost).toEqual(post);
            });
            expect(postServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            createdPostSub.unsubscribe();
        }));
        it('should delete a post', fakeAsync(() => {
            const postServiceSpy = jest.spyOn(postService, 'delete');
            metisService.deletePost(post);
            expect(postServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
        }));
        it('should update a post', fakeAsync(() => {
            const postServiceSpy = jest.spyOn(postService, 'update');
            const updatedPostSub = metisService.updatePost(post).subscribe((updatedPost) => {
                expect(updatedPost).toEqual(post);
            });
            expect(postServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            updatedPostSub.unsubscribe();
        }));
        it('should pin a post', fakeAsync(() => {
            const postServiceSpy = jest.spyOn(postService, 'updatePostDisplayPriority');
            const updatedPostSub = metisService.updatePostDisplayPriority(post.id!, DisplayPriority.PINNED).subscribe((updatedPost) => {
                expect(updatedPost).toEqual({ id: post.id, displayPriority: DisplayPriority.PINNED });
            });
            expect(postServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            updatedPostSub.unsubscribe();
        }));
        it('should archive a post', fakeAsync(() => {
            const postServiceSpy = jest.spyOn(postService, 'updatePostDisplayPriority');
            const updatedPostSub = metisService.updatePostDisplayPriority(post.id!, DisplayPriority.ARCHIVED).subscribe((updatedPost) => {
                expect(updatedPost).toEqual({ id: post.id, displayPriority: DisplayPriority.ARCHIVED });
            });
            expect(postServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            updatedPostSub.unsubscribe();
        }));
        it('should get correct list of posts when set', fakeAsync(() => {
            metisService.setPosts([post]);
            tick();
            // posts is an observable stream; the value set above must be replayed to new subscribers.
            const postsSub = metisService.posts.subscribe((posts) => {
                expect(posts).toEqual([post]);
            });
            tick();
            postsSub.unsubscribe();
        }));
        it('should update post tags', () => {
            const postServiceSpy = jest.spyOn(postService, 'getAllPostTagsByCourseId');
            metisService.updateCoursePostTags();
            expect(postServiceSpy).toHaveBeenCalled();
        });
it('should get posts for lecture filter', () => {
const postServiceSpy = jest.spyOn(postService, 'getPosts');
metisService.getFilteredPosts({ lectureId: metisLecture.id }, false);
expect(postServiceSpy).toBeCalledTimes(1);
// don't change filter
metisService.getFilteredPosts({ lectureId: metisLecture.id }, false);
expect(postServiceSpy).toBeCalledTimes(1);
// change filter
metisService.getFilteredPosts({ lectureId: undefined, exerciseId: metisExercise.id }, false);
expect(postServiceSpy).toBeCalledTimes(2);
// change filter
metisService.getFilteredPosts(
{
lectureId: undefined,
exerciseId: undefined,
courseId: metisCourse.id,
},
false,
);
expect(postServiceSpy).toBeCalledTimes(3);
});
it('should get posts for exercise filter', () => {
const postServiceSpy = jest.spyOn(postService, 'getPosts');
metisService.getFilteredPosts({ exerciseId: metisExercise.id }, false);
expect(postServiceSpy).toHaveBeenCalled();
// don't change filter
metisService.getFilteredPosts({ exerciseId: metisExercise.id }, false);
expect(postServiceSpy).toBeCalledTimes(1);
// change filter
metisService.getFilteredPosts({ lectureId: metisLecture.id, exerciseId: undefined }, false);
expect(postServiceSpy).toBeCalledTimes(2);
// change filter
metisService.getFilteredPosts(
{
lectureId: undefined,
exerciseId: undefined,
courseWideContext: CourseWideContext.RANDOM,
},
false,
);
expect(postServiceSpy).toBeCalledTimes(3);
});
        it('should get posts for course-context filter', () => {
            const postServiceSpy = jest.spyOn(postService, 'getPosts');
            metisService.getFilteredPosts({ courseWideContext: CourseWideContext.RANDOM });
            expect(postServiceSpy).toHaveBeenCalled();
        });
        it('should get posts for course', () => {
            const postServiceSpy = jest.spyOn(postService, 'getPosts');
            metisService.getFilteredPosts({ courseId: course.id });
            expect(postServiceSpy).toHaveBeenCalled();
        });
        it('should get similar posts within course', () => {
            const postServiceSpy = jest.spyOn(postService, 'computeSimilarityScoresWithCoursePosts');
            metisService.getSimilarPosts(post);
            expect(postServiceSpy).toHaveBeenCalled();
        });
    });
    // Delegation of answer-post CRUD to AnswerPostService; no implicit list refresh expected.
    describe('Invoke answer post service methods', () => {
        it('should create an answer post', fakeAsync(() => {
            const answerPostServiceSpy = jest.spyOn(answerPostService, 'create');
            const createdAnswerPostSub = metisService.createAnswerPost(answerPost).subscribe((createdAnswerPost) => {
                expect(createdAnswerPost).toEqual(answerPost);
            });
            expect(answerPostServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            createdAnswerPostSub.unsubscribe();
        }));
        it('should delete an answer post', fakeAsync(() => {
            const answerPostServiceSpy = jest.spyOn(answerPostService, 'delete');
            metisService.deleteAnswerPost(answerPost);
            expect(answerPostServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
        }));
        it('should update an answer post', fakeAsync(() => {
            const answerPostServiceSpy = jest.spyOn(answerPostService, 'update');
            const updatedAnswerPostSub = metisService.updateAnswerPost(answerPost).subscribe((updatedAnswerPost) => {
                expect(updatedAnswerPost).toEqual(answerPost);
            });
            expect(answerPostServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            updatedAnswerPostSub.unsubscribe();
        }));
    });
    // Delegation of reaction create/delete to ReactionService.
    describe('Invoke reaction service methods', () => {
        it('should create a reaction', fakeAsync(() => {
            const reactionServiceSpy = jest.spyOn(reactionService, 'create');
            const createdReactionSub = metisService.createReaction(reaction).subscribe((createdReaction) => {
                expect(createdReaction).toEqual(reaction);
            });
            expect(reactionServiceSpy).toHaveBeenCalled();
            tick();
            expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            createdReactionSub.unsubscribe();
        }));
        it('should delete a reaction', fakeAsync(() => {
            const reactionServiceSpy = jest.spyOn(reactionService, 'delete');
            metisService.deleteReaction(reaction).subscribe(() => {
                expect(metisServiceGetFilteredPostsSpy).not.toHaveBeenCalled();
            });
            tick();
            expect(reactionServiceSpy).toHaveBeenCalled();
        }));
    });
    // Authorship is decided by comparing the stubbed current user with the post's author.
    it('should determine that metis user is author of post', () => {
        metisServiceUserStub.mockReturnValue(metisUser1);
        const metisUserIsAuthorOfPostingReturn = metisService.metisUserIsAuthorOfPosting(post);
        expect(metisUserIsAuthorOfPostingReturn).toBeTrue();
    });
    it('should determine that metis user is not author of post', () => {
        metisServiceUserStub.mockReturnValue(metisUser2);
        const metisUserIsAuthorOfPostingReturn = metisService.metisUserIsAuthorOfPosting(post);
        expect(metisUserIsAuthorOfPostingReturn).toBeFalse();
    });
    it('should set course information correctly and invoke an update of the post tags in this course', () => {
        const updateCoursePostTagsSpy = jest.spyOn(metisService, 'updateCoursePostTags');
        metisService.setCourse(course);
        const getCourseReturn = metisService.getCourse();
        expect(getCourseReturn).toEqual(course);
        expect(updateCoursePostTagsSpy).toHaveBeenCalled();
    });
    it('should set course when current course has different id', () => {
        metisService.setCourse(course);
        const newCourse = new Course();
        newCourse.id = 99;
        metisService.setCourse(newCourse);
        const getCourseReturn = metisService.getCourse();
        expect(getCourseReturn).toEqual(newCourse);
    });
it('should create empty post for a course-wide context', () => {
const emptyPost = metisService.createEmptyPostForContext(CourseWideContext.ORGANIZATION, undefined, undefined);
expect(emptyPost.courseWideContext).toEqual(CourseWideContext.ORGANIZATION);
expect(emptyPost.exercise).toEqual(undefined);
expect(emptyPost.lecture).toEqual(undefined);
});
it('should create empty post for a exercise context', () => {
const emptyPost = metisService.createEmptyPostForContext(undefined, metisExercise, undefined);
expect(emptyPost.courseWideContext).toEqual(undefined);
expect(emptyPost.exercise).toEqual({
id: metisExercise.id,
title: metisExercise.title,
type: metisExercise.type,
});
expect(emptyPost.lecture).toEqual(undefined);
});
it('should create empty post for a lecture context', () => {
const emptyPost = metisService.createEmptyPostForContext(undefined, undefined, metisLecture);
expect(emptyPost.courseWideContext).toEqual(undefined);
expect(emptyPost.exercise).toEqual(undefined);
expect(emptyPost.lecture).toEqual(metisLecture);
});
    // Router-link and query-param helpers used when rendering references to posts.
    it('should determine the link components for a reference to a post with course-wide context', () => {
        metisService.setCourse(course);
        const referenceLinkComponents = metisService.getLinkForPost(metisCoursePostsWithCourseWideContext[0]);
        expect(referenceLinkComponents).toEqual(['/courses', metisCourse.id, 'discussion']);
    });
    it('should determine the link components for a reference to a post with exercise context', () => {
        metisService.setCourse(course);
        const referenceLinkComponents = metisService.getLinkForPost(metisExercisePosts[0]);
        expect(referenceLinkComponents).toEqual(['/courses', metisCourse.id, 'exercises', metisExercise.id]);
    });
    it('should determine the link components for a reference to a post with lecture context', () => {
        metisService.setCourse(course);
        const referenceLinkComponents = metisService.getLinkForPost(metisLecturePosts[0]);
        expect(referenceLinkComponents).toEqual(['/courses', metisCourse.id, 'lectures', metisLecture.id]);
    });
    it('should determine the router link required for referencing an exercise page within posting', () => {
        metisService.setCourse(course);
        const referenceRouterLink = metisService.getLinkForExercise(metisExercise.id!.toString());
        expect(referenceRouterLink).toEqual(`/courses/${metisCourse.id}/exercises/${metisExercise.id!.toString()}`);
    });
    // Course-wide posts are located via a search-text query param; contextual posts via their id.
    it('should determine the query param for a reference to a post with course-wide context', () => {
        metisService.setCourse(course);
        const referenceLinkComponents = metisService.getQueryParamsForPost(metisCoursePostsWithCourseWideContext[0]);
        expect(referenceLinkComponents).toEqual({
            searchText: `#${metisCoursePostsWithCourseWideContext[0].id}`,
        });
    });
    it('should determine the query param for a reference to a post with exercise context', () => {
        metisService.setCourse(course);
        const referenceLinkComponents = metisService.getQueryParamsForPost(metisExercisePosts[0]);
        expect(referenceLinkComponents).toEqual({
            postId: metisExercisePosts[0].id,
        });
    });
    it('should determine the query param for a reference to a post with lecture context', () => {
        metisService.setCourse(course);
        const referenceLinkComponents = metisService.getQueryParamsForPost(metisLecturePosts[0]);
        expect(referenceLinkComponents).toEqual({
            postId: metisLecturePosts[0].id,
        });
    });
it('should determine context information for a post with course-wide context', () => {
metisService.setCourse(course);
const contextInformation = metisService.getContextInformation(metisCoursePostsWithCourseWideContext[0]);
expect(contextInformation.routerLinkComponents).toEqual(undefined);
expect(contextInformation.displayName).toBeDefined();
});
    it('should determine context information for a post with exercise context', () => {
        metisService.setCourse(course);
        const contextInformation = metisService.getContextInformation(metisExercisePosts[0]);
        expect(contextInformation.routerLinkComponents).toEqual(['/courses', metisCourse.id, 'exercises', metisExercisePosts[0].exercise!.id]);
        expect(contextInformation.displayName).toEqual(metisExercisePosts[0].exercise!.title);
    });
    it('should determine context information for a post with lecture context', () => {
        metisService.setCourse(course);
        const contextInformation = metisService.getContextInformation(metisLecturePosts[0]);
        expect(contextInformation.routerLinkComponents).toEqual(['/courses', metisCourse.id, 'lectures', metisLecturePosts[0].lecture!.id]);
        expect(contextInformation.displayName).toEqual(metisLecturePosts[0].lecture!.title);
    });
    // Websocket behaviour: the first retrieval for a context must open exactly one channel
    // subscription, and a DTO received on that channel must refresh the cached post list
    // (getFilteredPosts with forceUpdate = false) instead of issuing a fresh HTTP request —
    // except on paginated overview pages, where the original filter is replayed unchanged.
    describe('Handle websocket related functionality', () => {
        beforeEach(() => {
            metisServiceCreateWebsocketSubscriptionSpy = jest.spyOn(metisService, 'createWebsocketSubscription');
            websocketServiceReceiveStub = jest.spyOn(websocketService, 'receive');
            websocketServiceSubscribeSpy = jest.spyOn(websocketService, 'subscribe');
            metisService.setCourse(metisCourse);
        });
        it('should create websocket subscription when posts with lecture context are initially retrieved from DB', fakeAsync(() => {
            const lecturePostWithTags = metisLecturePosts[0];
            lecturePostWithTags.tags = ['tag1', 'tag2'];
            websocketServiceReceiveStub.mockReturnValue(of({ post: lecturePostWithTags, action: MetisPostAction.CREATE_POST } as MetisPostDTO));
            // setup subscription
            metisService.getFilteredPosts({ lectureId: metisLecture.id! });
            expect(metisServiceCreateWebsocketSubscriptionSpy).toHaveBeenCalledWith(MetisWebsocketChannelPrefix + `lectures/${metisLecture.id}`);
            expect(websocketServiceSubscribeSpy).toHaveBeenCalledOnce();
            // receive message on channel
            tick();
            expect(metisServiceGetFilteredPostsSpy).toHaveBeenCalledWith({ lectureId: metisLecture.id! }, false);
        }));
        it('should create websocket subscription when posts with exercise context are initially retrieved from DB', fakeAsync(() => {
            websocketServiceReceiveStub.mockReturnValue(of({ post: metisExercisePosts[0], action: MetisPostAction.DELETE_POST } as MetisPostDTO));
            metisService.setPageType(PageType.OVERVIEW);
            // setup subscription
            metisService.getFilteredPosts({ exerciseId: metisExercise.id!, page: 0, pageSize: ITEMS_PER_PAGE });
            expect(metisServiceCreateWebsocketSubscriptionSpy).toHaveBeenCalledWith(MetisWebsocketChannelPrefix + `exercises/${metisExercise.id}`);
            expect(websocketServiceSubscribeSpy).toHaveBeenCalledOnce();
            // receive message on channel
            tick();
            expect(metisServiceGetFilteredPostsSpy).toHaveBeenCalledWith({ exerciseId: metisExercise.id!, page: 0, pageSize: ITEMS_PER_PAGE });
        }));
        it('should create websocket subscription when posts with course-wide context are initially retrieved from DB', fakeAsync(() => {
            const courseWidePostWithTags = metisCoursePostsWithCourseWideContext[0];
            courseWidePostWithTags.tags = ['tag1', 'tag2'];
            websocketServiceReceiveStub.mockReturnValue(of({ post: courseWidePostWithTags, action: MetisPostAction.UPDATE_POST } as MetisPostDTO));
            // setup subscription
            metisService.getFilteredPosts({ courseWideContext: courseWidePostWithTags.courseWideContext });
            expect(metisServiceCreateWebsocketSubscriptionSpy).toHaveBeenCalledWith(MetisWebsocketChannelPrefix + `courses/${metisCourse.id}`);
            expect(websocketServiceSubscribeSpy).toHaveBeenCalledOnce();
            // receive message on channel
            tick();
            expect(metisServiceGetFilteredPostsSpy).toHaveBeenCalledWith({ courseWideContext: courseWidePostWithTags.courseWideContext }, false);
        }));
        it('should not create new subscription if already exists', fakeAsync(() => {
            websocketServiceReceiveStub.mockReturnValue(of({ post: metisExercisePosts[0], action: MetisPostAction.DELETE_POST } as MetisPostDTO));
            // setup subscription for the first time
            metisService.getFilteredPosts({ exerciseId: metisExercise.id! });
            expect(metisServiceCreateWebsocketSubscriptionSpy).toHaveBeenCalledWith(MetisWebsocketChannelPrefix + `exercises/${metisExercise.id}`);
            // trigger createWebsocketSubscription for the second time with the same context filter. i.e. same channel
            metisService.getFilteredPosts({ exerciseId: metisExercise.id! });
            expect(metisServiceGetFilteredPostsSpy).toHaveBeenCalledWith({ exerciseId: metisExercise.id! }, false);
            expect(websocketServiceSubscribeSpy).toHaveBeenCalledOnce();
        }));
    });
});
import test, { ExecutionContext } from 'ava'
import got from 'got'
import { keys, random } from 'lodash'
import { nanoid } from 'nanoid'
import { getConnection, getRepository } from 'typeorm'
import io from 'socket.io-client'
import app from '../../src'
import BlockEntity from '../../src/entities/block'
import { BlockParentType, BlockType } from '../../src/types/block'
import { defaultPermissions } from '../../src/types/permission'
import { createDatabaseCon } from '../../src/clients/db/orm'
import { LinkType } from '../../src/types/link'
import { uuid } from '../testutils'
// Workspace id shared by all transactions created in this file.
const workspaceId = nanoid()
// Boot the HTTP server on a random port and open the DB connection before any test runs.
test.before.cb((t: ExecutionContext<any>) => {
  const port = random(8000, 20000, false)
  t.context.server = app
  t.context.server.listen(port, () => {
    t.context.prefixUrl = `http://localhost:${port}`
    createDatabaseCon()
      .then(() => (t as any).end())
      // NOTE(review): on rejection t.end() is never called, so this hook times out
      // instead of failing fast — confirm that is acceptable.
      .catch((err) => console.error(err))
  })
})
test.after.always((t: ExecutionContext<any>) => {
  t.context.server.close()
})
// Each test gets a fresh socket.io client bound to the /workspace namespace.
test.beforeEach.cb((t: ExecutionContext<any>) => {
  const socket = io(`${t.context.prefixUrl}/workspace`, {
    reconnectionAttempts: 10,
    transports: ['websocket'],
    query: {
      workspaceId: 'test',
      userId: 'testUser1',
    },
  })
  socket.on('connect', () => {
    t.context.client = socket
    ;(t as any).end()
  })
})
// End-to-end: saving blocks whose title contains an inline reference ('‣' token) must
// materialise forward links on the referencing block and backward links on the target,
// both retrievable through api/mgetResources.
test.serial('api saveTransactions and mgetLinks', async (t: ExecutionContext<any>) => {
  const id = nanoid()
  const sid = nanoid()
  const sid2 = nanoid()
  const resp: any = await got<any>('api/operations/saveTransactions', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: {
      transactions: [
        {
          id: nanoid(),
          workspaceId,
          operations: [
            {
              cmd: 'set',
              id: sid,
              path: [],
              table: 'block',
              args: {
                id: sid,
                children: [id],
                type: 'story',
                content: { title: [[sid]] },
                parentId: 'test',
                parentTable: 'workspace',
                storyId: sid,
                alive: true,
              },
            },
            {
              cmd: 'set',
              id: sid2,
              path: [],
              table: 'block',
              args: {
                id: sid2,
                children: [],
                type: 'story',
                content: { title: [[sid2]] },
                parentId: 'test',
                parentTable: 'workspace',
                storyId: sid2,
                alive: true,
              },
            },
            {
              cmd: 'set',
              id,
              path: [],
              table: 'block',
              args: {
                type: 'text',
                id,
                parentId: sid,
                parentTable: 'block',
                storyId: sid,
                content: {
                  // The '‣' token encodes an inline reference ('r', 's', <storyId>) to story sid2.
                  title: [[id], ['‣', [['r', 's', sid2]]]],
                  format: {},
                },
                alive: true,
              },
            },
          ],
        },
      ],
    },
  }).json()
  t.is(resp.success, true)
  // Forward refs: the text block points at story sid2.
  const sResp: any = await got<any>('api/mgetResources', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: {
      workspaceId: 'test',
      requests: [{ type: 'link', id }],
    },
  }).json()
  t.deepEqual(sResp.links[id].forwardRefs, [{ blockId: sid2, storyId: sid2, type: LinkType.BLOCK }])
  // Backward refs: story sid2 is referenced by the text block in story sid.
  const sResp2: any = await got<any>('api/mgetResources', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: {
      workspaceId: 'test',
      requests: [{ type: 'link', id: sid2 }],
    },
  }).json()
  t.deepEqual(sResp2.links[sid2].backwardRefs, [
    {
      storyId: sid,
      blockId: id,
      type: LinkType.BLOCK,
    },
  ])
  // Clean up the created rows so later tests start from a fresh table.
  const rp = getConnection().getRepository(BlockEntity)
  await rp.delete([sid, sid2, id])
})
// Moves a text block into a freshly created row/column layout and then reverts the move,
// asserting the children arrays of every affected block after each transaction.
test.serial('test saveTransactions with row and column', async (t: ExecutionContext<any>) => {
  const wid = nanoid()
  const sid = nanoid()
  const textBlockId1 = nanoid()
  const textBlockId2 = nanoid()
  const columnBlockId1 = nanoid()
  const columnBlockId2 = nanoid()
  const rowBlockId = nanoid()
  // move text block to row block
  const moveTextBlockIntoRowBlock = {
    id: nanoid(),
    workspaceId: wid,
    transactions: [
      {
        id: nanoid(),
        workspaceId,
        operations: [
          {
            cmd: 'set',
            id: sid,
            path: [],
            table: 'block',
            args: {
              id: sid,
              children: [textBlockId1, rowBlockId],
              type: 'story',
              content: { title: [[sid]] },
              parentId: 'test',
              parentTable: 'workspace',
              storyId: sid,
              alive: true,
            },
          },
          {
            cmd: 'set',
            id: textBlockId1,
            path: [],
            table: 'block',
            args: {
              id: textBlockId1,
              type: 'text',
              content: { title: [[]] },
              parentId: sid,
              parentTable: 'block',
              storyId: sid,
              alive: true,
            },
          },
          {
            cmd: 'set',
            id: textBlockId2,
            path: [],
            table: 'block',
            args: {
              id: textBlockId2,
              type: 'text',
              content: { title: [[]] },
              parentId: sid,
              parentTable: 'block',
              storyId: sid,
              alive: true,
            },
          },
          {
            cmd: 'set',
            id: rowBlockId,
            path: [],
            table: 'block',
            args: {
              id: rowBlockId,
              type: 'row',
              children: [],
              parentId: sid,
              parentTable: 'block',
              storyId: sid,
              alive: true,
            },
          },
          // detach text block1 from the story, create two columns inside the row,
          // and re-parent each text block into its own column
          {
            cmd: 'listRemove',
            id: sid,
            path: ['children'],
            table: 'block',
            args: { id: textBlockId1 },
          },
          {
            cmd: 'listAfter',
            id: rowBlockId,
            path: ['children'],
            table: 'block',
            args: { id: columnBlockId1, after: '' },
          },
          {
            cmd: 'listAfter',
            id: rowBlockId,
            path: ['children'],
            table: 'block',
            args: { id: columnBlockId2, after: columnBlockId1 },
          },
          {
            cmd: 'set',
            id: columnBlockId1,
            path: [],
            table: 'block',
            args: {
              type: 'column',
              id: columnBlockId1,
              storyId: sid,
              parentId: rowBlockId,
              parentTable: 'block',
              children: [],
              alive: true,
            },
          },
          {
            cmd: 'set',
            id: columnBlockId2,
            path: [],
            table: 'block',
            args: {
              type: 'column',
              id: columnBlockId2,
              storyId: sid,
              parentId: rowBlockId,
              parentTable: 'block',
              children: [],
              alive: true,
            },
          },
          {
            cmd: 'listAfter',
            id: columnBlockId1,
            path: ['children'],
            table: 'block',
            args: { id: textBlockId1, after: '' },
          },
          {
            cmd: 'listAfter',
            id: columnBlockId2,
            path: ['children'],
            table: 'block',
            args: { id: textBlockId2, after: '' },
          },
          {
            cmd: 'update',
            id: textBlockId1,
            path: ['parentId'],
            table: 'block',
            args: columnBlockId1,
          },
          {
            cmd: 'update',
            id: textBlockId2,
            path: ['parentId'],
            table: 'block',
            args: columnBlockId2,
          },
        ],
      },
    ],
  }
  const saveResp1: any = await got<any>('api/operations/saveTransactions', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: moveTextBlockIntoRowBlock,
  }).json()
  t.deepEqual(saveResp1, { success: true })
  const storyResp1: any = await got<any>('api/stories/load', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: {
      workspaceId: 'test',
      storyId: sid,
    },
  }).json()
  // After the first transaction: story -> row -> [column1 -> text1, column2 -> text2].
  t.deepEqual(storyResp1.blocks[columnBlockId1].children, [textBlockId1])
  t.deepEqual(storyResp1.blocks[columnBlockId2].children, [textBlockId2])
  t.deepEqual(storyResp1.blocks[rowBlockId].children, [columnBlockId1, columnBlockId2])
  t.deepEqual(storyResp1.blocks[sid].children, [rowBlockId])
  // revert
  const moveColumnBlockOutOfRowBlock = {
    id: nanoid(),
    workspaceId: wid,
    transactions: [
      {
        id: nanoid(),
        workspaceId,
        operations: [
          // delete text block1 from column block1
          {
            cmd: 'listRemove',
            id: columnBlockId1,
            path: ['children'],
            table: 'block',
            args: { id: textBlockId1 },
          },
          // delete column block1 from row block
          {
            cmd: 'listRemove',
            id: rowBlockId,
            path: ['children'],
            table: 'block',
            args: { id: columnBlockId1 },
          },
          {
            cmd: 'update',
            id: columnBlockId1,
            path: ['alive'],
            table: 'block',
            args: false,
          },
          // delete column block2 from row block
          {
            cmd: 'listRemove',
            id: rowBlockId,
            path: ['children'],
            table: 'block',
            args: { id: columnBlockId2 },
          },
          {
            cmd: 'update',
            id: columnBlockId2,
            path: ['alive'],
            table: 'block',
            args: false,
          },
          // delete row block from story
          {
            cmd: 'listRemove',
            id: sid,
            path: ['children'],
            table: 'block',
            args: { id: rowBlockId },
          },
          {
            cmd: 'update',
            id: rowBlockId,
            path: ['alive'],
            table: 'block',
            args: false,
          },
          // move text block2 to story block
          {
            cmd: 'listRemove',
            id: columnBlockId2,
            path: ['children'],
            table: 'block',
            args: { id: textBlockId2 },
          },
          {
            cmd: 'listAfter',
            id: sid,
            path: ['children'],
            table: 'block',
            args: { id: textBlockId2, after: '' },
          },
          {
            cmd: 'update',
            id: textBlockId2,
            path: ['parentId'],
            table: 'block',
            args: sid,
          },
          // move text block1 to story block
          {
            cmd: 'listAfter',
            id: sid,
            path: ['children'],
            table: 'block',
            args: { id: textBlockId1, after: textBlockId2 },
          },
          {
            cmd: 'update',
            id: textBlockId1,
            path: ['parentId'],
            table: 'block',
            args: sid,
          },
        ],
      },
    ],
  }
  const saveResp2: any = await got<any>('api/operations/saveTransactions', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: moveColumnBlockOutOfRowBlock,
  }).json()
  t.deepEqual(saveResp2, { success: true })
  const storyResp2: any = await got<any>('api/stories/load', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: {
      workspaceId: 'test',
      storyId: sid,
    },
  }).json()
  // Only the story and the two text blocks remain; the dead row/column blocks are excluded.
  t.deepEqual(keys(storyResp2.blocks).length, 3)
  t.deepEqual(storyResp2.blocks[sid].children, [textBlockId2, textBlockId1])
})
// Titles containing integers beyond Number.MAX_SAFE_INTEGER must survive the JSON
// round-trip through saveTransactions/mgetResources without losing precision.
test.serial('test saveTransactions with bigInt', async (t: ExecutionContext<any>) => {
  const sid = nanoid()
  const rp = getConnection().getRepository(BlockEntity)
  await rp.save({
    id: sid,
    interKey: sid,
    workspaceId: 'test',
    content: { title: [[sid]] },
    storyId: sid,
    type: BlockType.STORY,
    parentId: 'test',
    parentTable: BlockParentType.WORKSPACE,
    permissions: defaultPermissions,
    children: [],
    alive: true,
  })
  // 41 digits — far beyond double precision; must not be coerced to a lossy number.
  const bigIntTitle = '11111111111111111111111111111111111111111'
  // NOTE(review): the row is deleted *before* the update below, so this also exercises
  // saveTransactions against a missing row — confirm this ordering is intentional.
  await rp.delete(sid)
  try {
    await got<any>('api/operations/saveTransactions', {
      prefixUrl: t.context.prefixUrl,
      method: 'POST',
      json: {
        transactions: [
          {
            id: nanoid(),
            workspaceId,
            operations: [
              {
                cmd: 'set',
                id: sid,
                path: ['content', 'title'],
                table: 'block',
                args: [[bigIntTitle]],
              },
            ],
          },
        ],
      },
    })
  } catch (e: any) {
    t.fail(e.toString())
  }
  const resp: any = await got<any>('api/mgetResources', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: {
      workspaceId,
      requests: [{ type: 'block', id: sid }],
    },
  }).json()
  await rp.delete(sid)
  t.deepEqual(resp.blocks[sid].content, { title: [[bigIntTitle]] })
})
// The server must stamp createdById on newly persisted blocks from the requesting user.
test.serial('test auto set blocks createdById', async (t: ExecutionContext<any>) => {
  const wid = uuid()
  const tid = nanoid()
  const createThought = {
    transactions: [
      {
        id: nanoid(),
        workspaceId,
        operations: [
          {
            cmd: 'set',
            id: tid,
            path: [],
            table: 'block',
            args: {
              id: tid,
              children: [],
              type: 'thought',
              content: { title: [[tid]], date: nanoid() },
              parentId: 'test',
              // NOTE(review): parentTable is a random uuid (wid) here rather than
              // 'workspace' as in the other tests — confirm this is intentional.
              parentTable: wid,
              storyId: tid,
              alive: true,
            },
          },
        ],
      },
    ],
  }
  await got<any>('api/operations/saveTransactions', {
    prefixUrl: t.context.prefixUrl,
    method: 'POST',
    json: createThought,
  })
  const block = await getRepository(BlockEntity).findOneOrFail(tid)
  t.not(block.createdById, null)
})
})
import { Quaternion } from './quaternion'
import { Matrix } from './matrix'
// Bounds used by the MAX_VALUE/MIN_VALUE constants below.
// NOTE(review): Number.MIN_VALUE is the smallest *positive* representable double
// (~5e-324), not the most negative value (-Number.MAX_VALUE) — confirm Vector4.MIN_VALUE
// is intentionally positive.
const f32 = { min: Number.MIN_VALUE, max: Number.MAX_VALUE }
export class Vector4 {
  /** Returns a vector with values set to their maximum values. */
  public static MAX_VALUE: Vector4 = new Vector4(f32.max, f32.max, f32.max, f32.max)
  /** Returns a vector with values set to their minimum values (see NOTE on f32: these are the smallest positive doubles). */
  public static MIN_VALUE: Vector4 = new Vector4(f32.min, f32.min, f32.min, f32.min)
  /** The internal elements for this type. Layout: [x, y, z, w]. */
  public v: Float32Array
  /**
   * Creates a new Vector4.
   * @param x The x component.
   * @param y The y component.
   * @param z The z component.
   * @param w The w component.
   */
  constructor(x: number, y: number, z: number, w: number) {
    // Components are stored in a Float32Array, so inputs are rounded to 32-bit precision.
    this.v = new Float32Array(4)
    this.v[0] = x
    this.v[1] = y
    this.v[2] = z
    this.v[3] = w
  }
  /** Returns the string representation of this object, e.g. "[1, 2, 3, 4]". */
  public toString(): string {
    return `[${this.v[0]}, ${this.v[1]}, ${this.v[2]}, ${this.v[3]}]`
  }
  /** Returns the type kind of this object. */
  public kind(): string {
    return 'Vector4'
  }
  /** Returns a clone of this vector. */
  public clone(): Vector4 {
    return Vector4.clone(this)
  }
  // Component accessors mapping x/y/z/w onto indices 0..3 of the backing array.
  public get x(): number {
    return this.v[0]
  }
  public get y(): number {
    return this.v[1]
  }
  public get z(): number {
    return this.v[2]
  }
  public get w(): number {
    return this.v[3]
  }
  public set x(value: number) {
    this.v[0] = value
  }
  public set y(value: number) {
    this.v[1] = value
  }
  public set z(value: number) {
    this.v[2] = value
  }
  public set w(value: number) {
    this.v[3] = value
  }
  // Instance conveniences: each delegates to the corresponding static implementation.
  /** Returns the length of this vector. */
  public length(): number {
    return Vector4.getLength(this)
  }
  /** Returns the squared length of this vector. (Doc fixed: was a copy of length().) */
  public lengthSq(): number {
    return Vector4.getLengthSq(this)
  }
  /** Returns this vector normalized. */
  public normalize(): Vector4 {
    return Vector4.normalize(this)
  }
  /** Returns the dot product between this and the given vector. */
  public dot(v0: Vector4): number {
    return Vector4.dot(this, v0)
  }
  /** Returns the addition of this and the given vector. */
  public add(v0: Vector4): Vector4 {
    return Vector4.add(this, v0)
  }
  /** Returns the subtraction of this and the given vector. */
  public sub(v0: Vector4): Vector4 {
    return Vector4.sub(this, v0)
  }
  /** Returns the multiplication of this and the given vector. */
  public mul(v0: Vector4): Vector4 {
    return Vector4.mul(this, v0)
  }
  /** Returns the division of this and the given vector. */
  public div(v0: Vector4): Vector4 {
    return Vector4.div(this, v0)
  }
  /** Returns a new scaled vector from the given scalar value. */
  public scale(s0: number): Vector4 {
    return Vector4.scale(this, s0)
  }
  /** Returns a new negated vector from this vector. */
  public negate(): Vector4 {
    return Vector4.negate(this)
  }
  /** Returns a new vector whose values are initialized to zero. */
  public static zero(): Vector4 {
    return new Vector4(0.0, 0.0, 0.0, 0.0)
  }
  /** Returns a new vector whose values are initialized to one. */
  public static one(): Vector4 {
    return new Vector4(1.0, 1.0, 1.0, 1.0)
  }
  /** Returns a new left vector (negative x axis). */
  public static left(): Vector4 {
    return new Vector4(-1.0, 0.0, 0.0, 0.0)
  }
  /** Returns a new unit x vector. */
  public static unitX(): Vector4 {
    return new Vector4(1.0, 0.0, 0.0, 0.0)
  }
  /** Returns a new unit y vector. */
  public static unitY(): Vector4 {
    return new Vector4(0.0, 1.0, 0.0, 0.0)
  }
  /** Returns a new unit z vector. */
  public static unitZ(): Vector4 {
    return new Vector4(0.0, 0.0, 1.0, 0.0)
  }
  /** Returns a new unit w vector. (Doc fixed: previously said "unit z".) */
  public static unitW(): Vector4 {
    return new Vector4(0.0, 0.0, 0.0, 1.0)
  }
  /** Compares the left and right vectors for equality. Exact float comparison — no epsilon. */
  public static equals(v0: Vector4, v1: Vector4): boolean {
    return (
      v0.v[0] === v1.v[0] &&
      v0.v[1] === v1.v[1] &&
      v0.v[2] === v1.v[2] &&
      v0.v[3] === v1.v[3]
    )
  }
  /** Returns the Euclidean length of the given vector. */
  public static getLength(v0: Vector4): number {
    return Math.sqrt(
      (v0.v[0] * v0.v[0]) +
      (v0.v[1] * v0.v[1]) +
      (v0.v[2] * v0.v[2]) +
      (v0.v[3] * v0.v[3])
    )
  }
  /** Returns the squared length of the given vector (avoids the sqrt of getLength). */
  public static getLengthSq(v0: Vector4): number {
    return (
      (v0.v[0] * v0.v[0]) +
      (v0.v[1] * v0.v[1]) +
      (v0.v[2] * v0.v[2]) +
      (v0.v[3] * v0.v[3])
    )
  }
  /** Returns the Euclidean distance between the left and right vectors. */
  public static distance(v0: Vector4, v1: Vector4): number {
    const x = v0.v[0] - v1.v[0]
    const y = v0.v[1] - v1.v[1]
    const z = v0.v[2] - v1.v[2]
    const w = v0.v[3] - v1.v[3]
    return Math.sqrt((x * x) + (y * y) + (z * z) + (w * w))
  }
  /** Returns the squared distance between the left and right vectors. */
  public static distanceSq(v0: Vector4, v1: Vector4): number {
    const x = v0.v[0] - v1.v[0]
    const y = v0.v[1] - v1.v[1]
    const z = v0.v[2] - v1.v[2]
    const w = v0.v[3] - v1.v[3]
    return ((x * x) + (y * y) + (z * z) + (w * w))
  }
  /** Returns the dot product between the given two vectors. */
  public static dot(v0: Vector4, v1: Vector4): number {
    return (
      (v0.v[0] * v1.v[0]) +
      (v0.v[1] * v1.v[1]) +
      (v0.v[2] * v1.v[2]) +
      (v0.v[3] * v1.v[3])
    )
  }
  /**
   * Returns a normalized (unit length) vector from the given vector.
   * NOTE(review): a zero-length input divides by zero and yields Infinity/NaN
   * components — confirm callers never normalize the zero vector.
   */
  public static normalize(v0: Vector4): Vector4 {
    // Reciprocal length computed once, then applied to each component.
    const len = 1.0 / Math.sqrt(
      (v0.v[0] * v0.v[0]) +
      (v0.v[1] * v0.v[1]) +
      (v0.v[2] * v0.v[2]) +
      (v0.v[3] * v0.v[3])
    )
    return new Vector4(
      v0.v[0] * len,
      v0.v[1] * len,
      v0.v[2] * len,
      v0.v[3] * len
    )
  }
/** Returns a vector whose components are the absolute values of the given vector's components. */
public static abs(v0: Vector4): Vector4 {
    const [x, y, z, w] = v0.v
    return new Vector4(Math.abs(x), Math.abs(y), Math.abs(z), Math.abs(w))
}
/** Returns the component-wise minimum of the two given vectors. */
public static min(v0: Vector4, v1: Vector4): Vector4 {
    // Same `<` ternary as the original (keeps its NaN handling intact).
    const lesser = (a: number, b: number): number => (a < b ? a : b)
    return new Vector4(
        lesser(v0.v[0], v1.v[0]),
        lesser(v0.v[1], v1.v[1]),
        lesser(v0.v[2], v1.v[2]),
        lesser(v0.v[3], v1.v[3])
    )
}
/** Returns the component-wise maximum of the two given vectors. */
public static max(v0: Vector4, v1: Vector4): Vector4 {
    // Same `>` ternary as the original (keeps its NaN handling intact).
    const greater = (a: number, b: number): number => (a > b ? a : b)
    return new Vector4(
        greater(v0.v[0], v1.v[0]),
        greater(v0.v[1], v1.v[1]),
        greater(v0.v[2], v1.v[2]),
        greater(v0.v[3], v1.v[3])
    )
}
/**
 * Returns the given vector clamped component-wise into [min, max].
 * The upper bound is applied before the lower bound, matching the original
 * (so when min > max for a component, min wins).
 */
public static clamp(v0: Vector4, min: Vector4, max: Vector4): Vector4 {
    const clampComponent = (value: number, lo: number, hi: number): number => {
        if (value > hi) value = hi
        return value < lo ? lo : value
    }
    return new Vector4(
        clampComponent(v0.v[0], min.v[0], max.v[0]),
        clampComponent(v0.v[1], min.v[1], max.v[1]),
        clampComponent(v0.v[2], min.v[2], max.v[2]),
        clampComponent(v0.v[3], min.v[3], max.v[3])
    )
}
/** Linearly interpolates between the two given vectors by `amount` (0 → v0, 1 → v1). */
public static lerp(v0: Vector4, v1: Vector4, amount: number): Vector4 {
    const a = v0.v
    const b = v1.v
    return new Vector4(
        a[0] + (b[0] - a[0]) * amount,
        a[1] + (b[1] - a[1]) * amount,
        a[2] + (b[2] - a[2]) * amount,
        a[3] + (b[3] - a[3]) * amount
    )
}
/** Returns the barycentric combination of three vectors with weights amount0/amount1. */
public static barycentric(v0: Vector4, v1: Vector4, v2: Vector4, amount0: number, amount1: number): Vector4 {
    // Per-component: base + amount0*(b-base) + amount1*(c-base), same term order as before.
    const bary = (a: number, b: number, c: number): number =>
        (a + (amount0 * (b - a))) + (amount1 * (c - a))
    return new Vector4(
        bary(v0.v[0], v1.v[0], v2.v[0]),
        bary(v0.v[1], v1.v[1], v2.v[1]),
        bary(v0.v[2], v1.v[2], v2.v[2]),
        bary(v0.v[3], v1.v[3], v2.v[3])
    )
}
/** Smoothstep interpolation between the two given vectors: `amount` is clamped to [0,1] then eased. */
public static smoothstep(v0: Vector4, v1: Vector4, amount: number): Vector4 {
    // Clamp then apply the 3t^2 - 2t^3 easing curve (same expression order as before).
    let t = (amount > 1.0) ? 1.0 : ((amount < 0.0) ? 0.0 : amount)
    t = (t * t) * (3.0 - (2.0 * t))
    // lerp performs the identical per-component blend.
    return Vector4.lerp(v0, v1, t)
}
/** Catmull-Rom spline interpolation across the four control points at `amount`. */
public static catmullrom(v0: Vector4, v1: Vector4, v2: Vector4, v3: Vector4, amount: number): Vector4 {
    const t2 = amount * amount
    const t3 = amount * t2
    // Per-component Catmull-Rom basis; term order matches the original exactly
    // so floating-point results are bit-identical.
    const spline = (p0: number, p1: number, p2: number, p3: number): number =>
        0.5 * ((((2.0 * p1)
            + ((-p0 + p2) * amount))
            + (((((2.0 * p0) - (5.0 * p1))
            + (4.0 * p2)) - p3) * t2))
            + ((((-p0 + (3.0 * p1))
            - (3.0 * p2)) + p3) * t3))
    return new Vector4(
        spline(v0.v[0], v1.v[0], v2.v[0], v3.v[0]),
        spline(v0.v[1], v1.v[1], v2.v[1], v3.v[1]),
        spline(v0.v[2], v1.v[2], v2.v[2], v3.v[2]),
        spline(v0.v[3], v1.v[3], v2.v[3], v3.v[3])
    )
}
/** Hermite spline interpolation between v0 and v1 using tangents t0 and t1 at `amount`. */
public static hermite(v0: Vector4, t0: Vector4, v1: Vector4, t1: Vector4, amount: number): Vector4 {
    const a2 = amount * amount
    const a3 = amount * a2
    // Hermite basis weights: two for the endpoints, two for the tangents.
    const w0 = ((2.0 * a3) - (3.0 * a2)) + 1.0
    const w1 = (-2.0 * a3) + (3.0 * a2)
    const w2 = (a3 - (2.0 * a2)) + amount
    const w3 = a3 - a2
    const blend = (p0: number, p1: number, m0: number, m1: number): number =>
        (((p0 * w0) + (p1 * w1)) + (m0 * w2)) + (m1 * w3)
    return new Vector4(
        blend(v0.v[0], v1.v[0], t0.v[0], t1.v[0]),
        blend(v0.v[1], v1.v[1], t0.v[1], t1.v[1]),
        blend(v0.v[2], v1.v[2], t0.v[2], t1.v[2]),
        blend(v0.v[3], v1.v[3], t0.v[3], t1.v[3])
    )
}
public static fast_transform(v0: Vector4, m0: Matrix, out: Vector4): void {
out.v[0] = (((v0.v[0] * m0.v[0]) + (v0.v[1] * m0.v[4])) + (v0.v[2] * m0.v[8])) + (v0.v[3] * m0.v[12]),
out.v[1] = (((v0.v[0] * m0.v[1]) + (v0.v[1] * m0.v[5])) + (v0.v[2] * m0.v[9])) + (v0.v[3] * m0.v[13]),
out.v[2] = (((v0.v[0] * m0.v[2]) + (v0.v[1] * m0.v[6])) + (v0.v[2] * m0.v[10])) + (v0.v[3] * m0.v[14]),
out.v[3] = (((v0.v[0] * m0.v[3]) + (v0.v[1] * m0.v[7])) + (v0.v[2] * m0.v[11])) + (v0.v[3] * m0.v[15])
}
/** Returns a new vector that is v0 transformed by the matrix m0. */
public static transform(v0: Vector4, m0: Matrix): Vector4 {
    // Allocate a result vector and let the non-allocating core fill it in.
    const result = Vector4.zero()
    Vector4.fast_transform(v0, m0, result)
    return result
}
/**
 * Rotates the x/y/z components of v0 by quaternion q0.
 *
 * Fix: the original dropped every `v0.v[2]` (z) term from the rotation-matrix
 * expansion, so any input with a non-zero z component rotated incorrectly.
 * The full standard expansion (as in XNA/MonoGame Vector4.Transform with a
 * quaternion) is restored below.
 *
 * NOTE(review): the w component is returned as 0.0 exactly as in the original;
 * XNA's convention preserves the input w instead — confirm which is intended.
 */
public static transformQuaternion(v0: Vector4, q0: Quaternion): Vector4 {
    const n0 = q0.v[0] + q0.v[0];
    const n1 = q0.v[1] + q0.v[1];
    const n2 = q0.v[2] + q0.v[2];
    const n3 = q0.v[3] * n0;
    const n4 = q0.v[3] * n1;
    const n5 = q0.v[3] * n2;
    const n6 = q0.v[0] * n0;
    const n7 = q0.v[0] * n1;
    const n8 = q0.v[0] * n2;
    const n9 = q0.v[1] * n1;
    const n10 = q0.v[1] * n2;
    const n11 = q0.v[2] * n2;
    return new Vector4(
        (v0.v[0] * ((1.0 - n9) - n11)) + (v0.v[1] * (n7 - n5)) + (v0.v[2] * (n8 + n4)),
        (v0.v[0] * (n7 + n5)) + (v0.v[1] * ((1.0 - n6) - n11)) + (v0.v[2] * (n10 - n3)),
        (v0.v[0] * (n8 - n4)) + (v0.v[1] * (n10 + n3)) + (v0.v[2] * ((1.0 - n6) - n9)),
        0.0
    )
}
/** Returns the component-wise sum of the two given vectors. */
public static add(v0: Vector4, v1: Vector4): Vector4 {
    const a = v0.v
    const b = v1.v
    return new Vector4(a[0] + b[0], a[1] + b[1], a[2] + b[2], a[3] + b[3])
}
/** Returns the component-wise difference v0 - v1. */
public static sub(v0: Vector4, v1: Vector4): Vector4 {
    const a = v0.v
    const b = v1.v
    return new Vector4(a[0] - b[0], a[1] - b[1], a[2] - b[2], a[3] - b[3])
}
/** Returns the component-wise (Hadamard) product of the two given vectors. */
public static mul(v0: Vector4, v1: Vector4): Vector4 {
    const a = v0.v
    const b = v1.v
    return new Vector4(a[0] * b[0], a[1] * b[1], a[2] * b[2], a[3] * b[3])
}
/** Returns the component-wise quotient v0 / v1 (zero divisors yield Infinity/NaN, as before). */
public static div(v0: Vector4, v1: Vector4): Vector4 {
    const a = v0.v
    const b = v1.v
    return new Vector4(a[0] / b[0], a[1] / b[1], a[2] / b[2], a[3] / b[3])
}
/** Returns the given vector scaled by the scalar. */
public static scale(v0: Vector4, scalar: number): Vector4 {
    const [x, y, z, w] = v0.v
    return new Vector4(x * scalar, y * scalar, z * scalar, w * scalar)
}
/** Returns a vector with every component of the given vector negated. */
public static negate(v0: Vector4): Vector4 {
    const [x, y, z, w] = v0.v
    return new Vector4(-x, -y, -z, -w)
}
/** Returns an independent copy of the given vector. */
public static clone(v0: Vector4): Vector4 {
    const [x, y, z, w] = v0.v
    return new Vector4(x, y, z, w)
}
/** Factory helper: constructs a new Vector4 from the four components. */
public static create(x: number, y: number, z: number, w: number): Vector4 {
    const out = new Vector4(x, y, z, w)
    return out
}
}
import type { Pose } from '@oito/armature/src';
import type { TVec3 } from '@oito/type';
import type { BipedRig, IKChain, Link } from '..';
import type { IVerletPointConfig } from './types';
import type P4Cage from './verlet/cages/P4Cage';
import type AxisCage from './verlet/cages/AxisCage';
import type VerletPoint from './verlet/VerletPoint';
import type LimbCage from './verlet/cages/LimbCage';
import type TriExtCage from './verlet/cages/TriExtCage';
import VerletSkeleton from './verlet/VerletSkeleton';
import RenderLine from './RenderLine';
//import { Vec3 } from '@oito/core';
//#endregion
//#region CONFIG
// Mass distribution for the verlet points: heavier points resist being moved
// during constraint resolution, so the hip/spine anchor the figure.
// Trying for a 33% / 77% split in Mass
// NOTE(review): 33% + 77% ≠ 100% — one of these figures looks like a typo; confirm intent.
const Spine_Mass = 8;
// Per-point verlet config; `pole: true` marks the mid-limb (elbow/knee) pole points.
const Biped_Config : { [key:string]:IVerletPointConfig } = {
hip : { mass: 16 },
head : { mass: 1 },
armL_head : { mass: 4 },
armL_pole : { mass: 2, pole: true },
armL_tail : { mass: 1 },
armR_head : { mass: 4 },
armR_pole : { mass: 2, pole: true },
armR_tail : { mass: 1 },
legL_head : { mass: 4 },
legL_pole : { mass: 2, pole: true },
legL_tail : { mass: 1 },
legR_head : { mass: 4 },
legR_pole : { mass: 2, pole: true },
legR_tail : { mass: 1 },
};
//#endregion
class BipedFBIK{
//#region MAIN
skeleton = new VerletSkeleton();
rig : BipedRig;
lines : Array<RenderLine> = [];
hipCage !: P4Cage;
chestCage !: P4Cage;
armL !: LimbCage;
armR !: LimbCage;
legL !: LimbCage;
legR !: LimbCage;
footL !: TriExtCage;
footR !: TriExtCage;
handL !: TriExtCage;
handR !: TriExtCage;
spineCage : Array< AxisCage > = [];
spinePnts : Array< VerletPoint > = [];
hip !: VerletPoint;
head !: TriExtCage;
constructor( rig: BipedRig ){
this.rig = rig;
this._build(); // Build Points + Constraints
this._defineRenderLines(); // Create lines for Renderer
this.skeleton.iterations = 10;
}
//#endregion
//#region PRIVATE METHODS
_build(): void{
const s = this.skeleton;
const r = this.rig;
const t : any = {}; // Hold Verlet Points Temporarily before placing into cage
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
let k: string, i: IVerletPointConfig;
for( k in Biped_Config ){
t[ k ] = s.newPoint( Biped_Config[ k ] );
}
this.hip = t.hip;
this.armL = s.newLimbCage( t.armL_head, t.armL_pole, t.armL_tail ).setPrevPole( [0,0,-1] );
this.armR = s.newLimbCage( t.armR_head, t.armR_pole, t.armR_tail ).setPrevPole( [0,0,-1] );
this.legR = s.newLimbCage( t.legR_head, t.legR_pole, t.legR_tail ).setPrevPole( [0,0,1] );
this.legL = s.newLimbCage( t.legL_head, t.legL_pole, t.legL_tail ).setPrevPole( [0,0,1] );
this.footL = s.newTriExtCage( t.legL_tail, true );
this.footR = s.newTriExtCage( t.legR_tail, true );
this.handL = s.newTriExtCage( t.armL_tail, false );
this.handR = s.newTriExtCage( t.armR_tail, false );
this.head = s.newTriExtCage( t.head, false );
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if( r.spine ){
// Spine Bones
for( let lnk of r.spine.links ){
this.spinePnts.push( s.newPoint( { mass:Spine_Mass } ) );
}
// Spine Tail
this.spinePnts.push( s.newPoint( { mass:Spine_Mass } ) );
// Create a Constraint Cage
this.hipCage = s.newP4Cage( this.hip, this.spinePnts[ 0 ], t.legR_head, t.legL_head );
this.chestCage = s.newP4Cage( this.spinePnts[ r.spine.count-1 ], this.spinePnts[ r.spine.count ], t.armR_head, t.armL_head );
for( let i=0; i < r.spine.count - 1; i++ ){
this.spineCage.push( s.newAxisCage( this.spinePnts[ i ], this.spinePnts[ i+1 ] ) );
}
s.newLink( this.chestCage.pTail, this.head.pHead );
}
}
_defineRenderLines(): void{
this.lines.push(
new RenderLine( this.head.pHead.pos, this.head.pPole.pos ),
new RenderLine( this.head.pHead.pos, this.head.pEff.pos ),
new RenderLine( this.head.pPole.pos, this.head.pEff.pos ),
new RenderLine( this.head.pHead.pos, this.head.pPole.pos ),
new RenderLine( this.head.pHead.pos, this.chestCage.pTail.pos ),
new RenderLine( this.chestCage.pTail.pos, this.chestCage.pHead.pos ),
new RenderLine( this.chestCage.pHead.pos, this.chestCage.pPole.pos ),
new RenderLine( this.chestCage.pTail.pos, this.armL.pHead.pos ),
new RenderLine( this.armL.pHead.pos, this.armL.pPole.pos ),
new RenderLine( this.armL.pPole.pos, this.armL.pTail.pos ),
new RenderLine( this.armL.pTail.pos, this.handL.pEff.pos ),
new RenderLine( this.armL.pTail.pos, this.handL.pPole.pos ),
new RenderLine( this.handL.pEff.pos, this.handL.pPole.pos ),
new RenderLine( this.chestCage.pTail.pos, this.armR.pHead.pos ),
new RenderLine( this.armR.pHead.pos, this.armR.pPole.pos ),
new RenderLine( this.armR.pPole.pos, this.armR.pTail.pos ),
new RenderLine( this.armR.pTail.pos, this.handR.pEff.pos ),
new RenderLine( this.armR.pTail.pos, this.handR.pPole.pos ),
new RenderLine( this.handR.pEff.pos, this.handR.pPole.pos ),
new RenderLine( this.legL.pHead.pos, this.legL.pPole.pos ),
new RenderLine( this.legL.pPole.pos, this.legL.pTail.pos ),
new RenderLine( this.legL.pTail.pos, this.footL.pEff.pos ),
new RenderLine( this.legL.pTail.pos, this.footL.pPole.pos ),
new RenderLine( this.footL.pEff.pos, this.footL.pPole.pos ),
new RenderLine( this.legR.pHead.pos, this.legR.pPole.pos ),
new RenderLine( this.legR.pPole.pos, this.legR.pTail.pos ),
new RenderLine( this.legR.pTail.pos, this.footR.pEff.pos ),
new RenderLine( this.legR.pTail.pos, this.footR.pPole.pos ),
new RenderLine( this.footR.pEff.pos, this.footR.pPole.pos ),
new RenderLine( this.hipCage.pHead.pos, this.hipCage.pLeft.pos ),
new RenderLine( this.hipCage.pHead.pos, this.hipCage.pRight.pos ),
new RenderLine( this.hipCage.pHead.pos, this.hipCage.pPole.pos ),
new RenderLine( this.hipCage.pHead.pos, this.hipCage.pTail.pos ),
);
for( let c of this.spineCage ){
this.lines.push( new RenderLine( c.pHead.pos, c.pTail.pos ) );
this.lines.push( new RenderLine( c.pHead.pos, c.pPole.pos ) );
}
}
//#endregion
//#region BIND INITIAL POSITION
bindPose( pose: Pose, resetConstraints:false, debug ?: any ): this{
const r = this.rig;
let p1: TVec3;
let p2: TVec3;
if( r.hip ){
this.hip.setPos( r.hip.getStartPosition( pose ) );
this.hipCage.updatePole();
}
if( r.head ){
p1 = r.head.getStartPosition( pose );
p2 = r.head.getTailPosition( pose )
this.head.pHead.setPos( p1 );
this.head.setPoleOffset( p1, [0,0,0.2], [0,p2[1]-p1[1],0] );
}
if( r.spine ){
// Spine Bones
let lnk: Link;
for( let i=0; i < r.spine.count; i++ ){
this.spinePnts[ i ].setPos( r.spine.getPositionAt( pose, i ) );
}
// Spine Tail
this.spinePnts[ r.spine.count ].setPos( r.spine.getTailPosition( pose ) );
this.chestCage.updatePole();
for( let i of this.spineCage ) i.updatePole();
}
if( r.legR ) this._bindLimb( r.legR, pose, this.legR );
if( r.legL ) this._bindLimb( r.legL, pose, this.legL );
if( r.armR ) this._bindLimb( r.armR, pose, this.armR );
if( r.armL ) this._bindLimb( r.armL, pose, this.armL );
if( r.footL ){
p1 = this.legL.pTail.pos;
p2 = r.footL.getTailPosition( pose );
this.footL.setPoleOffset( p1, [0,0,p2[2]-p1[2]], [0,p2[1]-p1[1],0] );
}
if( r.footR ){
p1 = this.legR.pTail.pos;
p2 = r.footR.getTailPosition( pose );
this.footR.setPoleOffset( p1, [0,0,p2[2]-p1[2]], [0,p2[1]-p1[1],0] );
}
if( r.handL ){
p1 = this.armL.pTail.pos;
p2 = r.handL.getTailPosition( pose );
this.handL.setPoleOffset( p1, [p2[0]-p1[0],0,0], [0,0,-0.1] );
}
if( r.handR ){
p1 = this.armR.pTail.pos;
p2 = r.handR.getTailPosition( pose );
this.handR.setPoleOffset( p1, [p2[0]-p1[0],0,0], [0,0,-0.1] );
}
if( resetConstraints ) this.skeleton.rebindConstraints();
return this;
}
_bindLimb( chain: IKChain, pose:Pose, limb: LimbCage ): void{
limb.pHead.setPos( chain.getStartPosition( pose ) );
limb.pPole.setPos( chain.getMiddlePosition( pose ) );
limb.pTail.setPos( chain.getTailPosition( pose ) );
}
//#endregion
setPointPos( idx: number, pos: TVec3 ): this{
this.skeleton.setPos( idx, pos );
return this;
}
resolve(): this{
this.skeleton.resolve();
return this;
}
resolveForPole( pIdx: number ): this{
let cage: any;
let cage2: any;
if( this.armL.pPole.idx == pIdx ){
cage = this.armL;
cage2 = this.handL;
}else if( this.armR.pPole.idx == pIdx ){
cage = this.armR;
cage2 = this.handR;
}else if( this.chestCage.pPole.idx == pIdx ) cage = this.chestCage;
else if( this.hipCage.pPole.idx == pIdx ) cage = this.hipCage;
else if( this.legR.pPole.idx == pIdx ){
cage = this.legR;
cage2 = this.footR;
}else if( this.legL.pPole.idx == pIdx ){
cage = this.legL;
cage2 = this.footL;
}
else if( this.head.pPole.idx == pIdx || this.head.pEff.idx == pIdx ) cage = this.head;
else if( this.footL.pPole.idx == pIdx || this.footL.pEff.idx == pIdx ) cage = this.footL;
else if( this.footR.pPole.idx == pIdx || this.footR.pEff.idx == pIdx ) cage = this.footR;
else if( this.handL.pPole.idx == pIdx || this.handL.pEff.idx == pIdx ) cage = this.handL;
else if( this.handR.pPole.idx == pIdx || this.handR.pEff.idx == pIdx ) cage = this.handR;
else{
for( let c of this.spineCage ){
if( c.pPole.idx == pIdx ){ cage = c; break; }
}
}
if( !cage ){
console.warn( 'Can not found Verlet Cage that pole belongs to:', pIdx );
return this;
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
let isDone = false;
let i = 0;
cage.poleMode( true );
do{
isDone = true;
if( !cage.resolve() ) isDone = false; // Run Cage
if( cage2 && !cage2.resolve() ) isDone = false; // Run Second Cage
i++;
}while( !isDone && i < this.skeleton.iterations )
cage.poleMode( false );
return this;
}
updateRigTargets(){
const r = this.rig;
const effDir : TVec3 = [0,0,0];
const poleDir : TVec3 = [0,0,0];
// HIPS
this.hipCage.getAxis( effDir, poleDir );
r.hip?.solver
.setTargetDir( effDir, poleDir )
.setMovePos( this.hipCage.getHeadPos(), true );
// HEAD
this.head.getAxis( effDir, poleDir );
r.head?.solver
.setTargetDir( effDir, poleDir );
// ARMS
r.armL?.solver
.setTargetPos( this.armL.getTailPos() )
.setTargetPole( this.armL.getPoleDir( poleDir ) );
r.armR?.solver
.setTargetPos( this.armR.getTailPos() )
.setTargetPole( this.armR.getPoleDir( poleDir ) );
this.handL.getAxis( effDir, poleDir );
r.handL?.solver
.setTargetDir( effDir, poleDir );
this.handR.getAxis( effDir, poleDir );
r.handR?.solver
.setTargetDir( effDir, poleDir );
// LEGS
r.legL?.solver
.setTargetPos( this.legL.getTailPos() )
.setTargetPole( this.legL.getPoleDir( poleDir ) );
r.legR?.solver
.setTargetPos( this.legR.getTailPos() )
.setTargetPole( this.legR.getPoleDir( poleDir ) );
this.footL.getAxis( effDir, poleDir );
r.footL?.solver
.setTargetDir( effDir, poleDir );
this.footR.getAxis( effDir, poleDir );
r.footR?.solver
.setTargetDir( effDir, poleDir );
// SPINE
if( r.spine ){
const aEff : Array< TVec3 > = [];
const aPol : Array< TVec3 > = [];
for( let i=0; i < this.spineCage.length; i++ ){
this.spineCage[ i ]
.getAxis( poleDir, effDir ); // Spine has Pole+Eff flipped
aEff.push( effDir.slice( 0 ) );
aPol.push( poleDir.slice( 0 ) );
}
this.chestCage.getAxis( poleDir, effDir );
aEff.push( effDir.slice( 0 ) );
aPol.push( poleDir.slice( 0 ) );
r.spine.solver.setChainDir( aEff, aPol );
}
}
}
export default BipedFBIK; | the_stack |
import { separateDisplayNameAndHOCs } from "./utils/separateDisplayNameAndHOCs";
import {
ElementTypeClass,
ElementTypeFunction,
ElementTypeMemo,
ElementTypeForwardRef,
ElementTypeProvider,
ElementTypeConsumer,
ElementTypeHostRoot,
} from "../../common/constants";
import type { CoreApi } from "./core";
import {
Fiber,
MemoizedState,
TransferFiber,
TransferFiberChanges,
TransferContextChange,
FiberRoot,
FiberType,
ReactDevtoolsHookHandlers,
RecordEventHandler,
ReactContext,
ReactDispatcherTrapApi,
FiberDispatchCall,
CommitTrigger,
HookInfo,
TransferHookInfo,
TransferPropChange,
TransferStateChange,
ClassComponentUpdateCall,
} from "../types";
import { simpleValueSerialization } from "./utils/simpleValueSerialization";
import { objectDiff } from "./utils/objectDiff";
import { arrayDiff } from "./utils/arrayDiff";
import { getDisplayName } from "./utils/getDisplayName";
import { extractCallLoc } from "./utils/stackTrace";
// Per-context bookkeeping captured during a commit: `providerId` is presumably
// the fiber id of the owning provider (it is read for context display below),
// and `valueChangedEventId` is the recorded event id for a value change,
// or null when the context value did not change in this commit.
type CommitUpdateInfo = {
providerId: number;
valueChangedEventId: number | null;
};
/** Dispatches to arrayDiff for array values, objectDiff for everything else. */
function valueDiff(prev: any, next: any) {
  if (Array.isArray(prev)) {
    return arrayDiff(prev, next);
  }
  return objectDiff(prev, next);
}
export function createReactDevtoolsHookHandlers(
{
ReactTypeOfWork,
// ReactPriorityLevels,
getFiberTypeId,
getOrGenerateFiberId,
getFiberIdThrows,
getFiberIdUnsafe,
getFiberOwnerId,
getFiberById,
removeFiber,
getElementTypeForFiber,
getDisplayNameForFiber,
setRootPseudoKey,
didFiberRender,
removeRootPseudoKey,
shouldFilterFiber,
}: CoreApi,
{
getDispatchHookIndex,
getFiberTypeHookInfo,
flushDispatchCalls,
}: ReactDispatcherTrapApi,
recordEvent: RecordEventHandler
): ReactDevtoolsHookHandlers {
const { HostRoot, SuspenseComponent, OffscreenComponent, ContextProvider } =
ReactTypeOfWork;
// const {
// ImmediatePriority,
// UserBlockingPriority,
// NormalPriority,
// LowPriority,
// IdlePriority,
// NoPriority,
// } = ReactPriorityLevels;
const idToOwnerId = new Map<number, number>();
const commitUpdatedFiberId = new Map<number, number | undefined>();
const commitTriggeredFiber = new Set<Fiber>();
const commitClassFiberUpdateCalls = new Map<
Fiber,
ClassComponentUpdateCall[]
>();
const commitFiberUpdateCalls = new Map<any, FiberDispatchCall[]>();
const commitContext = new Map<ReactContext<any>, CommitUpdateInfo>();
let currentRootId = -1;
let currentCommitId = -1;
let commitIdSeed = 0;
let classComponentUpdateCalls: Array<ClassComponentUpdateCall> = [];
const patchedClassComponentUpdater = new Set<any>();
const classComponentInstanceToFiber = new WeakMap<
any,
{ rootId: number; fiber: Fiber }
>();
const recordedTypeDef = new Map<
number,
{
hookContextIndecies: Map<ReactContext<any>, number>;
hookMemoIndecies: number[];
hooks: TransferHookInfo[];
}
>();
const unmountedFiberIds = new Set<number>();
const unmountedFiberIdsByOwnerId = new Map<number, Set<number>>();
const unmountedFiberIdBeforeSiblingId = new Map<number, number>();
const unmountedFiberIdForParentId = new Map<number, number>();
const untrackFibersSet = new Set<Fiber>();
let untrackFibersTimer: ReturnType<typeof setTimeout> | null = null;
// Removes a Fiber (and its alternate) from the Maps used to track their id.
// This method should always be called when a Fiber is unmounting.
function untrackFiber(fiber: Fiber) {
// Untrack Fibers after a slight delay in order to support a Fast Refresh edge case:
// 1. Component type is updated and Fast Refresh schedules an update+remount.
// 2. flushPendingErrorsAndWarningsAfterDelay() runs, sees the old Fiber is no longer mounted
// (it's been disconnected by Fast Refresh), and calls untrackFiber() to clear it from the Map.
// 3. React flushes pending passive effects before it runs the next render,
// which logs an error or warning, which causes a new ID to be generated for this Fiber.
// 4. DevTools now tries to unmount the old Component with the new ID.
//
// The underlying problem here is the premature clearing of the Fiber ID,
// but DevTools has no way to detect that a given Fiber has been scheduled for Fast Refresh.
// (The "_debugNeedsRemount" flag won't necessarily be set.)
//
// The best we can do is to delay untracking by a small amount,
// and give React time to process the Fast Refresh delay.
untrackFibersSet.add(fiber);
if (untrackFibersTimer === null) {
untrackFibersTimer = setTimeout(untrackFibers, 1000);
}
}
function untrackFibers() {
if (untrackFibersTimer !== null) {
clearTimeout(untrackFibersTimer);
untrackFibersTimer = null;
}
for (const fiber of untrackFibersSet) {
removeFiber(fiber);
}
untrackFibersSet.clear();
}
function getComponentChange(
prevFiber: Fiber,
nextFiber: Fiber
): TransferFiberChanges | null {
const type = getElementTypeForFiber(nextFiber);
if (
type !== ElementTypeClass &&
type !== ElementTypeFunction &&
type !== ElementTypeMemo &&
type !== ElementTypeForwardRef &&
type !== ElementTypeProvider &&
type !== ElementTypeConsumer
) {
return null;
}
const isElementTypeClass = prevFiber.stateNode !== null;
const data: TransferFiberChanges = {
props: getPropsChanges(prevFiber.memoizedProps, nextFiber.memoizedProps),
...(isElementTypeClass
? {
// Class component
context: getClassContextChanges(nextFiber),
state: getStateChanges(
prevFiber.memoizedState,
nextFiber.memoizedState,
prevFiber
),
}
: {
// Functional component
context: getFunctionContextChanges(nextFiber),
state: getStateHooksChanges(
prevFiber.memoizedState,
nextFiber.memoizedState
),
memos: getMemoHookChanges(nextFiber),
}),
};
return data;
}
function getContextsForClassFiber(fiber: Fiber): ReactContext<any> | null {
const instance = fiber.stateNode || null;
if (instance !== null) {
return instance.constructor?.contextType || null;
}
return null;
}
function getClassContextChanges(
fiber: Fiber
): TransferContextChange[] | undefined {
const context = getContextsForClassFiber(fiber);
if (context !== null) {
const valueChangedEventId =
commitContext.get(context)?.valueChangedEventId || null;
if (valueChangedEventId !== null) {
return [
{
context: 0,
valueChangedEventId,
},
];
}
}
return;
}
function getContextsForFunctionFiber(
fiber: Fiber
): Array<ReactContext<any>> | null {
let cursor =
fiber.dependencies?.firstContext ||
fiber.contextDependencies?.first ||
null;
if (cursor !== null) {
const contexts = [];
while (cursor !== null) {
contexts.push(cursor.context);
cursor = cursor.next;
}
return contexts;
}
return null;
}
function getFunctionContextChanges(
fiber: Fiber
): TransferContextChange[] | undefined {
const contexts = getContextsForFunctionFiber(fiber);
if (contexts !== null) {
const seenContexts = new Set<number>();
const changes = [];
const typeId = getFiberTypeId(fiber.type);
const hookContextIndecies =
recordedTypeDef.get(typeId)?.hookContextIndecies;
for (const context of contexts) {
const contextIndex = hookContextIndecies?.get(context);
const valueChangedEventId =
commitContext.get(context)?.valueChangedEventId || null;
if (
typeof contextIndex === "number" &&
valueChangedEventId !== null &&
!seenContexts.has(contextIndex)
) {
// React adds extra entries to dependencies list in some cases,
// e.g. useContext(A) -> useContext(B) -> useContext(A) will produce
// 3 entries on dependencies list instead of 2. Moreover re-renders
// might double count of entries on the list.
// It's not clear that's a bug or a feature, so just we exclude
// context reference duplicates for now
seenContexts.add(contextIndex);
changes.push({
context: contextIndex,
valueChangedEventId,
});
}
}
if (changes.length > 0) {
return changes;
}
}
return;
}
function getStateHooksChanges(
prev: MemoizedState = null,
next: MemoizedState = null
): TransferStateChange[] | undefined {
if (prev === null || next === null || prev === next) {
return;
}
const changes: TransferStateChange[] = [];
while (next !== null && prev !== null) {
// We only interested in useState/useReducer hooks, since only these
// hooks can be a trigger for an update. Such hooks have a special
// signature in the form of the presence of the "queue" property.
// So filter hooks by this attribute. With hookNames can distinguish
// these hooks.
if (next.queue) {
const prevValue = prev.memoizedState;
const nextValue = next.memoizedState;
if (!Object.is(prevValue, nextValue)) {
const dispatch = next.queue.dispatch;
const dispatchCalls = commitFiberUpdateCalls.get(dispatch);
changes.push({
hook: getDispatchHookIndex(dispatch),
prev: simpleValueSerialization(prevValue),
next: simpleValueSerialization(nextValue),
diff: valueDiff(prevValue, nextValue),
calls: dispatchCalls?.map(entry => ({
name: entry.dispatchName,
loc: entry.loc,
})),
});
}
}
next = next.next;
prev = prev.next;
}
return changes.length > 0 ? changes : undefined;
}
function getPropsChanges(prev: MemoizedState, next: MemoizedState) {
if (prev == null || next == null || prev === next) {
return undefined;
}
const keys = new Set([...Object.keys(prev), ...Object.keys(next)]);
const changedProps: TransferPropChange[] = [];
for (const key of keys) {
if (!Object.is(prev[key], next[key])) {
changedProps.push({
name: key,
prev: simpleValueSerialization(prev[key]),
next: simpleValueSerialization(next[key]),
diff: valueDiff(prev[key], next[key]),
});
}
}
return changedProps;
}
function getStateChanges(
prev: MemoizedState,
next: MemoizedState,
fiber: Fiber
) {
if (prev == null || next == null || Object.is(prev, next)) {
return undefined;
}
const calls = commitClassFiberUpdateCalls.get(fiber);
const setStateCall = calls?.find(call => call.type === "setState");
const changes: TransferStateChange = {
hook: null,
prev: simpleValueSerialization(prev),
next: simpleValueSerialization(next),
diff: valueDiff(prev, next),
calls: setStateCall
? [
{
name: "setState",
loc: setStateCall.loc,
},
]
: null,
};
return [changes];
}
function getMemoHookChanges(fiber: Fiber) {
const hookMemoIndecies =
recordedTypeDef.get(getFiberTypeId(fiber.type))?.hookMemoIndecies || [];
const changes = [];
let nextState = fiber.memoizedState || null;
let prevState = fiber.alternate?.memoizedState || null;
let stateIndex = 0;
while (nextState !== null && prevState !== null) {
if (nextState.queue === null && Array.isArray(nextState.memoizedState)) {
const [prevValue, prevDeps] = prevState.memoizedState;
const [nextValue, nextDeps] = nextState.memoizedState;
const memoHookIndex = hookMemoIndecies[stateIndex++];
const changedDeps = [];
if (prevDeps !== nextDeps) {
// recompute
if (prevDeps !== null && nextDeps !== null) {
for (let i = 0; i < prevDeps.length; i++) {
if (!Object.is(prevDeps[i], nextDeps[i])) {
changedDeps.push({
index: i,
prev: simpleValueSerialization(prevDeps[i]),
next: simpleValueSerialization(nextDeps[i]),
diff: valueDiff(prevDeps[i], nextDeps[i]),
});
}
}
}
changes.push({
hook: memoHookIndex,
prev: simpleValueSerialization(prevValue),
next: simpleValueSerialization(nextValue),
diff: valueDiff(prevValue, nextValue),
deps: changedDeps,
});
}
}
nextState = nextState.next || null;
prevState = prevState.next || null;
}
return changes.length > 0 ? changes : undefined;
}
function getFiberContexts(
fiber: Fiber,
fiberType: number,
fiberHooks: HookInfo[]
) {
if (fiber.stateNode !== null) {
const context = getContextsForClassFiber(fiber);
if (context === null) {
return null;
}
return [
{
name: getDisplayName(context, "Context"),
providerId: commitContext.get(context)?.providerId,
},
];
}
if (fiberType === ElementTypeConsumer) {
const context = fiber.type._context || fiber.type.context;
return [
{
name: getDisplayName(context, "Context"),
providerId: commitContext.get(context)?.providerId,
},
];
}
const hookContexts = fiberHooks.reduce(
(contexts, hook) =>
hook.context != null ? contexts.add(hook.context) : contexts,
new Set<ReactContext<any>>()
);
if (hookContexts.size) {
return [...hookContexts].map(context => ({
name: getDisplayName(context, "Context"),
providerId: commitContext.get(context)?.providerId,
}));
}
return null;
}
function recordFiberTypeDefIfNeeded(
fiber: Fiber,
typeId: number,
fiberType: FiberType
) {
if (recordedTypeDef.has(typeId)) {
return;
}
const hooks = getFiberTypeHookInfo(typeId);
const contexts = getFiberContexts(fiber, fiberType, hooks);
const hookContextIndecies = new Map<ReactContext<any>, number>();
const hookMemoIndecies: number[] = [];
const transferHooks: TransferHookInfo[] = [];
for (const hook of hooks) {
let hookContext = null;
if (hook.context) {
hookContext = hookContextIndecies.get(hook.context);
if (hookContext === undefined) {
hookContextIndecies.set(
hook.context,
(hookContext = hookContextIndecies.size)
);
}
}
if (hook.name === "useMemo" || hook.name === "useCallback") {
hookMemoIndecies.push(transferHooks.length);
}
transferHooks.push({
...hook,
context: hookContext,
});
}
if (fiberType === ElementTypeClass) {
const { updater } = fiber.stateNode;
if (!patchedClassComponentUpdater.has(updater)) {
patchedClassComponentUpdater.add(updater);
const { enqueueForceUpdate, enqueueSetState } = updater;
Object.defineProperties(updater, {
enqueueForceUpdate: {
value(inst: any, callback: any) {
classComponentUpdateCalls.push({
type: "forceUpdate",
...classComponentInstanceToFiber.get(inst),
loc: extractCallLoc(2),
});
return enqueueForceUpdate(inst, callback);
},
},
enqueueSetState: {
value(inst: any, payload: any, callback: any) {
classComponentUpdateCalls.push({
type: "setState",
...classComponentInstanceToFiber.get(inst),
loc: extractCallLoc(1),
});
return enqueueSetState(inst, payload, callback);
},
},
});
}
}
recordedTypeDef.set(typeId, {
hookContextIndecies,
hookMemoIndecies,
hooks: transferHooks,
});
recordEvent({
op: "fiber-type-def",
commitId: currentCommitId,
typeId,
definition: {
contexts,
hooks: transferHooks,
},
});
}
/**
 * Formats a fiber's `_debugSource` into a `"file:line"` or
 * `"file:line:column"` string, or returns null when the source info is
 * missing or malformed. Runtime type checks are kept deliberately: the
 * value comes from React internals and may not match the declared types.
 */
function locFromDebugSource({
  fileName,
  lineNumber,
  columnNumber,
}: {
  fileName: string;
  lineNumber: number;
  columnNumber?: number;
}) {
  if (
    typeof fileName !== "string" ||
    typeof lineNumber !== "number" ||
    lineNumber <= 0
  ) {
    return null;
  }
  let loc = `${fileName}:${lineNumber}`;
  if (typeof columnNumber === "number" && columnNumber > 0) {
    loc += `:${columnNumber}`;
  }
  return loc;
}
/**
 * Emits a "mount" event for a fiber that just appeared in the tree.
 * The host root gets a synthetic transfer record; any other fiber's record
 * is derived from the fiber itself (type, key, owner, display name, source
 * location).
 */
function recordMount(fiber: Fiber, parentFiber: Fiber | null) {
  const isRoot = fiber.tag === HostRoot;
  const fiberId = getOrGenerateFiberId(fiber);
  let props: string[] = [];
  let transferFiber: TransferFiber;
  let triggerEventId: number | undefined;
  if (isRoot) {
    transferFiber = {
      id: fiberId,
      type: ElementTypeHostRoot,
      typeId: 0,
      rootMode: fiber.stateNode.tag || 0,
      // Use the container DOM element's id as a stand-in key, if present.
      key: fiber.stateNode.containerInfo.id || null,
      ownerId: 0,
      parentId: 0,
      displayName: null,
      hocDisplayNames: null,
      loc: null,
    };
  } else {
    const { key, type } = fiber;
    const elementType = getElementTypeForFiber(fiber);
    const parentId = parentFiber ? getFiberIdThrows(parentFiber) : 0;
    const ownerIdCandidate = getFiberOwnerId(fiber);
    // Fall back to the current root when the owner is unknown (-1).
    const ownerId =
      ownerIdCandidate !== -1 ? ownerIdCandidate : currentRootId;
    const { displayName, hocDisplayNames } = separateDisplayNameAndHOCs(
      getDisplayNameForFiber(fiber),
      elementType
    );
    props = Object.keys(fiber.memoizedProps);
    // If the owner updated in this commit, that update triggered this mount.
    triggerEventId = commitUpdatedFiberId.get(ownerId);
    transferFiber = {
      id: fiberId,
      type: elementType,
      typeId: getFiberTypeId(type),
      key: key === null ? null : String(key),
      ownerId,
      parentId,
      displayName,
      hocDisplayNames,
      loc: fiber._debugSource ? locFromDebugSource(fiber._debugSource) : null,
    };
  }
  recordFiberTypeDefIfNeeded(fiber, transferFiber.typeId, transferFiber.type);
  const { selfTime, totalTime } = getDurations(fiber);
  const eventId = recordEvent({
    op: "mount",
    commitId: currentCommitId,
    fiberId,
    fiber: transferFiber,
    props,
    selfTime,
    totalTime,
    trigger: triggerEventId,
  });
  idToOwnerId.set(fiberId, transferFiber.ownerId);
  // Propagate the trigger so descendants mounted in this commit chain to it.
  commitUpdatedFiberId.set(fiberId, triggerEventId ?? eventId);
  if (transferFiber.type === ElementTypeClass) {
    // Remember instance -> fiber so patched setState/forceUpdate calls can
    // be attributed back to this fiber.
    classComponentInstanceToFiber.set(fiber.stateNode, {
      rootId: currentRootId,
      fiber,
    });
  }
}
/**
 * Emits an "unmount" event for the given fiber id and cleans up the
 * per-fiber bookkeeping (owner map, fiber tracking).
 */
function recordUnmount(fiberId: number) {
  const ownerId = idToOwnerId.get(fiberId);
  // Chain the unmount to the owner's update event in this commit, if any.
  const triggerEventId = commitUpdatedFiberId.get(ownerId as number);
  const eventId = recordEvent({
    op: "unmount",
    commitId: currentCommitId,
    fiberId,
    trigger: triggerEventId,
  });
  commitUpdatedFiberId.set(fiberId, triggerEventId ?? eventId);
  idToOwnerId.delete(fiberId);
  const fiber = getFiberById(fiberId);
  if (fiber !== null) {
    untrackFiber(fiber);
  }
}
function recordSubtreeUnmount(fiberId: number) {
unmountedFiberIds.delete(fiberId);
recordUnmount(fiberId);
const ownerUnmountedFiberIds = unmountedFiberIdsByOwnerId.get(fiberId);
if (ownerUnmountedFiberIds !== undefined) {
unmountedFiberIdsByOwnerId.delete(fiberId);
for (const fiberId of ownerUnmountedFiberIds) {
recordSubtreeUnmount(fiberId);
}
}
}
// Flushes pending unmounts that occurred right before the fiber with the
// given id. The "unmounted before sibling" chain is walked to its head
// first, then each subtree is recorded farthest-first — the same order the
// original recursive formulation produced.
function recordPreviousSiblingUnmount(fiberId: number) {
  const chain: number[] = [];
  let unmountId = unmountedFiberIdBeforeSiblingId.get(fiberId);
  while (unmountId !== undefined) {
    chain.push(unmountId);
    unmountId = unmountedFiberIdBeforeSiblingId.get(unmountId);
  }
  for (const id of chain.reverse()) {
    recordSubtreeUnmount(id);
  }
}
// Flushes pending unmounts for the last child of the fiber with the given
// id, including any unmounted siblings preceding that child.
function recordLastChildUnmounts(fiberId: number) {
  const lastChildUnmountId = unmountedFiberIdForParentId.get(fiberId);
  if (lastChildUnmountId === undefined) {
    return;
  }
  recordPreviousSiblingUnmount(lastChildUnmountId);
  recordSubtreeUnmount(lastChildUnmountId);
}
/**
 * Handles a fiber unmount. Inside a commit the unmount is recorded
 * immediately; outside a commit it is remembered in several maps so that
 * it can be replayed in a natural order during the next commit's tree
 * traversal.
 */
function unmountFiber(fiber: Fiber) {
  const id = getFiberIdUnsafe(fiber);
  if (id === null) {
    // If we've never seen this Fiber, it might be inside of a legacy render Suspense fragment (so the store is not even aware of it).
    // In that case we can just ignore it or it will cause errors later on.
    // One example of this is a Lazy component that never resolves before being unmounted.
    //
    // This also might indicate a Fast Refresh force-remount scenario.
    //
    // TODO: This is fragile and can obscure actual bugs.
    return;
  }
  const isRoot = fiber.tag === HostRoot;
  if (isRoot || !shouldFilterFiber(fiber)) {
    if (currentCommitId !== -1) {
      // The unmount occurs within a commit — record it immediately.
      recordSubtreeUnmount(id);
    } else {
      // The unmount occurs outside of a commit — remember it instead of
      // recording it. React reports unmounts before a commit, so the
      // remembered ids are flushed during commit processing. Several maps
      // are used so unmount events can be replayed in a natural order on
      // the component tree traversal (the real event order is unknown and
      // is simulated there).
      const ownerId = idToOwnerId.get(id) || 0;
      const siblingId = fiber.sibling
        ? getFiberIdUnsafe(fiber.sibling)
        : null;
      if (siblingId !== null) {
        unmountedFiberIdBeforeSiblingId.set(siblingId, id);
      } else {
        // No next sibling: remember this as the parent's last-child unmount.
        const parentId = fiber.return ? getFiberIdUnsafe(fiber.return) : null;
        if (parentId !== null) {
          unmountedFiberIdForParentId.set(parentId, id);
        }
      }
      if (unmountedFiberIdsByOwnerId.has(ownerId)) {
        unmountedFiberIdsByOwnerId.get(ownerId)?.add(id);
      } else {
        unmountedFiberIdsByOwnerId.set(ownerId, new Set([id]));
      }
      unmountedFiberIds.add(id);
    }
  }
  if (!fiber._debugNeedsRemount) {
    // NOTE(review): deliberate no-op placeholder; its purpose is unclear —
    // confirm before removing.
    // unmountedFiberIds.delete(id);
  }
}
/**
 * Records mount events for `firstChild` (and, when `traverseSiblings` is
 * true, its siblings), descending into children. Timed-out Suspense fibers
 * are special-cased so the fallback tree is mounted as if it were the
 * Suspense fiber's own child.
 */
function mountFiberRecursively(
  firstChild: Fiber | null,
  parentFiber: Fiber | null,
  traverseSiblings: boolean
) {
  // Iterate over siblings rather than recursing.
  // This reduces the chance of stack overflow for wide trees (e.g. lists with many items).
  let fiber = firstChild;
  while (fiber !== null) {
    const shouldIncludeInTree = !shouldFilterFiber(fiber);
    const isSuspense = fiber.tag === SuspenseComponent;
    const isProvider = fiber.tag === ContextProvider;
    // The context object lives on the provider's type; presumably the
    // property name (_context vs context) differs across React versions.
    const context = isProvider
      ? fiber.type._context || fiber.type.context
      : null;
    let prevCommitContextValue: any;
    // Generate an ID even for filtered Fibers, in case it's needed later (e.g. for Profiling).
    const fiberId = getOrGenerateFiberId(fiber);
    if (context !== null) {
      // Track the innermost provider for this context during traversal so
      // descendants can attribute context reads; restored on the way out.
      prevCommitContextValue = commitContext.get(context);
      commitContext.set(context, {
        providerId: fiberId,
        valueChangedEventId: null,
      });
    }
    if (shouldIncludeInTree) {
      recordMount(fiber, parentFiber);
    }
    if (isSuspense) {
      const isTimedOut = fiber.memoizedState !== null;
      if (isTimedOut) {
        // Special case: if Suspense mounts in a timed-out state,
        // get the fallback child from the inner fragment and mount
        // it as if it was our own child. Updates handle this too.
        const primaryChildFragment = fiber.child;
        const fallbackChildFragment = primaryChildFragment?.sibling;
        const fallbackChild = fallbackChildFragment?.child || null;
        if (fallbackChild !== null) {
          mountFiberRecursively(
            fallbackChild,
            shouldIncludeInTree ? fiber : parentFiber,
            true
          );
        }
      } else {
        let primaryChild = null;
        // Older React versions don't wrap Suspense children in an
        // Offscreen fiber (OffscreenComponent === -1 means the tag is
        // unknown to this renderer version).
        const areSuspenseChildrenConditionallyWrapped =
          OffscreenComponent === -1;
        if (areSuspenseChildrenConditionallyWrapped) {
          primaryChild = fiber.child;
        } else if (fiber.child !== null) {
          primaryChild = fiber.child.child;
        }
        if (primaryChild !== null) {
          mountFiberRecursively(
            primaryChild,
            shouldIncludeInTree ? fiber : parentFiber,
            true
          );
        }
      }
    } else {
      if (fiber.child !== null) {
        mountFiberRecursively(
          fiber.child,
          shouldIncludeInTree ? fiber : parentFiber,
          true
        );
      }
    }
    if (context !== null) {
      // Restore the outer provider info for this context.
      commitContext.set(context, prevCommitContextValue);
    }
    fiber = traverseSiblings ? fiber.sibling : null;
  }
}
// Simulates unmount events for a fiber's children. Used for Suspense trees
// when React switches from the primary children to the fallback: the
// primary tree is hidden rather than unmounted, so no real unmount events
// are fired and we must emit them ourselves.
function unmountFiberChildrenRecursively(fiber: Fiber) {
  // A nested timed-out Suspense renders its fallback set — traverse that
  // instead of the (hidden) primary children.
  const showsFallback =
    fiber.tag === SuspenseComponent && fiber.memoizedState !== null;
  let child = showsFallback
    ? fiber.child?.sibling?.child || null
    : fiber.child;
  while (child !== null) {
    // Record simulated unmounts children-first. Fibers without a return
    // pointer are real unmounts that React reports on its own — skip them.
    if (child.return !== null) {
      unmountFiber(child);
      unmountFiberChildrenRecursively(child);
    }
    child = child.sibling;
  }
}
// Computes a fiber's render durations. Call on mount or when the fiber
// actually changed, otherwise the values may belong to a previous update.
function getDurations(fiber: Fiber) {
  const totalTime = fiber.actualDuration ?? 0;
  // React's actualDuration includes time spent rendering children, which the
  // frontend cannot subtract itself (immediate children may be filtered out
  // on the backend), so derive the self duration here. Note this is not the
  // same as React's base duration — the two accumulate differently.
  let selfTime = totalTime;
  if (totalTime > 0) {
    for (let child = fiber.child; child !== null; child = child.sibling) {
      selfTime -= child.actualDuration || 0;
    }
  }
  return { totalTime, selfTime };
}
/**
 * Emits the appropriate event for a fiber that went through a render pass:
 * a full "update" when it actually re-rendered, or one of the
 * "update-bailout-*" events when it did not.
 */
function recordUpdate(fiber: Fiber) {
  const { alternate = null } = fiber;
  // actually alternate is always not a null here, this check is for TS only
  if (alternate === null) {
    return;
  }
  const fiberId = getFiberIdThrows(fiber);
  const ownerId = getFiberOwnerId(fiber);
  // Chain this update to the owner's update only when props actually
  // changed (i.e. the owner re-rendered this fiber with new props).
  const triggerEventId =
    fiber.memoizedProps !== alternate.memoizedProps
      ? commitUpdatedFiberId.get(ownerId)
      : undefined;
  if (didFiberRender(alternate, fiber)) {
    const { selfTime, totalTime } = getDurations(fiber);
    const changes = getComponentChange(alternate, fiber);
    const classUpdateCalls = commitClassFiberUpdateCalls.get(fiber);
    const specialReasons = [];
    if (classUpdateCalls !== undefined) {
      // forceUpdate() bypasses the normal change detection, so surface it
      // as an explicit reason.
      for (const call of classUpdateCalls) {
        if (call.type === "forceUpdate") {
          specialReasons.push({ name: "forceUpdate", loc: call.loc });
        }
      }
    }
    // FIXME: changes may be non-null even when nothing actually changed,
    // and then this "ownerUpdate" reason is not registered.
    if (changes === null && commitUpdatedFiberId.has(ownerId)) {
      specialReasons.push({ name: "ownerUpdate", loc: null });
    }
    const eventId = recordEvent({
      op: "update",
      commitId: currentCommitId,
      fiberId,
      selfTime,
      totalTime,
      changes,
      specialReasons: specialReasons.length ? specialReasons : null,
      trigger: triggerEventId,
    });
    commitUpdatedFiberId.set(fiberId, triggerEventId || eventId);
    if (
      changes !== null &&
      getElementTypeForFiber(fiber) === ElementTypeProvider
    ) {
      // A provider whose "value" prop changed: link the change event to the
      // context so consumer updates can reference it.
      const valueChange = changes.props?.find(prop => prop.name === "value");
      if (valueChange !== undefined) {
        const contextInfo = commitContext.get(
          fiber.type._context || fiber.type.context
        );
        if (contextInfo !== undefined) {
          contextInfo.valueChangedEventId = eventId;
        }
      }
    }
  } else if (fiber.stateNode && fiber.updateQueue !== alternate.updateQueue) {
    // Did not re-render although its update queue changed — presumably a
    // class component bailing out via shouldComponentUpdate; confirm.
    recordEvent({
      op: "update-bailout-scu",
      commitId: currentCommitId,
      fiberId,
      changes: {
        props: getPropsChanges(fiber.memoizedProps, alternate.memoizedProps),
        state: getStateChanges(
          fiber.memoizedState,
          alternate.memoizedState,
          fiber
        ),
      },
      trigger: triggerEventId,
    });
  } else if (commitTriggeredFiber.has(fiber)) {
    // A state dispatch targeted this fiber but the render bailed out.
    recordEvent({
      op: "update-bailout-state",
      commitId: currentCommitId,
      fiberId,
      trigger: triggerEventId,
    });
  } else if (
    commitUpdatedFiberId.has(ownerId) &&
    getElementTypeForFiber(fiber) === ElementTypeMemo
  ) {
    // The owner updated but this memoized component skipped re-rendering.
    recordEvent({
      op: "update-bailout-memo",
      commitId: currentCommitId,
      fiberId,
      trigger: triggerEventId,
    });
  }
}
// NOTE(review): comment below appears inherited from react-devtools; this
// variant returns nothing — confirm and drop the stale claim.
// Returns whether closest unfiltered fiber parent needs to reset its child list.
/**
 * Walks a fiber and its subtree for an update commit, diffing against the
 * previous (alternate) fiber: records updates for re-rendered fibers,
 * mounts for new ones, and flushes remembered unmounts at the positions
 * where they occurred. Timed-out Suspense trees are special-cased.
 */
function updateFiberRecursively(
  nextFiber: Fiber,
  prevFiber: Fiber,
  parentFiber: Fiber | null
) {
  const fiberId = getOrGenerateFiberId(nextFiber);
  const shouldIncludeInTree = !shouldFilterFiber(nextFiber);
  const isSuspense = nextFiber.tag === SuspenseComponent;
  const isProvider = nextFiber.tag === ContextProvider;
  const context = isProvider
    ? nextFiber.type._context || nextFiber.type.context
    : null;
  let prevCommitContextValue: any;
  // Flush unmounts that happened right before this fiber's position.
  recordPreviousSiblingUnmount(fiberId);
  if (context !== null) {
    // Track the innermost provider for this context during traversal so
    // descendants can attribute context reads; restored on the way out.
    prevCommitContextValue = commitContext.get(context);
    commitContext.set(context, {
      providerId: fiberId,
      valueChangedEventId: null,
    });
  }
  if (shouldIncludeInTree) {
    recordUpdate(nextFiber);
  }
  // The behavior of timed-out Suspense trees is unique.
  // Rather than unmount the timed out content (and possibly lose important state),
  // React re-parents this content within a hidden Fragment while the fallback is showing.
  // This behavior doesn't need to be observable in the DevTools though.
  // It might even result in a bad user experience for e.g. node selection in the Elements panel.
  // The easiest fix is to strip out the intermediate Fragment fibers,
  // so the Elements panel and Profiler don't need to special case them.
  // Suspense components only have a non-null memoizedState if they're timed-out.
  const prevDidTimeout = isSuspense && prevFiber.memoizedState !== null;
  const nextDidTimeOut = isSuspense && nextFiber.memoizedState !== null;
  // The logic below is inspired by the code paths in updateSuspenseComponent()
  // inside ReactFiberBeginWork in the React source code.
  if (prevDidTimeout && nextDidTimeOut) {
    // Fallback -> Fallback:
    // 1. Reconcile fallback set.
    const nextFallbackChildSet = nextFiber.child?.sibling || null;
    // Note: We can't use nextFiber.child.sibling.alternate
    // because the set is special and alternate may not exist.
    const prevFallbackChildSet = prevFiber.child?.sibling || null;
    if (nextFallbackChildSet !== null && prevFallbackChildSet !== null) {
      updateFiberRecursively(
        nextFallbackChildSet,
        prevFallbackChildSet,
        nextFiber
      );
    }
  } else if (prevDidTimeout && !nextDidTimeOut) {
    // Fallback -> Primary:
    // 1. Unmount fallback set
    // Note: don't emulate fallback unmount because React actually did it.
    // 2. Mount primary set
    const nextPrimaryChildSet = nextFiber.child;
    if (nextPrimaryChildSet !== null) {
      mountFiberRecursively(
        nextPrimaryChildSet,
        shouldIncludeInTree ? nextFiber : parentFiber,
        true
      );
    }
  } else if (!prevDidTimeout && nextDidTimeOut) {
    // Primary -> Fallback:
    // 1. Hide primary set
    // This is not a real unmount, so it won't get reported by React.
    // We need to manually walk the previous tree and record unmounts.
    unmountFiberChildrenRecursively(prevFiber);
    // 2. Mount fallback set
    const nextFallbackChildSet = nextFiber.child?.sibling || null;
    if (nextFallbackChildSet !== null) {
      mountFiberRecursively(
        nextFallbackChildSet,
        shouldIncludeInTree ? nextFiber : parentFiber,
        true
      );
    }
  } else {
    // Common case: Primary -> Primary.
    // This is the same code path as for non-Suspense fibers.
    if (nextFiber.child !== prevFiber.child) {
      // If the first child is different, we need to traverse them.
      // Each next child will be either a new child (mount) or an alternate (update).
      let nextChild = nextFiber.child;
      while (nextChild) {
        // We already know children will be referentially different because
        // they are either new mounts or alternates of previous children.
        // Schedule updates and mounts depending on whether alternates exist.
        // We don't track deletions here because they are reported separately.
        if (nextChild.alternate) {
          const prevChild = nextChild.alternate;
          updateFiberRecursively(
            nextChild,
            prevChild,
            shouldIncludeInTree ? nextFiber : parentFiber
          );
        } else {
          mountFiberRecursively(
            nextChild,
            shouldIncludeInTree ? nextFiber : parentFiber,
            false
          );
        }
        // Try the next child.
        nextChild = nextChild.sibling;
      }
    }
  }
  if (context !== null) {
    // Restore the outer provider info for this context.
    commitContext.set(context, prevCommitContextValue);
  }
  // Flush unmounts remembered for this fiber's last child.
  recordLastChildUnmounts(fiberId);
}
/**
 * Emits a "commit-start" event, collecting the triggers that caused this
 * commit: the initial mount, hook dispatch calls (setState/dispatch from
 * effects or events), and class component setState/forceUpdate calls.
 */
function recordCommitStart(root: FiberRoot, initialMount: boolean) {
  const dispatchCalls = flushDispatchCalls(root);
  const triggers: CommitTrigger[] = [];
  if (initialMount) {
    triggers.push({
      type: "initial-mount",
      kind: "mount",
      fiberId: currentRootId,
      loc: null,
    });
  }
  for (const call of dispatchCalls) {
    const fiberId = getOrGenerateFiberId(call.fiber);
    // Group the calls by dispatch function for later per-fiber lookups.
    let fiberDispatchCalls = commitFiberUpdateCalls.get(call.dispatch);
    if (fiberDispatchCalls === undefined) {
      commitFiberUpdateCalls.set(call.dispatch, (fiberDispatchCalls = []));
    }
    fiberDispatchCalls.push(call);
    commitTriggeredFiber.add(call.fiber);
    if (call.effectFiber) {
      // Dispatch issued from inside an effect.
      triggers.push({
        type: call.effectName || "unknown",
        kind: call.dispatchName === "setState" ? "useState" : "useReducer",
        fiberId,
        relatedFiberId: getOrGenerateFiberId(call.effectFiber),
        loc: call.loc,
      });
    } else if (call.event) {
      // Dispatch issued from an event handler.
      triggers.push({
        type: "event",
        kind: call.dispatchName === "setState" ? "useState" : "useReducer",
        fiberId,
        event: call.event,
        loc: call.loc,
      });
    } else if (!call.renderFiber) {
      // Dispatch from outside render with no known source.
      triggers.push({
        type: "unknown",
        kind: call.dispatchName === "setState" ? "useState" : "useReducer",
        fiberId,
        loc: call.loc,
      });
    }
  }
  // Consume class component update calls belonging to this root; calls for
  // other roots are kept for their own commits.
  classComponentUpdateCalls = classComponentUpdateCalls.filter(call => {
    if (call.fiber && call.rootId === currentRootId) {
      const fiberId = getOrGenerateFiberId(call.fiber);
      let fiberDispatchCalls = commitClassFiberUpdateCalls.get(call.fiber);
      if (fiberDispatchCalls === undefined) {
        commitClassFiberUpdateCalls.set(
          call.fiber,
          (fiberDispatchCalls = [])
        );
      }
      fiberDispatchCalls.push(call);
      commitTriggeredFiber.add(call.fiber);
      triggers.push({
        type: "unknown",
        kind: call.type,
        fiberId,
        loc: call.loc,
      });
      return false;
    }
    return true;
  });
  recordEvent({
    op: "commit-start",
    commitId: currentCommitId,
    triggers, // FIXME: Don't send triggers for now
  });
}
// Fibers can't be traversed after they unmount, so React tells us about
// each unmount individually and we record it right away.
function handleCommitFiberUnmount(fiber: Fiber) {
  unmountFiber(fiber);
}
// Post-commit hook. Currently a no-op: the effect-duration profiling code
// below is disabled.
function handlePostCommitFiberRoot(/* root */) {
  // if (rootSupportsProfiling(root)) {
  //   if (currentCommitProfilingMetadata !== null) {
  //     const { effectDuration, passiveEffectDuration } =
  //       getEffectDurations(root);
  //     currentCommitProfilingMetadata.effectDuration = effectDuration;
  //     currentCommitProfilingMetadata.passiveEffectDuration =
  //       passiveEffectDuration;
  //   }
  // }
}
/**
 * Entry point for a committed root: opens a new commit, records the
 * commit-start triggers, traverses the tree (mount / update / unmount as
 * appropriate), flushes leftover remembered unmounts, and resets all
 * per-commit state.
 */
function handleCommitFiberRoot(root: FiberRoot /*, priorityLevel?: number*/) {
  const { current } = root;
  const { alternate } = current;
  // Flush any pending Fibers that we are untracking before processing the new commit.
  // If we don't do this, we might end up double-deleting Fibers in some cases (like Legacy Suspense).
  untrackFibers();
  currentCommitId = commitIdSeed++;
  currentRootId = getOrGenerateFiberId(current);
  // FIXME: add to commit event
  // console.log(formatPriorityLevel(priorityLevel || -1));
  // Handle multi-renderer edge-case where only some v16 renderers support profiling.
  // const isProfilingSupported = rootSupportsProfiling(root);
  // if (isProfilingSupported) {
  //   // If profiling is active, store commit time and duration.
  //   // The frontend may request this information after profiling has stopped.
  //   currentCommitProfilingMetadata = {
  //     commitTime: getCurrentTime() - profilingStartTime,
  //     priorityLevel:
  //       priorityLevel == null ? null : formatPriorityLevel(priorityLevel),
  //     // Initialize to null; if new enough React version is running,
  //     // these values will be read during separate handlePostCommitFiberRoot() call.
  //     effectDuration: null,
  //     passiveEffectDuration: null,
  //   };
  // }
  // TODO: relying on this seems a bit fishy.
  const wasMounted =
    alternate !== null && Boolean(alternate.memoizedState?.element);
  const isMounted = Boolean(current.memoizedState?.element);
  recordCommitStart(root, !wasMounted && isMounted);
  if (!wasMounted && isMounted) {
    // Mount a new root.
    setRootPseudoKey(currentRootId, current);
    mountFiberRecursively(current, null, false);
  } else if (wasMounted && isMounted) {
    // Update an existing root.
    updateFiberRecursively(current, alternate, null);
  } else if (wasMounted && !isMounted) {
    // Unmount an existing root.
    removeRootPseudoKey(currentRootId);
    unmountFiber(current);
  }
  // Normally unmounted fibers are recorded during the component tree
  // traversal above; flush whatever is left just in case.
  for (const fiberId of unmountedFiberIds) {
    recordUnmount(fiberId);
  }
  // Commit is done — reset all per-commit state.
  currentCommitId = -1;
  commitTriggeredFiber.clear();
  commitUpdatedFiberId.clear();
  commitFiberUpdateCalls.clear();
  commitContext.clear();
  unmountedFiberIds.clear();
  unmountedFiberIdsByOwnerId.clear();
  unmountedFiberIdBeforeSiblingId.clear();
  unmountedFiberIdForParentId.clear();
}
// function formatPriorityLevel(priorityLevel: number | null = null) {
// switch (priorityLevel) {
// case ImmediatePriority:
// return "Immediate";
// case UserBlockingPriority:
// return "User-Blocking";
// case NormalPriority:
// return "Normal";
// case LowPriority:
// return "Low";
// case IdlePriority:
// return "Idle";
// case NoPriority:
// default:
// return "Unknown";
// }
// }
return {
handleCommitFiberRoot,
handlePostCommitFiberRoot,
handleCommitFiberUnmount,
};
} | the_stack |
import * as coreHttp from "@azure/core-http";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
import { StorageClientContext } from "../storageClientContext";
import {
PageBlobCreateOptionalParams,
PageBlobCreateResponse,
PageBlobUploadPagesOptionalParams,
PageBlobUploadPagesResponse,
PageBlobClearPagesOptionalParams,
PageBlobClearPagesResponse,
PageBlobUploadPagesFromURLOptionalParams,
PageBlobUploadPagesFromURLResponse,
PageBlobGetPageRangesOptionalParams,
PageBlobGetPageRangesResponse,
PageBlobGetPageRangesDiffOptionalParams,
PageBlobGetPageRangesDiffResponse,
PageBlobResizeOptionalParams,
PageBlobResizeResponse,
SequenceNumberActionType,
PageBlobUpdateSequenceNumberOptionalParams,
PageBlobUpdateSequenceNumberResponse,
PageBlobCopyIncrementalOptionalParams,
PageBlobCopyIncrementalResponse
} from "../models";
/** Class representing a PageBlob. */
export class PageBlob {
  private readonly client: StorageClientContext;

  /**
   * Initialize a new instance of the class PageBlob class.
   * @param client Reference to the service client
   */
  constructor(client: StorageClientContext) {
    this.client = client;
  }

  /**
   * Shared dispatch path for all operations: folds the caller-supplied
   * options into the operation arguments in the shape core-http expects and
   * forwards the request to the service client with the given spec.
   */
  private sendOperation<TResponse>(
    spec: coreHttp.OperationSpec,
    args: { [parameterName: string]: unknown },
    options?: coreHttp.OperationOptions
  ): Promise<TResponse> {
    const operationArguments: coreHttp.OperationArguments = {
      ...args,
      options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
    };
    return this.client.sendOperationRequest(
      operationArguments,
      spec
    ) as Promise<TResponse>;
  }

  /**
   * The Create operation creates a new page blob.
   * @param contentLength The length of the request.
   * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The
   * page blob size must be aligned to a 512-byte boundary.
   * @param options The options parameters.
   */
  create(
    contentLength: number,
    blobContentLength: number,
    options?: PageBlobCreateOptionalParams
  ): Promise<PageBlobCreateResponse> {
    return this.sendOperation<PageBlobCreateResponse>(
      createOperationSpec,
      { contentLength, blobContentLength },
      options
    );
  }

  /**
   * The Upload Pages operation writes a range of pages to a page blob
   * @param contentLength The length of the request.
   * @param body Initial data
   * @param options The options parameters.
   */
  uploadPages(
    contentLength: number,
    body: coreHttp.HttpRequestBody,
    options?: PageBlobUploadPagesOptionalParams
  ): Promise<PageBlobUploadPagesResponse> {
    return this.sendOperation<PageBlobUploadPagesResponse>(
      uploadPagesOperationSpec,
      { contentLength, body },
      options
    );
  }

  /**
   * The Clear Pages operation clears a set of pages from a page blob
   * @param contentLength The length of the request.
   * @param options The options parameters.
   */
  clearPages(
    contentLength: number,
    options?: PageBlobClearPagesOptionalParams
  ): Promise<PageBlobClearPagesResponse> {
    return this.sendOperation<PageBlobClearPagesResponse>(
      clearPagesOperationSpec,
      { contentLength },
      options
    );
  }

  /**
   * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a
   * URL
   * @param sourceUrl Specify a URL to the copy source.
   * @param sourceRange Bytes of source data in the specified range. The length of this range should
   * match the ContentLength header and x-ms-range/Range destination range header.
   * @param contentLength The length of the request.
   * @param range The range of bytes to which the source range would be written. The range should be 512
   * aligned and range-end is required.
   * @param options The options parameters.
   */
  uploadPagesFromURL(
    sourceUrl: string,
    sourceRange: string,
    contentLength: number,
    range: string,
    options?: PageBlobUploadPagesFromURLOptionalParams
  ): Promise<PageBlobUploadPagesFromURLResponse> {
    return this.sendOperation<PageBlobUploadPagesFromURLResponse>(
      uploadPagesFromURLOperationSpec,
      { sourceUrl, sourceRange, contentLength, range },
      options
    );
  }

  /**
   * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a
   * page blob
   * @param options The options parameters.
   */
  getPageRanges(
    options?: PageBlobGetPageRangesOptionalParams
  ): Promise<PageBlobGetPageRangesResponse> {
    return this.sendOperation<PageBlobGetPageRangesResponse>(
      getPageRangesOperationSpec,
      {},
      options
    );
  }

  /**
   * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were
   * changed between target blob and previous snapshot.
   * @param options The options parameters.
   */
  getPageRangesDiff(
    options?: PageBlobGetPageRangesDiffOptionalParams
  ): Promise<PageBlobGetPageRangesDiffResponse> {
    return this.sendOperation<PageBlobGetPageRangesDiffResponse>(
      getPageRangesDiffOperationSpec,
      {},
      options
    );
  }

  /**
   * Resize the Blob
   * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The
   * page blob size must be aligned to a 512-byte boundary.
   * @param options The options parameters.
   */
  resize(
    blobContentLength: number,
    options?: PageBlobResizeOptionalParams
  ): Promise<PageBlobResizeResponse> {
    return this.sendOperation<PageBlobResizeResponse>(
      resizeOperationSpec,
      { blobContentLength },
      options
    );
  }

  /**
   * Update the sequence number of the blob
   * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.
   * This property applies to page blobs only. This property indicates how the service should modify the
   * blob's sequence number
   * @param options The options parameters.
   */
  updateSequenceNumber(
    sequenceNumberAction: SequenceNumberActionType,
    options?: PageBlobUpdateSequenceNumberOptionalParams
  ): Promise<PageBlobUpdateSequenceNumberResponse> {
    return this.sendOperation<PageBlobUpdateSequenceNumberResponse>(
      updateSequenceNumberOperationSpec,
      { sequenceNumberAction },
      options
    );
  }

  /**
   * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.
   * The snapshot is copied such that only the differential changes between the previously copied
   * snapshot are transferred to the destination. The copied snapshots are complete copies of the
   * original snapshot and can be read or copied from as usual. This API is supported since REST version
   * 2016-05-31.
   * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to
   * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would
   * appear in a request URI. The source blob must either be public or must be authenticated via a shared
   * access signature.
   * @param options The options parameters.
   */
  copyIncremental(
    copySource: string,
    options?: PageBlobCopyIncrementalOptionalParams
  ): Promise<PageBlobCopyIncrementalResponse> {
    return this.sendOperation<PageBlobCopyIncrementalResponse>(
      copyIncrementalOperationSpec,
      { copySource },
      options
    );
  }
}
// Operation Specifications
// Shared serializers for the specs below: most page-blob operations exchange
// XML payloads; the plain serializer is used for the binary page upload.
const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);
const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
// Spec for PageBlob.create — PUT /{containerName}/{blob}; the service
// responds 201 with PageBlobCreateHeaders on success.
const createOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  responses: {
    201: {
      headersMapper: Mappers.PageBlobCreateHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobCreateExceptionHeaders
    }
  },
  queryParameters: [Parameters.timeoutInSeconds],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.contentLength,
    Parameters.metadata,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.encryptionKey,
    Parameters.encryptionKeySha256,
    Parameters.encryptionAlgorithm,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.blobCacheControl,
    Parameters.blobContentType,
    Parameters.blobContentMD5,
    Parameters.blobContentEncoding,
    Parameters.blobContentLanguage,
    Parameters.blobContentDisposition,
    Parameters.immutabilityPolicyExpiry,
    Parameters.immutabilityPolicyMode,
    Parameters.encryptionScope,
    Parameters.tier,
    Parameters.blobTagsString,
    Parameters.legalHold1,
    Parameters.blobType,
    Parameters.blobContentLength,
    Parameters.blobSequenceNumber
  ],
  isXML: true,
  serializer: xmlSerializer
};
// Spec for PageBlob.uploadPages — PUT with a binary request body (hence the
// non-XML serializer and "binary" mediaType); 201 on success.
const uploadPagesOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  responses: {
    201: {
      headersMapper: Mappers.PageBlobUploadPagesHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders
    }
  },
  requestBody: Parameters.body1,
  queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.contentLength,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.range,
    Parameters.encryptionKey,
    Parameters.encryptionKeySha256,
    Parameters.encryptionAlgorithm,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.encryptionScope,
    Parameters.transactionalContentMD5,
    Parameters.transactionalContentCrc64,
    Parameters.contentType1,
    Parameters.accept2,
    Parameters.pageWrite,
    Parameters.ifSequenceNumberLessThanOrEqualTo,
    Parameters.ifSequenceNumberLessThan,
    Parameters.ifSequenceNumberEqualTo
  ],
  mediaType: "binary",
  serializer
};
// Spec for PageBlob.clearPages — PUT sharing the same comp19 query parameter
// as uploadPages but distinguished by the pageWrite1 header; 201 on success.
const clearPagesOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  responses: {
    201: {
      headersMapper: Mappers.PageBlobClearPagesHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobClearPagesExceptionHeaders
    }
  },
  queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.contentLength,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.range,
    Parameters.encryptionKey,
    Parameters.encryptionKeySha256,
    Parameters.encryptionAlgorithm,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.encryptionScope,
    Parameters.ifSequenceNumberLessThanOrEqualTo,
    Parameters.ifSequenceNumberLessThan,
    Parameters.ifSequenceNumberEqualTo,
    Parameters.pageWrite1
  ],
  isXML: true,
  serializer: xmlSerializer
};
// Generated coreHttp operation spec for the Page Blob "Upload Pages From URL"
// call: PUT /{containerName}/{blob}; the page data comes from another blob
// identified by the sourceUrl/sourceRange headers (plus source conditional
// headers) rather than a request body. 201 on success, StorageError otherwise.
const uploadPagesFromURLOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  responses: {
    201: {
      headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders
    }
  },
  queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.contentLength,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.encryptionKey,
    Parameters.encryptionKeySha256,
    Parameters.encryptionAlgorithm,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.encryptionScope,
    Parameters.sourceIfModifiedSince,
    Parameters.sourceIfUnmodifiedSince,
    Parameters.sourceIfMatch,
    Parameters.sourceIfNoneMatch,
    Parameters.sourceContentMD5,
    Parameters.copySourceAuthorization,
    Parameters.pageWrite,
    Parameters.ifSequenceNumberLessThanOrEqualTo,
    Parameters.ifSequenceNumberLessThan,
    Parameters.ifSequenceNumberEqualTo,
    Parameters.sourceUrl,
    Parameters.sourceRange,
    Parameters.sourceContentCrc64,
    Parameters.range1
  ],
  isXML: true,
  serializer: xmlSerializer
};
// Generated coreHttp operation spec for the Page Blob "Get Page Ranges" call:
// GET /{containerName}/{blob}; 200 returns an XML PageList body, any other
// status deserializes as StorageError. The snapshot query parameter selects
// which version of the blob to read.
const getPageRangesOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.PageList,
      headersMapper: Mappers.PageBlobGetPageRangesHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders
    }
  },
  queryParameters: [
    Parameters.timeoutInSeconds,
    Parameters.snapshot,
    Parameters.comp20
  ],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.range,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags
  ],
  isXML: true,
  serializer: xmlSerializer
};
// Generated coreHttp operation spec for the Page Blob "Get Page Ranges Diff"
// call: like getPageRangesOperationSpec, but the prevsnapshot query parameter
// and prevSnapshotUrl header identify the baseline snapshot to diff against.
// 200 returns an XML PageList body; other statuses deserialize as StorageError.
const getPageRangesDiffOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.PageList,
      headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders
    }
  },
  queryParameters: [
    Parameters.timeoutInSeconds,
    Parameters.snapshot,
    Parameters.comp20,
    Parameters.prevsnapshot
  ],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.range,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.prevSnapshotUrl
  ],
  isXML: true,
  serializer: xmlSerializer
};
// Generated coreHttp operation spec for the Page Blob "Resize" call:
// PUT /{containerName}/{blob}; the new size is carried by the
// blobContentLength header. 200 on success, StorageError otherwise.
const resizeOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  responses: {
    200: {
      headersMapper: Mappers.PageBlobResizeHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobResizeExceptionHeaders
    }
  },
  queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.encryptionKey,
    Parameters.encryptionKeySha256,
    Parameters.encryptionAlgorithm,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.encryptionScope,
    Parameters.blobContentLength
  ],
  isXML: true,
  serializer: xmlSerializer
};
// Generated coreHttp operation spec for the Page Blob "Update Sequence Number"
// call: PUT /{containerName}/{blob}; the blobSequenceNumber and
// sequenceNumberAction headers describe the update. 200 on success,
// StorageError otherwise.
const updateSequenceNumberOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  responses: {
    200: {
      headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders
    }
  },
  queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.leaseId,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.blobSequenceNumber,
    Parameters.sequenceNumberAction
  ],
  isXML: true,
  serializer: xmlSerializer
};
// Generated coreHttp operation spec for the Page Blob "Copy Incremental" call:
// PUT /{containerName}/{blob}; the source blob is identified by the copySource
// header. Note the success status is 202 Accepted (the copy is asynchronous);
// other statuses deserialize as StorageError.
const copyIncrementalOperationSpec: coreHttp.OperationSpec = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  responses: {
    202: {
      headersMapper: Mappers.PageBlobCopyIncrementalHeaders
    },
    default: {
      bodyMapper: Mappers.StorageError,
      headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders
    }
  },
  queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],
  urlParameters: [Parameters.url],
  headerParameters: [
    Parameters.version,
    Parameters.requestId,
    Parameters.accept1,
    Parameters.ifModifiedSince,
    Parameters.ifUnmodifiedSince,
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.ifTags,
    Parameters.copySource
  ],
  isXML: true,
  serializer: xmlSerializer
};
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import fetch from 'node-fetch';
import { Activity } from 'botbuilder';
import { BoolProperty, EnumProperty, StringProperty, UnknownProperty } from '../properties';
import { Response, Headers } from 'node-fetch';
import { evaluateExpression } from '../jsonExtensions';
import {
BoolExpression,
BoolExpressionConverter,
EnumExpression,
EnumExpressionConverter,
StringExpression,
StringExpressionConverter,
ValueExpression,
ValueExpressionConverter,
} from 'adaptive-expressions';
import {
Converter,
ConverterFactory,
DialogContext,
Dialog,
DialogTurnResult,
DialogConfiguration,
} from 'botbuilder-dialogs';
/** Raw header map as authored in a declarative dialog (plain string values). */
type HeadersInput = Record<string, string>;
/** Header map after conversion, with every value wrapped in a StringExpression. */
type HeadersOutput = Record<string, StringExpression>;
/**
 * [HeadersInput](xref:botbuilder-dialogs-adaptive.HeadersInput) or [HeadersOutput](xref:botbuilder-dialogs-adaptive.HeadersOutput) to [HttpHeader](xref:botbuilder-dialogs-adaptive.HttpHeader) converter.
 */
class HttpHeadersConverter implements Converter<HeadersInput, HeadersOutput> {
    /**
     * Converts a [HeadersInput](xref:botbuilder-dialogs-adaptive.HeadersInput) or [HeadersOutput](xref:botbuilder-dialogs-adaptive.HeadersOutput) to [HttpHeader](xref:botbuilder-dialogs-adaptive.HttpHeader).
     * @param value [HeadersInput](xref:botbuilder-dialogs-adaptive.HeadersInput) or [HeadersOutput](xref:botbuilder-dialogs-adaptive.HeadersOutput) to convert.
     * @returns The [HttpHeader](xref:botbuilder-dialogs-adaptive.HttpHeader).
     */
    public convert(value: HeadersInput | HeadersOutput): HeadersOutput {
        const converted: HeadersOutput = {};
        for (const [name, raw] of Object.entries(value)) {
            // Already-converted values pass through untouched; plain strings get wrapped.
            converted[name] = raw instanceof StringExpression ? raw : new StringExpression(raw);
        }
        return converted;
    }
}
/**
 * How the body of an HTTP response should be interpreted by the
 * `HttpRequest` action. NOTE: numeric member values are part of the
 * declarative serialization contract — do not reorder members.
 */
export enum ResponsesTypes {
    /**
     * No response expected
     */
    None,
    /**
     * Plain JSON response
     */
    Json,
    /**
     * JSON Activity object to send to the user
     */
    Activity,
    /**
     * Json Array of activity objects to send to the user
     */
    Activities,
    /**
     * Binary data parsing from http response content
     */
    Binary,
}
/**
 * HTTP verbs supported by the `HttpRequest` action. String-valued so the
 * member can be passed directly as the request method.
 */
export enum HttpMethod {
    /**
     * Http GET
     */
    GET = 'GET',
    /**
     * Http POST
     */
    POST = 'POST',
    /**
     * Http PATCH
     */
    PATCH = 'PATCH',
    /**
     * Http PUT
     */
    PUT = 'PUT',
    /**
     * Http DELETE
     */
    DELETE = 'DELETE',
}
/**
 * Result data of HTTP operation.
 */
export class Result {
    /**
     * The status code from the response to HTTP operation.
     */
    public statusCode?: number;
    /**
     * The reason phrase from the response to HTTP operation.
     */
    public reasonPhrase?: string;
    /**
     * The headers from the response to HTTP operation.
     */
    public headers?: { [key: string]: string } = {};
    /**
     * The content body from the response to HTTP operation.
     */
    public content?: any;
    /**
     * Initialize a new instance of Result class, copying any provided
     * response headers into a plain name/value map.
     * @param headers Response headers.
     */
    public constructor(headers?: Headers) {
        headers?.forEach((value, name) => {
            this.headers[name] = value;
        });
    }
}
/**
 * Declarative configuration shape for the `HttpRequest` action.
 */
export interface HttpRequestConfiguration extends DialogConfiguration {
    /** HTTP verb for the request (GET when omitted). */
    method?: HttpMethod;
    /** Content type of the request body ('application/json' when omitted). */
    contentType?: StringProperty;
    /** URL for the request. */
    url?: StringProperty;
    /** Request headers, as raw strings or already-converted StringExpressions. */
    headers?: HeadersInput | HeadersOutput;
    /** Raw body of the request. */
    body?: UnknownProperty;
    /** How to interpret the HTTP response (Json when omitted). */
    responseType?: EnumProperty<ResponsesTypes>;
    /** Memory path to store the HTTP Result in ('turn.results' by default). */
    resultProperty: StringProperty;
    /** Expression which, when true, disables this action. */
    disabled?: BoolProperty;
}
/**
* Action for performing an `HttpRequest`.
*/
export class HttpRequest<O extends object = {}> extends Dialog<O> implements HttpRequestConfiguration {
public static $kind = 'Microsoft.HttpRequest';
public constructor();
/**
* Initializes a new instance of the [HttpRequest](xref:botbuilder-dialogs-adaptive.HttpRequest) class.
* @param method The [HttpMethod](xref:botbuilder-dialogs-adaptive.HttpMethod), for example POST, GET, DELETE or PUT.
* @param url URL for the request.
* @param headers The headers of the request.
* @param body The raw body of the request.
*/
public constructor(method: HttpMethod, url: string, headers: { [key: string]: string }, body: any);
/**
* Initializes a new instance of the [HttpRequest](xref:botbuilder-dialogs-adaptive.HttpRequest) class.
* @param method Optional. The [HttpMethod](xref:botbuilder-dialogs-adaptive.HttpMethod), for example POST, GET, DELETE or PUT.
* @param url Optional. URL for the request.
* @param headers Optional. The headers of the request.
* @param body Optional. The raw body of the request.
*/
public constructor(method?: HttpMethod, url?: string, headers?: { [key: string]: string }, body?: any) {
super();
this.method = method || HttpMethod.GET;
this.url = new StringExpression(url);
if (headers) {
this.headers = {};
for (const key in headers) {
this.headers[key] = new StringExpression(headers[key]);
}
}
this.body = new ValueExpression(body);
}
/**
* Http Method
*/
public method?: HttpMethod = HttpMethod.GET;
/**
* Content type of request body
*/
public contentType?: StringExpression = new StringExpression('application/json');
/**
* Http Url
*/
public url?: StringExpression;
/**
* Http Headers
*/
public headers?: { [key: string]: StringExpression } = {};
/**
* Http Body
*/
public body?: ValueExpression;
/**
* The response type of the response
*/
public responseType?: EnumExpression<ResponsesTypes> = new EnumExpression<ResponsesTypes>(ResponsesTypes.Json);
/**
* Gets or sets the property expression to store the HTTP response in.
*/
public resultProperty: StringExpression = new StringExpression('turn.results');
/**
* An optional expression which if is true will disable this action.
*/
public disabled?: BoolExpression;
public getConverter(property: keyof HttpRequestConfiguration): Converter | ConverterFactory {
switch (property) {
case 'contentType':
return new StringExpressionConverter();
case 'url':
return new StringExpressionConverter();
case 'headers':
return new HttpHeadersConverter();
case 'body':
return new ValueExpressionConverter();
case 'responseType':
return new EnumExpressionConverter<ResponsesTypes>(ResponsesTypes);
case 'resultProperty':
return new StringExpressionConverter();
case 'disabled':
return new BoolExpressionConverter();
default:
return super.getConverter(property);
}
}
/**
* Starts a new [Dialog](xref:botbuilder-dialogs.Dialog) and pushes it onto the dialog stack.
* @param dc The [DialogContext](xref:botbuilder-dialogs.DialogContext) for the current turn of conversation.
* @param options Optional. Initial information to pass to the dialog.
* @returns A `Promise` representing the asynchronous operation.
*/
public async beginDialog(dc: DialogContext, options?: O): Promise<DialogTurnResult> {
if (this.disabled && this.disabled.getValue(dc.state)) {
return await dc.endDialog();
}
const instanceUrl = this.url.getValue(dc.state);
const instanceMethod = this.method.toString();
const instanceHeaders = {};
for (let key in this.headers) {
if (key.toLowerCase() === 'content-type') {
key = 'Content-Type';
}
instanceHeaders[key] = this.headers[key].getValue(dc.state);
}
const contentType = this.contentType.getValue(dc.state) || 'application/json';
instanceHeaders['Content-Type'] = contentType;
let instanceBody: string;
const body = evaluateExpression(dc.state, this.body);
if (body) {
if (typeof body === 'string') {
instanceBody = body;
} else {
instanceBody = JSON.stringify(Object.assign({}, body));
}
}
const traceInfo = {
request: {
method: instanceMethod,
url: instanceUrl,
headers: instanceHeaders,
content: instanceBody,
},
response: undefined,
};
let response: Response;
switch (this.method) {
case HttpMethod.DELETE:
case HttpMethod.GET:
response = await fetch(instanceUrl, {
method: instanceMethod,
headers: instanceHeaders,
});
break;
case HttpMethod.PUT:
case HttpMethod.PATCH:
case HttpMethod.POST:
response = await fetch(instanceUrl, {
method: instanceMethod,
headers: instanceHeaders,
body: instanceBody,
});
break;
}
const result = new Result(response.headers);
result.statusCode = response.status;
result.reasonPhrase = response.statusText;
switch (this.responseType.getValue(dc.state)) {
case ResponsesTypes.Activity:
result.content = await response.json();
dc.context.sendActivity(result.content as Activity);
break;
case ResponsesTypes.Activities:
result.content = await response.json();
dc.context.sendActivities(result.content as Activity[]);
break;
case ResponsesTypes.Json:
const content = await response.text();
try {
result.content = JSON.parse(content);
} catch {
result.content = content;
}
break;
case ResponsesTypes.Binary:
const buffer = await response.arrayBuffer();
result.content = new Uint8Array(buffer);
break;
case ResponsesTypes.None:
default:
break;
}
traceInfo.response = result;
// Write trace activity for http request and response values.
await dc.context.sendTraceActivity('HttpRequest', traceInfo, 'Microsoft.HttpRequest', this.id);
if (this.resultProperty) {
dc.state.setValue(this.resultProperty.getValue(dc.state), result);
}
return await dc.endDialog(result);
}
/**
* @protected
* Builds the compute Id for the [Dialog](xref:botbuilder-dialogs.Dialog).
* @returns A `string` representing the compute Id.
*/
protected onComputeId(): string {
return `HttpRequest[${this.method} ${this.url}]`;
}
} | the_stack |
import { module, test } from 'qunit';
import {
click,
currentURL,
fillIn,
settled,
visit,
waitFor,
waitUntil,
} from '@ember/test-helpers';
import { setupApplicationTest } from 'ember-qunit';
import Layer1TestWeb3Strategy from '@cardstack/web-client/utils/web3-strategies/test-layer1';
import Layer2TestWeb3Strategy from '@cardstack/web-client/utils/web3-strategies/test-layer2';
import a11yAudit from 'ember-a11y-testing/test-support/audit';
import BN from 'bn.js';
import percySnapshot from '@percy/ember';
import { currentNetworkDisplayInfo as c } from '@cardstack/web-client/utils/web3-strategies/network-display-info';
import { capitalize } from '@ember/string';
import {
createDepotSafe,
createSafeToken,
} from '@cardstack/web-client/utils/test-factories';
/** Selector for post number `postableIndex` inside milestone `milestoneIndex`. */
function postableSel(milestoneIndex: number, postableIndex: number): string {
  const milestone = `[data-test-milestone="${milestoneIndex}"]`;
  const postable = `[data-test-postable="${postableIndex}"]`;
  return milestone + postable;
}
/** Selector for post number `postableIndex` in the workflow epilogue. */
function epiloguePostableSel(postableIndex: number): string {
  return '[data-test-epilogue]' + `[data-test-postable="${postableIndex}"]`;
}
/** Selector for the completion marker of milestone `milestoneIndex`. */
function milestoneCompletedSel(milestoneIndex: number): string {
  const index = milestoneIndex;
  return `[data-test-milestone-completed][data-test-milestone="${index}"]`;
}
/** Selector for post number `postableIndex` in the workflow cancelation section. */
// Return type added for consistency with the sibling selector helpers above,
// which all declare an explicit `: string`.
function cancelationPostableSel(postableIndex: number): string {
  return `[data-test-cancelation][data-test-postable="${postableIndex}"]`;
}
module('Acceptance | deposit', function (hooks) {
setupApplicationTest(hooks);
test('Initiating workflow without wallet connections', async function (assert) {
await visit('/card-pay/deposit-withdrawal');
assert.equal(currentURL(), '/card-pay/deposit-withdrawal');
await click('[data-test-workflow-button="deposit"]');
let post = postableSel(0, 0);
assert.dom(`${post} img`).exists();
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 1))
.containsText('you need to connect two wallets');
assert
.dom(postableSel(0, 2))
.containsText(
`The funds you wish to deposit must be available in your ${c.layer1.conversationalName} wallet`
);
post = postableSel(0, 3);
await click(`${post} [data-test-wallet-option="metamask"]`);
await click(
`${post} [data-test-mainnnet-connection-action-container] [data-test-boxel-button]`
);
assert.dom(post).containsText(`Connect your ${c.layer1.fullName} wallet`);
await a11yAudit();
assert.ok(true, 'no a11y errors found - layer 1 connect card');
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('250500000000000000000'),
card: new BN('10000000000000000000000'),
});
await waitFor(`${post} [data-test-balance="ETH"]`);
assert.dom(`${post} [data-test-balance="ETH"]`).containsText('2.14');
assert.dom(`${post} [data-test-balance="DAI"]`).containsText('250.50');
assert.dom(`${post} [data-test-balance="CARD"]`).containsText('10,000.00');
await settled();
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Now it’s time to connect your ${c.layer2.fullName} wallet via your Card Wallet mobile app`
);
assert
.dom(postableSel(1, 1))
.containsText(
'Once you have installed the app, open the app and add an existing wallet/account'
);
assert
.dom(`${postableSel(1, 2)} [data-test-wallet-connect-loading-qr-code]`)
.exists();
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
layer2Service.test__simulateWalletConnectUri();
await waitFor('[data-test-wallet-connect-qr-code]');
assert.dom('[data-test-wallet-connect-qr-code]').exists();
// Simulate the user scanning the QR code and connecting their mobile wallet
let layer2AccountAddress = '0x182619c6Ea074C053eF3f1e1eF81Ec8De6Eb6E44';
layer2Service.test__simulateRemoteAccountSafes(layer2AccountAddress, [
createDepotSafe({
address: '0xB236ca8DbAB0644ffCD32518eBF4924ba8666666',
owners: [layer2AccountAddress],
tokens: [createSafeToken('DAI.CPXD', '0')],
}),
]);
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
await waitFor(`${postableSel(1, 2)} [data-test-balance-container]`);
await waitUntil(() => {
return (
document.querySelector('[data-test-balance="DAI.CPXD"]') === null &&
document.querySelector('[data-test-balance-container-loading]') === null
);
});
assert
.dom(`${postableSel(1, 2)} [data-test-balance="XDAI"]`)
.doesNotExist();
assert
.dom(`${postableSel(1, 2)} [data-test-balance-container]`)
.containsText('None');
assert
.dom(
'[data-test-card-pay-layer-2-connect] [data-test-card-pay-connect-button]'
)
.hasText('0x1826...6E44');
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
post = postableSel(2, 1);
// transaction-setup card
await waitFor(`${post} [data-test-balance="DAI"]`);
await click(
`${post} [data-test-deposit-transaction-setup] [data-test-boxel-button]`
);
assert
.dom(
`${post} [data-test-deposit-transaction-setup-from-balance="DAI"] [data-test-balance-display-amount]`
)
.hasText('250.50 DAI');
// transaction-amount card
assert
.dom(postableSel(2, 2))
.containsText('How many tokens would you like to deposit?');
post = postableSel(2, 3);
assert.dom(`${post} [data-test-source-token="DAI"]`).exists();
assert.dom(`${post} [data-test-unlock-button]`).isDisabled();
assert.dom(`${post} [data-test-deposit-button]`).isDisabled();
await fillIn('[data-test-token-amount-input]', '250');
assert
.dom(`${post} [data-test-unlock-button]`)
.isEnabled('Unlock button is enabled once amount has been entered');
// make sure that our property to test if balances are refreshed is not true yet
layer1Service.balancesRefreshed = false;
await click(`${post} [data-test-unlock-button]`);
// // MetaMask pops up and user approves the transaction. There is a spinner
// // on the "Unlock" button until the Ethereum transaction is mined.
// // When the mining is complete, the "Deposit" button becomes clickable.
assert
.dom(`${post} [data-test-unlock-button]`)
.hasClass('boxel-button--loading');
assert
.dom('[data-test-token-amount-input]')
.doesNotExist('Input field is no longer available when unlocking');
assert.dom('[data-test-deposit-amount-entered]').containsText('250.00 DAI');
assert.dom(`${post} [data-test-unlock-etherscan-button]`).doesNotExist();
layer1Service.test__simulateUnlockTxnHash();
await settled();
assert.dom(`${post} [data-test-unlock-etherscan-button]`).exists();
layer1Service.test__simulateUnlock();
await settled();
assert
.dom(`${post} [data-test-unlock-button]`)
.doesNotExist('Unlock button is no longer visible after unlocking.');
assert
.dom(`${post} [data-test-unlock-success-message]`)
.exists('There should be a success message after unlocking');
assert
.dom('[data-test-token-amount-input]')
.doesNotExist('Input field is no longer available after unlocking');
assert
.dom(`${post} [data-test-deposit-button]`)
.isEnabled('Deposit is enabled once unlocked');
await click(`${post} [data-test-deposit-button]`);
assert
.dom(`${post} [data-test-deposit-button]`)
.hasClass('boxel-button--loading');
assert.dom(`${post} [data-test-deposit-etherscan-button]`).doesNotExist();
layer1Service.test__simulateDepositTxnHash();
await settled();
assert.dom(`${post} [data-test-deposit-etherscan-button]`).exists();
layer1Service.test__simulateDeposit();
await settled();
assert.ok(
layer1Service.balancesRefreshed,
'Balances should be refreshed after relaying tokens'
);
assert
.dom(`${post} [data-test-deposit-button]`)
.doesNotExist('Deposit button is no longer visible after depositing.');
assert
.dom(`${post} [data-test-deposit-success-message]`)
.exists('There should be a success message after depositing');
assert
.dom(milestoneCompletedSel(2))
.containsText('Deposited into reserve pool');
assert
.dom(postableSel(3, 0))
.containsText(
`your token will be bridged to the ${c.layer2.shortName} blockchain`
);
post = postableSel(3, 1);
// transaction-status card
// we assert that it exists, and will show a blockscout button after completion of bridging (success state)
// the rest is handled in an integration test
assert.dom(`${post} [data-test-deposit-transaction-status-card]`).exists();
const blockCount = layer1Service.bridgeConfirmationBlockCount;
layer1Service.test__simulateBlockConfirmation();
await waitFor(`[data-test-token-bridge-step-block-count="${blockCount}"]`);
layer1Service.test__simulateBlockConfirmation();
await waitFor(
`[data-test-token-bridge-step-block-count="${blockCount + 1}"]`
);
layer1Service.test__simulateBlockConfirmation();
await waitFor(`[data-test-token-bridge-step="1"][data-test-completed]`);
assert.dom(`${post} [data-test-bridge-explorer-button]`).exists();
layer2Service.test__simulateBridgedToLayer2(
'0xabc123abc123abc123e5984131f6b4cc3ac8af14'
);
await waitFor(`${post} [data-test-blockscout-button]`);
assert.ok(
layer2Service.balancesRefreshed,
'Balances for layer 2 should be refreshed after bridging'
);
assert.dom(`${post} [data-test-blockscout-button]`).exists();
await settled();
assert
.dom(milestoneCompletedSel(3))
.containsText(`Tokens received on ${c.layer2.shortName}`);
assert
.dom(epiloguePostableSel(0))
.containsText('Thank you for your contribution!');
assert
.dom(epiloguePostableSel(1))
.containsText(`Minted from CARD Protocol on ${c.layer2.fullName}`);
assert.dom(epiloguePostableSel(1)).containsText('250.00 DAI.CPXD');
await waitFor(epiloguePostableSel(2));
assert
.dom(epiloguePostableSel(2))
.containsText(
`This is the remaining balance in your ${c.layer1.fullName} wallet`
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('500000000000000000'),
card: new BN('10000000000000000000000'),
});
await waitFor(`${epiloguePostableSel(3)} [data-test-balance="ETH"]`);
assert
.dom(`${epiloguePostableSel(3)} [data-test-balance="ETH"]`)
.containsText('2.14');
assert
.dom(`${epiloguePostableSel(3)} [data-test-balance="DAI"]`)
.containsText('0.50');
assert
.dom(`${epiloguePostableSel(3)} [data-test-balance="CARD"]`)
.containsText('10,000.00');
assert
.dom(
'[data-test-milestone] [data-test-boxel-action-chin] button[data-test-boxel-button]:not([disabled])'
)
.doesNotExist();
assert
.dom(
`${epiloguePostableSel(4)} [data-test-deposit-next-step="dashboard"]`
)
.exists();
await percySnapshot(assert);
await click(
`${epiloguePostableSel(4)} [data-test-deposit-next-step="dashboard"]`
);
assert.dom('[data-test-workflow-thread]').doesNotExist();
});
test('Initiating workflow with layer 1 wallet already connected', async function (assert) {
await visit('/card-pay/deposit-withdrawal');
await click(
'[data-test-card-pay-layer-1-connect] [data-test-card-pay-connect-button]'
);
assert.dom('[data-test-layer-connect-modal="layer1"]').exists();
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
await waitUntil(
() => !document.querySelector('[data-test-layer-connect-modal="layer1"]')
);
assert
.dom(
'[data-test-card-pay-layer-1-connect] [data-test-card-pay-connect-button]'
)
.hasText('0xaCD5...4Fb6');
assert.dom('[data-test-layer-connect-modal="layer1"]').doesNotExist();
await click('[data-test-workflow-button="deposit"]');
let post = postableSel(0, 0);
assert.dom(`${post} img`).exists();
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 1))
.containsText('you need to connect two wallets');
assert
.dom(postableSel(0, 2))
.containsText(
`The funds you wish to deposit must be available in your ${c.layer1.conversationalName} wallet`
);
assert
.dom(postableSel(0, 3))
.containsText(
`Looks like you’ve already connected your ${c.layer1.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Now it’s time to connect your ${c.layer2.fullName} wallet via your Card Wallet mobile app`
);
assert
.dom(postableSel(1, 1))
.containsText(
'Once you have installed the app, open the app and add an existing wallet/account'
);
assert
.dom(postableSel(1, 2))
.containsText('Loading QR Code for Card Wallet connection');
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
layer2Service.test__simulateWalletConnectUri();
await waitFor('[data-test-wallet-connect-qr-code]');
assert.dom('[data-test-wallet-connect-qr-code]').exists();
// Simulate the user scanning the QR code and connecting their mobile wallet
let layer2AccountAddress = '0x182619c6Ea074C053eF3f1e1eF81Ec8De6Eb6E44';
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
await waitUntil(
() => !document.querySelector('[data-test-wallet-connect-qr-code]')
);
assert
.dom(
'[data-test-card-pay-layer-2-connect] [data-test-card-pay-connect-button]'
)
.hasText('0x1826...6E44');
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
});
test('Initiating workflow with layer 2 wallet already connected', async function (assert) {
await visit('/card-pay/deposit-withdrawal');
await click(
'[data-test-card-pay-layer-2-connect] [data-test-card-pay-connect-button]'
);
assert.dom('[data-test-layer-connect-modal="layer2"]').exists();
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
layer2Service.test__simulateWalletConnectUri();
await waitFor('[data-test-wallet-connect-qr-code]');
assert.dom('[data-test-wallet-connect-qr-code]').exists();
// Simulate the user scanning the QR code and connecting their mobile wallet
let layer2AccountAddress = '0x182619c6Ea074C053eF3f1e1eF81Ec8De6Eb6E44';
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
await waitUntil(
() => !document.querySelector('[data-test-layer-connect-modal="layer2"]')
);
assert
.dom(
'[data-test-card-pay-layer-2-connect] [data-test-card-pay-connect-button]'
)
.hasText('0x1826...6E44');
assert.dom('[data-test-layer-connect-modal="layer2"]').doesNotExist();
await click('[data-test-workflow-button="deposit"]');
await click(`${postableSel(0, 3)} [data-test-wallet-option="metamask"]`);
await click(
`${postableSel(
0,
3
)} [data-test-mainnnet-connection-action-container] [data-test-boxel-button]`
);
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
await settled();
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Looks like you’ve already connected your ${c.layer2.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
});
test('Disconnecting Layer 1 from within the workflow', async function (assert) {
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('250500000000000000000'),
card: new BN('10000000000000000000000'),
});
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
let layer2AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer2Service.test__simulateRemoteAccountSafes(layer2AccountAddress, [
createDepotSafe({
owners: [layer2AccountAddress],
tokens: [createSafeToken('DAI.CPXD', '142200000000000000')],
}),
]);
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
await layer2Service.safes.fetch();
await visit('/card-pay/deposit-withdrawal');
await click('[data-test-workflow-button="deposit"]');
let post = postableSel(0, 0);
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 3))
.containsText(
`Looks like you’ve already connected your ${c.layer1.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Looks like you’ve already connected your ${c.layer2.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
assert
.dom(`${postableSel(0, 4)} [data-test-mainnet-disconnect-button]`)
.containsText('Disconnect Wallet');
await click(`${postableSel(0, 4)} [data-test-mainnet-disconnect-button]`);
// test that all cta buttons are disabled
assert
.dom(
'[data-test-milestone] [data-test-boxel-action-chin] button[data-test-boxel-button]:not([disabled])'
)
.doesNotExist();
await waitFor('[data-test-cancelation][data-test-postable]');
assert
.dom(cancelationPostableSel(0))
.containsText(
'It looks like your wallet(s) got disconnected. If you still want to deposit funds, please start again by connecting your wallet(s).'
);
assert.dom(cancelationPostableSel(1)).containsText('Workflow canceled');
assert
.dom('[data-test-workflow-default-cancelation-restart="deposit"]')
.exists();
});
// Scenario: both wallets are already connected and the deposit workflow has
// advanced to asset selection; Layer 1 then disconnects *outside* this tab
// (e.g. from the mobile wallet). The workflow must cancel itself and offer
// a restart, without any user interaction in this tab.
test('Disconnecting Layer 1 from outside the current tab (mobile wallet / other tabs)', async function (assert) {
// Arrange: simulate a connected Layer 1 account with token balances.
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('250500000000000000000'),
card: new BN('10000000000000000000000'),
});
// Arrange: simulate a connected Layer 2 account that owns a depot safe.
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
let layer2AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer2Service.test__simulateRemoteAccountSafes(layer2AccountAddress, [
createDepotSafe({
owners: [layer2AccountAddress],
tokens: [createSafeToken('DAI.CPXD', '142200000000000000')],
}),
]);
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
await layer2Service.safes.fetch();
// Start the deposit workflow.
await visit('/card-pay/deposit-withdrawal');
await click('[data-test-workflow-button="deposit"]');
// Sanity-check the workflow reached the "choose asset" milestone.
let post = postableSel(0, 0);
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 3))
.containsText(
`Looks like you’ve already connected your ${c.layer1.fullName} wallet`
);
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Looks like you’ve already connected your ${c.layer2.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
// Act: the Layer 1 strategy reports a disconnect originating outside this tab.
layer1Service.test__simulateDisconnectFromWallet();
await waitFor('[data-test-workflow-default-cancelation-cta="deposit"]');
// test that all cta buttons are disabled
assert
.dom(
'[data-test-milestone] [data-test-boxel-action-chin] button[data-test-boxel-button]:not([disabled])'
)
.doesNotExist();
// Assert: cancelation messaging and the restart affordance are shown.
assert
.dom(cancelationPostableSel(0))
.containsText(
'It looks like your wallet(s) got disconnected. If you still want to deposit funds, please start again by connecting your wallet(s).'
);
assert.dom(cancelationPostableSel(1)).containsText('Workflow canceled');
assert
.dom('[data-test-workflow-default-cancelation-restart="deposit"]')
.exists();
});
// Scenario: with both wallets connected, the user clicks the Layer 2
// disconnect button rendered inside the workflow's wallet card. The
// workflow must cancel and offer a restart.
test('Disconnecting Layer 2 from within the workflow', async function (assert) {
// Arrange: simulate a connected Layer 1 account with token balances.
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('250500000000000000000'),
card: new BN('10000000000000000000000'),
});
// Arrange: simulate a connected Layer 2 account that owns a depot safe.
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
let layer2AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer2Service.test__simulateRemoteAccountSafes(layer2AccountAddress, [
createDepotSafe({
owners: [layer2AccountAddress],
tokens: [createSafeToken('DAI.CPXD', '142200000000000000')],
}),
]);
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
await layer2Service.safes.fetch();
// Start the deposit workflow.
await visit('/card-pay/deposit-withdrawal');
await click('[data-test-workflow-button="deposit"]');
// Sanity-check the workflow reached the "choose asset" milestone.
let post = postableSel(0, 0);
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 3))
.containsText(
`Looks like you’ve already connected your ${c.layer1.fullName} wallet`
);
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Looks like you’ve already connected your ${c.layer2.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
// 0.14 is the depot's DAI.CPXD balance (142200000000000000 wei) rendered
// in the wallet card.
assert.dom('[data-test-layer-2-wallet-card]').containsText('0.14');
assert
.dom(
'[data-test-layer-2-wallet-card] [data-test-layer-2-wallet-disconnect-button]'
)
.containsText('Disconnect Wallet');
// Act: disconnect Layer 2 via the in-workflow button.
await click(
`[data-test-layer-2-wallet-card] [data-test-layer-2-wallet-disconnect-button]`
);
// test that all cta buttons are disabled
assert
.dom(
'[data-test-milestone] [data-test-boxel-action-chin] button[data-test-boxel-button]:not([disabled])'
)
.doesNotExist();
// Assert: cancelation messaging and the restart affordance are shown.
assert
.dom(cancelationPostableSel(0))
.containsText(
'It looks like your wallet(s) got disconnected. If you still want to deposit funds, please start again by connecting your wallet(s).'
);
assert.dom(cancelationPostableSel(1)).containsText('Workflow canceled');
assert
.dom('[data-test-workflow-default-cancelation-restart="deposit"]')
.exists();
});
// Scenario: same as the previous test, but the Layer 2 disconnect is
// reported by the wallet strategy itself (mobile wallet / another tab)
// rather than via a click in this tab.
test('Disconnecting Layer 2 from outside the current tab (mobile wallet / other tabs)', async function (assert) {
// Arrange: simulate a connected Layer 1 account with token balances.
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('250500000000000000000'),
card: new BN('10000000000000000000000'),
});
// Arrange: simulate a connected Layer 2 account that owns a depot safe.
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
let layer2AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer2Service.test__simulateRemoteAccountSafes(layer2AccountAddress, [
createDepotSafe({
owners: [layer2AccountAddress],
tokens: [createSafeToken('DAI.CPXD', '142200000000000000')],
}),
]);
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
// Start the deposit workflow.
await visit('/card-pay/deposit-withdrawal');
await click('[data-test-workflow-button="deposit"]');
// Sanity-check the workflow reached the "choose asset" milestone.
let post = postableSel(0, 0);
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 3))
.containsText(
`Looks like you’ve already connected your ${c.layer1.fullName} wallet`
);
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Looks like you’ve already connected your ${c.layer2.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
// 0.14 is the depot's DAI.CPXD balance (142200000000000000 wei).
assert.dom('[data-test-layer-2-wallet-card]').containsText('0.14');
assert
.dom(
'[data-test-layer-2-wallet-card] [data-test-layer-2-wallet-disconnect-button]'
)
.containsText('Disconnect Wallet');
// Act: the Layer 2 strategy reports a disconnect from outside this tab.
layer2Service.test__simulateDisconnectFromWallet();
await waitFor('[data-test-workflow-default-cancelation-cta="deposit"]');
// test that all cta buttons are disabled
assert
.dom(
'[data-test-milestone] [data-test-boxel-action-chin] button[data-test-boxel-button]:not([disabled])'
)
.doesNotExist();
// Assert: cancelation messaging and the restart affordance are shown.
assert
.dom(cancelationPostableSel(0))
.containsText(
'It looks like your wallet(s) got disconnected. If you still want to deposit funds, please start again by connecting your wallet(s).'
);
assert.dom(cancelationPostableSel(1)).containsText('Workflow canceled');
assert
.dom('[data-test-workflow-default-cancelation-restart="deposit"]')
.exists();
});
// Scenario: mid-workflow, the Layer 1 wallet switches to a different
// account. The workflow must cancel with the "changed accounts" message
// (distinct from the disconnect message) and offer a restart.
test('Changing layer 1 account should cancel the workflow', async function (assert) {
// Arrange: simulate a connected Layer 1 account, plus a second address
// to switch to later.
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
let secondLayer1AccountAddress =
'0x5416C61193C3393B46C2774ac4717C252031c0bE';
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('250500000000000000000'),
card: new BN('10000000000000000000000'),
});
// Arrange: simulate a connected Layer 2 account that owns a depot safe.
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
let layer2AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer2Service.test__simulateRemoteAccountSafes(layer2AccountAddress, [
createDepotSafe({
owners: [layer2AccountAddress],
tokens: [createSafeToken('DAI.CPXD', '142200000000000000')],
}),
]);
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
// Start the deposit workflow.
await visit('/card-pay/deposit-withdrawal');
await click('[data-test-workflow-button="deposit"]');
// Sanity-check the workflow reached the "choose asset" milestone.
let post = postableSel(0, 0);
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 3))
.containsText(
`Looks like you’ve already connected your ${c.layer1.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Looks like you’ve already connected your ${c.layer2.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
assert
.dom(`${postableSel(0, 4)} [data-test-mainnet-disconnect-button]`)
.containsText('Disconnect Wallet');
// Act: the Layer 1 wallet switches to a different account.
layer1Service.test__simulateAccountsChanged(
[secondLayer1AccountAddress],
'metamask'
);
await settled();
// test that all cta buttons are disabled
assert
.dom(
'[data-test-milestone] [data-test-boxel-action-chin] button[data-test-boxel-button]:not([disabled])'
)
.doesNotExist();
// Assert: the account-change cancelation messaging is shown.
await waitFor('[data-test-cancelation][data-test-postable]');
assert
.dom(cancelationPostableSel(0))
.containsText(
'It looks like you changed accounts in the middle of this workflow. If you still want to deposit funds, please restart the workflow.'
);
assert.dom(cancelationPostableSel(1)).containsText('Workflow canceled');
assert
.dom('[data-test-workflow-default-cancelation-restart="deposit"]')
.exists();
});
// Scenario: mid-workflow, the Layer 2 wallet switches to a different
// account. The workflow must cancel with the "changed accounts" message
// and offer a restart.
test('Changing layer 2 account should cancel the workflow', async function (assert) {
// Arrange: simulate a connected Layer 1 account with token balances.
let layer1Service = this.owner.lookup('service:layer1-network')
.strategy as Layer1TestWeb3Strategy;
let layer1AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
layer1Service.test__simulateAccountsChanged(
[layer1AccountAddress],
'metamask'
);
layer1Service.test__simulateBalances({
defaultToken: new BN('2141100000000000000'),
dai: new BN('250500000000000000000'),
card: new BN('10000000000000000000000'),
});
// Arrange: simulate a connected Layer 2 account that owns a depot safe,
// plus a second address to switch to later.
let layer2Service = this.owner.lookup('service:layer2-network')
.strategy as Layer2TestWeb3Strategy;
let layer2AccountAddress = '0xaCD5f5534B756b856ae3B2CAcF54B3321dd6654Fb6';
// Fix: the literal previously read '0x0x89205…' — a doubled '0x' prefix,
// which is not a valid 20-byte hex address.
let secondLayer2AccountAddress =
'0x89205A3A3b2A69De6Dbf7f01ED13B2108B2c43e7';
layer2Service.test__simulateRemoteAccountSafes(layer2AccountAddress, [
createDepotSafe({
owners: [layer2AccountAddress],
tokens: [createSafeToken('DAI.CPXD', '142200000000000000')],
}),
]);
await layer2Service.test__simulateAccountsChanged([layer2AccountAddress]);
// Start the deposit workflow.
await visit('/card-pay/deposit-withdrawal');
await click('[data-test-workflow-button="deposit"]');
// Sanity-check the workflow reached the "choose asset" milestone.
let post = postableSel(0, 0);
assert.dom(post).containsText('Hi there, we’re happy to see you');
assert
.dom(postableSel(0, 3))
.containsText(
`Looks like you’ve already connected your ${c.layer1.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(0))
.containsText(
`${capitalize(c.layer1.conversationalName)} wallet connected`
);
assert
.dom(postableSel(1, 0))
.containsText(
`Looks like you’ve already connected your ${c.layer2.fullName} wallet`
);
await settled();
assert
.dom(milestoneCompletedSel(1))
.containsText(`${c.layer2.fullName} wallet connected`);
assert
.dom(postableSel(2, 0))
.containsText('choose the asset you would like to deposit');
assert
.dom(`${postableSel(0, 4)} [data-test-mainnet-disconnect-button]`)
.containsText('Disconnect Wallet');
// Act: the Layer 2 wallet switches to a different account.
await layer2Service.test__simulateAccountsChanged([
secondLayer2AccountAddress,
]);
await settled();
// test that all cta buttons are disabled
assert
.dom(
'[data-test-milestone] [data-test-boxel-action-chin] button[data-test-boxel-button]:not([disabled])'
)
.doesNotExist();
// Assert: the account-change cancelation messaging is shown.
await waitFor('[data-test-cancelation][data-test-postable]');
assert
.dom(cancelationPostableSel(0))
.containsText(
'It looks like you changed accounts in the middle of this workflow. If you still want to deposit funds, please restart the workflow.'
);
assert.dom(cancelationPostableSel(1)).containsText('Workflow canceled');
assert
.dom('[data-test-workflow-default-cancelation-restart="deposit"]')
.exists();
});
}); | the_stack |
import * as React from 'react';
import { styled } from 'gatsby-theme-stitches/src/config';
import { rem } from 'polished';
import type { PropOf } from '@cometjs/react-utils';
import messages from './formField/messages';
// Discriminated union of every field kind FormField can render; `type` is
// the discriminant consumed by the switch in the FormField component below.
// Variant-specific data (options, defaults, accepted file types, terms
// markup) lives on the variant itself rather than on FormFieldProps.
type FormFieldVariants = (
| {
type: 'text',
}
| {
type: 'longtext',
}
| {
type: 'tel',
}
| {
type: 'email',
}
| {
type: 'select',
options: {
value: string,
label: string,
}[],
defaultValue?: string,
}
| {
type: 'checkbox',
defaultChecked?: boolean,
}
| {
type: 'radio',
options: {
value: string,
label: string,
}[],
defaultValue?: string,
}
| {
// accepts: file-type strings joined into the <input accept> attribute.
type: 'file',
accepts: string[],
}
| {
// terms: HTML markup injected into the terms box (see the 'terms' case).
type: 'terms',
terms: string,
}
);
// Props shared by every field variant. `name` is the submitted form-field
// name; `label` is the visible caption; `required` only affects the visual
// marker (it is not forwarded to the underlying input's `required` attribute).
type FormFieldProps = {
variants: FormFieldVariants,
name: string,
label: string,
className?: string,
placeholder?: string,
description?: string,
required?: boolean,
};
// Vertical wrapper for a label + input + description stack.
const Container = styled('div', {
display: 'flex',
flexDirection: 'column',
});
// Field caption. The `required` variant appends a small carrot-colored dot
// after the label text via ::after.
const Label = styled('label', {
display: 'inline-flex',
alignItems: 'center',
color: '$gray700',
typography: '$body2',
marginBottom: rem(8),
variants: {
required: {
true: {
'&::after': {
content: '""',
display: 'inline-block',
marginLeft: rem(4),
width: rem(6),
height: rem(6),
background: '$carrot500',
borderRadius: '50%',
},
},
},
},
});
/**
 * Base single-line input style: fixed height, rounded gray border, carrot
 * border on focus. Extended by TextInput/LongTextInput/FileInput/TermsInput
 * and Select below.
 */
const Input = styled('input', {
  display: 'inline-flex',
  alignItems: 'center',
  height: rem(52),
  border: '1px solid $gray400',
  borderRadius: rem(8),
  typography: '$body2',
  paddingX: rem(20),
  '&:focus': {
    border: '1px solid $carrot500',
  },
  // Fix: the selector must reference the component itself with `&` (as
  // '&:focus' does above). A bare '::placeholder' is treated as a
  // descendant selector, which can never match the input's own
  // placeholder, so the gray placeholder color was silently dropped.
  '&::placeholder': {
    color: '$gray500',
  },
});
// Plain single-line text input; no additions over the Input base yet.
const TextInput = styled(Input, {
});
// Multi-line input; rendered with as="textarea" by FormField.
const LongTextInput = styled(Input, {
minHeight: rem(104),
});
// Click-to-attach facade for the (visually hidden) native file input;
// rendered with as="div" and focused state driven by the sibling input.
const FileInput = styled(Input, {
display: 'flex',
color: '$gray500',
cursor: 'pointer',
transition: 'box-shadow .25s ease',
'input:focus + label > &': {
border: '1px solid $carrot500',
// Note: should use $carrot500 here, but a browser bug prevents the CSS
// variable from being applied, so the hex value is hard-coded.
boxShadow: '0 0 0 0.05em #fff, 0 0 0.15em 0.1em #ff7e36',
},
});
// Scrollable read-only box that displays the terms markup (rendered with
// as="div" and dangerouslySetInnerHTML by FormField).
const TermsInput = styled(Input, {
display: 'block',
height: rem(200),
paddingY: rem(16),
overflowY: 'auto',
color: '$gray700',
whiteSpace: 'pre-line',
'> p, ul': {
marginBottom: rem(16),
},
'> ul': {
paddingLeft: rem(16),
},
});
// Custom-styled select (rendered with as="select"): native appearance is
// suppressed and a triangular arrow is drawn with a clip-path ::after
// stacked in the same grid area.
// NOTE(review): ::before/::after are generally not rendered on replaced
// elements such as <select> in most browsers — confirm the arrow actually
// appears; the usual fix is to put the ::after on a wrapper element.
const Select = styled(Input, {
boxSizing: 'border-box',
display: 'grid',
gridTemplateAreas: '"select"',
gridArea: 'select',
alignItems: 'center',
appearance: 'none',
backgroundColor: '$white',
'&::-ms-expand': {
display: 'none',
},
'&::after': {
content: '""',
gridArea: 'select',
justifySelf: 'end',
width: '0.8em',
height: '0.5em',
clipPath: 'polygon(100% 0%, 0 0%, 50% 100%)',
background: '$gray500',
},
});
// Two-column layout: checkbox control on the left, label text on the right.
const CheckboxContainer = styled('label', {
display: 'inline-grid',
gridTemplateColumns: 'min-content auto',
gridGap: '0.5em',
});
// Grid that stacks the invisible native input and the drawn checkmark in
// the same cell.
const Checkbox = styled('span', {
display: 'grid',
gridTemplateAreas: '"checkbox"',
'& > *': {
gridArea: 'checkbox',
},
});
// The real <input type="checkbox">, made invisible but kept focusable so
// keyboard/AT behavior is native; sibling selectors below react to its
// :focus/:checked state.
const CheckboxControl = styled('input', {
opacity: 0,
width: '1em',
height: '1em',
});
const CheckboxLabel = styled('span', {
});
// The drawn box; shows a focus ring when the hidden input is focused.
const Checkmark = styled('span', {
width: '1.2em',
height: '1.2em',
border: '1px solid $gray500',
borderRadius: '0.3em',
color: '$carrot500',
transition: 'box-shadow .25s ease',
'input:focus + &': {
boxShadow: '0 0 0 0.05em #fff, 0 0 0.15em 0.1em currentColor',
},
});
// The tick SVG; scales from 0 to 1 when the hidden input is checked.
const CheckmarkSvg = styled('svg', {
transition: 'transform 0.1s ease-in 25ms',
transform: 'scale(0)',
transformOrigin: 'bottom left',
'input:checked + * > &': {
transform: 'scale(1)',
}
});
// Would prefer a <fieldset> here, but...
// See https://bugs.chromium.org/p/chromium/issues/detail?id=375693
const RadioGroupContainer = styled('div', {
display: 'grid',
border: 'none',
padding: 0,
});
// Would prefer a <legend> here, but...
// See https://bugs.chromium.org/p/chromium/issues/detail?id=375693
// Mirrors Label above (including the required-dot variant) as a <div>.
const RadioGroupLabel = styled('div', {
display: 'inline-flex',
alignItems: 'center',
color: '$gray700',
typography: '$body2',
marginBottom: rem(8),
variants: {
required: {
true: {
'&::after': {
content: '""',
display: 'inline-block',
marginLeft: rem(4),
width: rem(6),
height: rem(6),
background: '$carrot500',
borderRadius: '50%',
},
},
},
},
});
// Grid of radio options; column count is set inline per options length.
const RadioButtonContainer = styled('div', {
display: 'grid',
});
// One clickable option row: hidden native input + drawn Radiomark + text.
const RadioButton = styled('label', {
display: 'flex',
alignItems: 'center',
typography: '$body2',
color: '$gray700',
cursor: 'pointer',
});
// Native radio input with appearance removed; sibling selectors on
// Radiomark react to its :focus/:checked state.
const RadioButtonInput = styled('input', {
appearance: 'none',
});
/**
 * Drawn radio indicator: an outer ring whose inner ::after dot is filled
 * carrot-colored while the preceding (hidden) native radio input is
 * checked, with a focus ring driven by the input's :focus state.
 */
const Radiomark = styled('span', {
  display: 'grid',
  alignItems: 'center',
  justifyContent: 'center',
  width: rem(16),
  height: rem(16),
  borderRadius: '50%',
  border: '1px solid $gray500',
  marginRight: rem(14),
  color: '$carrot500',
  transition: 'box-shadow .25s ease',
  'input:focus + &': {
    boxShadow: '0 0 0 0.05em #fff, 0 0 0.15em 0.1em currentColor',
  },
  'input:checked + &': {
    borderColor: '$carrot500',
  },
  'input:checked + &::after': {
    background: '$carrot500',
  },
  '&::after': {
    // Fix: `content` requires a quoted string; the previous empty ''
    // produced invalid CSS (`content: ;`), so the ::after pseudo-element
    // never generated and the checked dot could not appear. '""' matches
    // the pattern used by every other pseudo-element in this file.
    content: '""',
    width: rem(10),
    height: rem(10),
    borderRadius: '50%',
  },
});
// Muted helper text rendered under a field when `description` is provided.
const Description = styled('p', {
color: '$gray600',
marginTop: rem(16),
});
const FormField: React.FC<FormFieldProps> = ({
variants,
name,
label,
className,
placeholder,
description,
children,
required = false,
}) => {
const id = React.useId();
const [filename, setFilename] = React.useState(placeholder);
const fileRef = React.useRef<HTMLInputElement>(null);
type ChangeHandler = NonNullable<PropOf<typeof Input, 'onChange'>>;
const handleFileChange: ChangeHandler = () => {
if (fileRef.current?.files?.[0]) {
setFilename(fileRef.current.files[0].name);
}
};
switch (variants.type) {
case 'text':
case 'email':
case 'tel': {
return (
<Container className={className}>
<Label htmlFor={id} required={required}>
{label}
</Label>
<TextInput
id={id}
name={name}
type={variants.type}
placeholder={placeholder}
/>
{description && (
<Description>{description}</Description>
)}
</Container>
);
}
case 'longtext': {
return (
<Container className={className}>
<Label htmlFor={id} required={required}>
{label}
</Label>
<LongTextInput
as="textarea"
id={id}
name={name}
placeholder={placeholder}
rows={5}
/>
{description && (
<Description>{description}</Description>
)}
</Container>
);
}
case 'select': {
return (
<Container className={className}>
<Label htmlFor={id} required={required}>
{label}
</Label>
<Select
as="select"
id={id}
name={name}
defaultValue={variants.defaultValue}
>
{variants.options.map(option => (
<option key={option.value} value={option.value}>
{option.label}
</option>
))}
</Select>
</Container>
);
}
case 'checkbox': {
return (
<CheckboxContainer className={className}>
<Checkbox>
<CheckboxControl type="checkbox" name={name} defaultChecked={variants.defaultChecked} />
<Checkmark>
<CheckmarkSvg viewBox="0 0 24 24" fill="none">
<rect width="24" height="24" rx="4" fill="none"/>
<rect width="24" height="24" rx="4" stroke="currentColor"/>
<path d="M18.4711 7.52876C18.7314 7.78911 18.7314 8.21122 18.4711 8.47157L10.4711 16.4716C10.2107 16.7319 9.78862 16.7319 9.52827 16.4716L5.52827 12.4716C5.26792 12.2112 5.26792 11.7891 5.52827 11.5288C5.78862 11.2684 6.21073 11.2684 6.47108 11.5288L9.99967 15.0574L17.5283 7.52876C17.7886 7.26841 18.2107 7.26841 18.4711 7.52876Z" fill="currentColor" />
</CheckmarkSvg>
</Checkmark>
</Checkbox>
<CheckboxLabel>{label}</CheckboxLabel>
</CheckboxContainer>
);
}
case 'radio': {
return (
<RadioGroupContainer
className={className}
aria-role="radiogroup"
>
{label && (
<RadioGroupLabel key="label" required={required}>
{label}
</RadioGroupLabel>
)}
<RadioButtonContainer
css={{
gridTemplateColumns: `repeat(${variants.options.length}, 1fr)`,
}}
>
{variants.options.map(option => (
<RadioButton key={option.value}>
<RadioButtonInput
type="radio"
name={name}
value={option.value}
defaultChecked={option.value === variants.defaultValue}
/>
<Radiomark />
{option.label}
</RadioButton>
))}
</RadioButtonContainer>
</RadioGroupContainer>
);
}
case 'file': {
return (
<Container className={className}>
<input
ref={fileRef}
id={id}
name={name}
type="file"
accept={variants.accepts.join(', ')}
onChange={handleFileChange}
style={{ opacity: 0, height: 0 }}
/>
<label htmlFor={id}>
<Label as="div" required={required} css={{ display: 'flex' }}>
{label}
</Label>
<FileInput as="div">
{filename || messages.attach_file}
</FileInput>
</label>
{description && (
<Description>{description}</Description>
)}
</Container>
);
}
case 'terms': {
return (
<Container className={className}>
<Label as="div" required={required}>
{label}
</Label>
<TermsInput as="div" dangerouslySetInnerHTML={{ __html: variants.terms }} />
{description && (
<Description>{description}</Description>
)}
</Container>
);
}
}
};
export default FormField; | the_stack |
import {
NamedType,
Argument,
Field,
InlineFragment,
Operation,
Selection,
SelectionSet,
Variable,
Executor,
Client,
TypeConditionError,
} from "../../src";
// NOTE(review): this section (constants, enums, and the interfaces below)
// appears to be machine-generated from a GraphQL schema — see SCHEMA_SHA.
// Prefer regenerating over hand-editing; confirm before modifying by hand.
export const VERSION = "unversioned";
export const SCHEMA_SHA = "12df668";
/** Unique constraints on the bookmarks table. */
export enum bookmarks_constraint {
idx_16652_primary = "idx_16652_primary",
}
/** Selectable columns of the bookmarks table. */
export enum bookmarks_select_column {
createdUtc = "createdUtc",
id = "id",
name = "name",
ownerUserId = "ownerUserId",
parent = "parent",
value = "value",
}
/** Updatable columns of the bookmarks table. */
export enum bookmarks_update_column {
createdUtc = "createdUtc",
id = "id",
name = "name",
ownerUserId = "ownerUserId",
parent = "parent",
value = "value",
}
/** Sort directions, including explicit null placement. */
export enum order_by {
asc = "asc",
asc_nulls_first = "asc_nulls_first",
asc_nulls_last = "asc_nulls_last",
desc = "desc",
desc_nulls_first = "desc_nulls_first",
desc_nulls_last = "desc_nulls_last",
}
/** Unique constraints on the playlist_items table. */
export enum playlist_items_constraint {
idx_16694_primary = "idx_16694_primary",
}
/** Selectable columns of the playlist_items table. */
export enum playlist_items_select_column {
createdUtc = "createdUtc",
id = "id",
playlistId = "playlistId",
position = "position",
trackId = "trackId",
}
/** Updatable columns of the playlist_items table. */
export enum playlist_items_update_column {
createdUtc = "createdUtc",
id = "id",
playlistId = "playlistId",
position = "position",
trackId = "trackId",
}
/** Unique constraints on the playlists table. */
export enum playlists_constraint {
idx_16687_primary = "idx_16687_primary",
}
/** Selectable columns of the playlists table. */
export enum playlists_select_column {
createdUtc = "createdUtc",
id = "id",
name = "name",
ownerUserId = "ownerUserId",
}
/** Updatable columns of the playlists table. */
export enum playlists_update_column {
createdUtc = "createdUtc",
id = "id",
name = "name",
ownerUserId = "ownerUserId",
}
/** Unique constraints on the tracks table. */
export enum tracks_constraint {
idx_16710_napsterid = "idx_16710_napsterid",
idx_16710_primary = "idx_16710_primary",
}
/** Selectable columns of the tracks table. */
export enum tracks_select_column {
createdUtc = "createdUtc",
id = "id",
name = "name",
napsterId = "napsterId",
}
/** Updatable columns of the tracks table. */
export enum tracks_update_column {
createdUtc = "createdUtc",
id = "id",
name = "name",
napsterId = "napsterId",
}
/** Unique constraints on the users table. */
export enum users_constraint {
idx_16717_primary = "idx_16717_primary",
}
/** Selectable columns of the users table. */
export enum users_select_column {
createdUtc = "createdUtc",
email = "email",
id = "id",
}
/** Updatable columns of the users table. */
export enum users_update_column {
createdUtc = "createdUtc",
email = "email",
id = "id",
}
/** Comparison operators applicable to integer columns in where-clauses. */
export interface Int_comparison_exp {
readonly _eq?: number;
readonly _gt?: number;
readonly _gte?: number;
readonly _in?: number[];
readonly _is_null?: boolean;
readonly _lt?: number;
readonly _lte?: number;
readonly _neq?: number;
readonly _nin?: number[];
}
/** Comparison operators applicable to string columns (incl. LIKE family). */
export interface String_comparison_exp {
readonly _eq?: string;
readonly _gt?: string;
readonly _gte?: string;
readonly _ilike?: string;
readonly _in?: string[];
readonly _is_null?: boolean;
readonly _like?: string;
readonly _lt?: string;
readonly _lte?: string;
readonly _neq?: string;
readonly _nilike?: string;
readonly _nin?: string[];
readonly _nlike?: string;
readonly _nsimilar?: string;
readonly _similar?: string;
}
/** Ordering by aggregate results (count/avg/min/max/stddev/...) over bookmarks. */
export interface bookmarks_aggregate_order_by {
readonly avg?: bookmarks_avg_order_by;
readonly count?: order_by;
readonly max?: bookmarks_max_order_by;
readonly min?: bookmarks_min_order_by;
readonly stddev?: bookmarks_stddev_order_by;
readonly stddev_pop?: bookmarks_stddev_pop_order_by;
readonly stddev_samp?: bookmarks_stddev_samp_order_by;
readonly sum?: bookmarks_sum_order_by;
readonly var_pop?: bookmarks_var_pop_order_by;
readonly var_samp?: bookmarks_var_samp_order_by;
readonly variance?: bookmarks_variance_order_by;
}
/** Insert payload for a bookmarks array relationship, with upsert handling. */
export interface bookmarks_arr_rel_insert_input {
readonly data: bookmarks_insert_input;
readonly on_conflict?: bookmarks_on_conflict;
}
/** Ordering by the avg aggregate of bookmarks' numeric columns. */
export interface bookmarks_avg_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Boolean filter expression over bookmarks rows (combinable via _and/_or/_not). */
export interface bookmarks_bool_exp {
readonly _and?: bookmarks_bool_exp[];
readonly _not?: bookmarks_bool_exp;
readonly _or?: bookmarks_bool_exp[];
readonly children?: bookmarks_bool_exp;
readonly createdUtc?: timestamp_comparison_exp;
readonly id?: Int_comparison_exp;
readonly name?: String_comparison_exp;
readonly owner?: users_bool_exp;
readonly ownerUserId?: Int_comparison_exp;
readonly parent?: Int_comparison_exp;
readonly parentBookmark?: bookmarks_bool_exp;
readonly value?: json_comparison_exp;
}
/** Increment values for bookmarks' numeric columns in an update. */
export interface bookmarks_inc_input {
readonly id?: number;
readonly ownerUserId?: number;
readonly parent?: number;
}
/** Insert payload for one bookmarks row, incl. nested relationship inserts. */
export interface bookmarks_insert_input {
readonly children?: bookmarks_arr_rel_insert_input;
readonly createdUtc?: unknown;
readonly id?: number;
readonly name?: string;
readonly owner?: users_obj_rel_insert_input;
readonly ownerUserId?: number;
readonly parent?: number;
readonly parentBookmark?: bookmarks_obj_rel_insert_input;
readonly value?: unknown;
}
/** Ordering by the max aggregate of bookmarks columns. */
export interface bookmarks_max_order_by {
readonly createdUtc?: order_by;
readonly id?: order_by;
readonly name?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Ordering by the min aggregate of bookmarks columns. */
export interface bookmarks_min_order_by {
readonly createdUtc?: order_by;
readonly id?: order_by;
readonly name?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Insert payload for a bookmarks object relationship, with upsert handling. */
export interface bookmarks_obj_rel_insert_input {
readonly data: bookmarks_insert_input;
readonly on_conflict?: bookmarks_on_conflict;
}
/** Upsert conflict policy for bookmarks inserts. */
export interface bookmarks_on_conflict {
readonly constraint: bookmarks_constraint;
// NOTE(review): update_columns is typed as a single enum value here;
// Hasura's on_conflict normally takes a *list* of columns — confirm this
// matches the generator's intent before relying on it.
readonly update_columns: bookmarks_update_column;
readonly where?: bookmarks_bool_exp;
}
/** Ordering directives for bookmarks queries, incl. related entities. */
export interface bookmarks_order_by {
readonly children_aggregate?: bookmarks_aggregate_order_by;
readonly createdUtc?: order_by;
readonly id?: order_by;
readonly name?: order_by;
readonly owner?: users_order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
readonly parentBookmark?: bookmarks_order_by;
readonly value?: order_by;
}
/** Primary-key selector for single-row bookmarks operations. */
export interface bookmarks_pk_columns_input {
readonly id: number;
}
/** Column values to set on a bookmarks update. */
export interface bookmarks_set_input {
readonly createdUtc?: unknown;
readonly id?: number;
readonly name?: string;
readonly ownerUserId?: number;
readonly parent?: number;
readonly value?: unknown;
}
/** Ordering by the stddev aggregate of bookmarks' numeric columns. */
export interface bookmarks_stddev_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Ordering by the stddev_pop aggregate of bookmarks' numeric columns. */
export interface bookmarks_stddev_pop_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Ordering by the stddev_samp aggregate of bookmarks' numeric columns. */
export interface bookmarks_stddev_samp_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Ordering by the sum aggregate of bookmarks' numeric columns. */
export interface bookmarks_sum_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Ordering by the var_pop aggregate of bookmarks' numeric columns. */
export interface bookmarks_var_pop_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Ordering by the var_samp aggregate of bookmarks' numeric columns. */
export interface bookmarks_var_samp_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Ordering by the variance aggregate of bookmarks' numeric columns. */
export interface bookmarks_variance_order_by {
readonly id?: order_by;
readonly ownerUserId?: order_by;
readonly parent?: order_by;
}
/** Comparison operators applicable to json columns (values are opaque). */
export interface json_comparison_exp {
readonly _eq?: unknown;
readonly _gt?: unknown;
readonly _gte?: unknown;
readonly _in?: unknown[];
readonly _is_null?: boolean;
readonly _lt?: unknown;
readonly _lte?: unknown;
readonly _neq?: unknown;
readonly _nin?: unknown[];
}
/** Ordering by aggregate results over playlist_items. */
export interface playlist_items_aggregate_order_by {
readonly avg?: playlist_items_avg_order_by;
readonly count?: order_by;
readonly max?: playlist_items_max_order_by;
readonly min?: playlist_items_min_order_by;
readonly stddev?: playlist_items_stddev_order_by;
readonly stddev_pop?: playlist_items_stddev_pop_order_by;
readonly stddev_samp?: playlist_items_stddev_samp_order_by;
readonly sum?: playlist_items_sum_order_by;
readonly var_pop?: playlist_items_var_pop_order_by;
readonly var_samp?: playlist_items_var_samp_order_by;
readonly variance?: playlist_items_variance_order_by;
}
/** Insert payload for a playlist_items array relationship, with upsert handling. */
export interface playlist_items_arr_rel_insert_input {
readonly data: playlist_items_insert_input;
readonly on_conflict?: playlist_items_on_conflict;
}
/** Ordering by the avg aggregate of playlist_items' numeric columns. */
export interface playlist_items_avg_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Boolean filter expression over playlist_items rows. */
export interface playlist_items_bool_exp {
readonly _and?: playlist_items_bool_exp[];
readonly _not?: playlist_items_bool_exp;
readonly _or?: playlist_items_bool_exp[];
readonly createdUtc?: timestamp_comparison_exp;
readonly id?: Int_comparison_exp;
readonly playlistId?: Int_comparison_exp;
readonly position?: Int_comparison_exp;
readonly trackId?: Int_comparison_exp;
}
/** Increment values for playlist_items' numeric columns in an update. */
export interface playlist_items_inc_input {
readonly id?: number;
readonly playlistId?: number;
readonly position?: number;
readonly trackId?: number;
}
/** Insert payload for one playlist_items row. */
export interface playlist_items_insert_input {
readonly createdUtc?: unknown;
readonly id?: number;
readonly playlistId?: number;
readonly position?: number;
readonly trackId?: number;
}
/** Ordering by the max aggregate of playlist_items columns. */
export interface playlist_items_max_order_by {
readonly createdUtc?: order_by;
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Ordering by the min aggregate of playlist_items columns. */
export interface playlist_items_min_order_by {
readonly createdUtc?: order_by;
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Insert payload for a playlist_items object relationship, with upsert handling. */
export interface playlist_items_obj_rel_insert_input {
readonly data: playlist_items_insert_input;
readonly on_conflict?: playlist_items_on_conflict;
}
/** Upsert conflict policy for playlist_items inserts. */
export interface playlist_items_on_conflict {
readonly constraint: playlist_items_constraint;
// NOTE(review): single enum value, not a list — same concern as
// bookmarks_on_conflict.update_columns; confirm against the generator.
readonly update_columns: playlist_items_update_column;
readonly where?: playlist_items_bool_exp;
}
/** Ordering directives for playlist_items queries. */
export interface playlist_items_order_by {
readonly createdUtc?: order_by;
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Primary-key selector for single-row playlist_items operations. */
export interface playlist_items_pk_columns_input {
readonly id: number;
}
/** Column values to set on a playlist_items update. */
export interface playlist_items_set_input {
readonly createdUtc?: unknown;
readonly id?: number;
readonly playlistId?: number;
readonly position?: number;
readonly trackId?: number;
}
/** Ordering by the stddev aggregate of playlist_items' numeric columns. */
export interface playlist_items_stddev_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Ordering by the stddev_pop aggregate of playlist_items' numeric columns. */
export interface playlist_items_stddev_pop_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Ordering by the stddev_samp aggregate of playlist_items' numeric columns. */
export interface playlist_items_stddev_samp_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Ordering by the sum aggregate of playlist_items' numeric columns. */
export interface playlist_items_sum_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Ordering by the var_pop aggregate of playlist_items' numeric columns. */
export interface playlist_items_var_pop_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Ordering by the var_samp aggregate of playlist_items' numeric columns. */
export interface playlist_items_var_samp_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
/** Ordering by the variance aggregate of playlist_items' numeric columns. */
export interface playlist_items_variance_order_by {
readonly id?: order_by;
readonly playlistId?: order_by;
readonly position?: order_by;
readonly trackId?: order_by;
}
// Ordering options for aggregates computed over the playlists relationship.
export interface playlists_aggregate_order_by {
  readonly avg?: playlists_avg_order_by;
  readonly count?: order_by;
  readonly max?: playlists_max_order_by;
  readonly min?: playlists_min_order_by;
  readonly stddev?: playlists_stddev_order_by;
  readonly stddev_pop?: playlists_stddev_pop_order_by;
  readonly stddev_samp?: playlists_stddev_samp_order_by;
  readonly sum?: playlists_sum_order_by;
  readonly var_pop?: playlists_var_pop_order_by;
  readonly var_samp?: playlists_var_samp_order_by;
  readonly variance?: playlists_variance_order_by;
}
// Input for inserting playlists rows through an array relationship.
// NOTE(review): `data` is singular here despite the _arr_ name — matches the
// sibling *_arr_rel_insert_input shapes in this file; confirm against schema.
export interface playlists_arr_rel_insert_input {
  readonly data: playlists_insert_input;
  readonly on_conflict?: playlists_on_conflict;
}
// Ordering options for avg() aggregates over playlists numeric columns.
export interface playlists_avg_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Boolean filter expression over playlists rows (combinable with _and/_or/_not).
export interface playlists_bool_exp {
  readonly _and?: playlists_bool_exp[];
  readonly _not?: playlists_bool_exp;
  readonly _or?: playlists_bool_exp[];
  readonly createdUtc?: timestamptz_comparison_exp;
  readonly id?: Int_comparison_exp;
  readonly name?: String_comparison_exp;
  readonly ownerUserId?: Int_comparison_exp;
}
// Columns incrementable by a playlists update mutation.
export interface playlists_inc_input {
  readonly id?: number;
  readonly ownerUserId?: number;
}
// Columns settable when inserting a playlists row.
export interface playlists_insert_input {
  readonly createdUtc?: unknown;
  readonly id?: number;
  readonly name?: string;
  readonly ownerUserId?: number;
}
// Ordering options for max() aggregates over playlists columns.
export interface playlists_max_order_by {
  readonly createdUtc?: order_by;
  readonly id?: order_by;
  readonly name?: order_by;
  readonly ownerUserId?: order_by;
}
// Ordering options for min() aggregates over playlists columns.
export interface playlists_min_order_by {
  readonly createdUtc?: order_by;
  readonly id?: order_by;
  readonly name?: order_by;
  readonly ownerUserId?: order_by;
}
// Input for inserting a single playlists row through an object relationship.
export interface playlists_obj_rel_insert_input {
  readonly data: playlists_insert_input;
  readonly on_conflict?: playlists_on_conflict;
}
// Upsert conflict handling for playlists inserts.
export interface playlists_on_conflict {
  readonly constraint: playlists_constraint;
  readonly update_columns: playlists_update_column;
  readonly where?: playlists_bool_exp;
}
// Per-column ordering for playlists query results.
export interface playlists_order_by {
  readonly createdUtc?: order_by;
  readonly id?: order_by;
  readonly name?: order_by;
  readonly ownerUserId?: order_by;
}
// Primary-key lookup input for playlists mutations.
export interface playlists_pk_columns_input {
  readonly id: number;
}
// Columns settable by a playlists update mutation.
export interface playlists_set_input {
  readonly createdUtc?: unknown;
  readonly id?: number;
  readonly name?: string;
  readonly ownerUserId?: number;
}
// Ordering options for stddev() aggregates over playlists numeric columns.
export interface playlists_stddev_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Ordering options for stddev_pop() aggregates over playlists numeric columns.
export interface playlists_stddev_pop_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Ordering options for stddev_samp() aggregates over playlists numeric columns.
export interface playlists_stddev_samp_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Ordering options for sum() aggregates over playlists numeric columns.
export interface playlists_sum_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Ordering options for var_pop() aggregates over playlists numeric columns.
export interface playlists_var_pop_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Ordering options for var_samp() aggregates over playlists numeric columns.
export interface playlists_var_samp_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Ordering options for variance() aggregates over playlists numeric columns.
export interface playlists_variance_order_by {
  readonly id?: order_by;
  readonly ownerUserId?: order_by;
}
// Comparison operators for the `timestamp` scalar in boolean filter expressions.
// Values are `unknown` because the serialized timestamp format is opaque to
// this generated layer.
export interface timestamp_comparison_exp {
  readonly _eq?: unknown;
  readonly _gt?: unknown;
  readonly _gte?: unknown;
  readonly _in?: unknown[];
  readonly _is_null?: boolean;
  readonly _lt?: unknown;
  readonly _lte?: unknown;
  readonly _neq?: unknown;
  readonly _nin?: unknown[];
}
// Comparison operators for the `timestamptz` scalar in boolean filter expressions.
export interface timestamptz_comparison_exp {
  readonly _eq?: unknown;
  readonly _gt?: unknown;
  readonly _gte?: unknown;
  readonly _in?: unknown[];
  readonly _is_null?: boolean;
  readonly _lt?: unknown;
  readonly _lte?: unknown;
  readonly _neq?: unknown;
  readonly _nin?: unknown[];
}
// Ordering options for aggregates computed over the tracks relationship.
export interface tracks_aggregate_order_by {
  readonly avg?: tracks_avg_order_by;
  readonly count?: order_by;
  readonly max?: tracks_max_order_by;
  readonly min?: tracks_min_order_by;
  readonly stddev?: tracks_stddev_order_by;
  readonly stddev_pop?: tracks_stddev_pop_order_by;
  readonly stddev_samp?: tracks_stddev_samp_order_by;
  readonly sum?: tracks_sum_order_by;
  readonly var_pop?: tracks_var_pop_order_by;
  readonly var_samp?: tracks_var_samp_order_by;
  readonly variance?: tracks_variance_order_by;
}
// Input for inserting tracks rows through an array relationship.
export interface tracks_arr_rel_insert_input {
  readonly data: tracks_insert_input;
  readonly on_conflict?: tracks_on_conflict;
}
// Ordering options for avg() aggregates over tracks numeric columns.
export interface tracks_avg_order_by {
  readonly id?: order_by;
}
// Boolean filter expression over tracks rows (combinable with _and/_or/_not).
export interface tracks_bool_exp {
  readonly _and?: tracks_bool_exp[];
  readonly _not?: tracks_bool_exp;
  readonly _or?: tracks_bool_exp[];
  readonly createdUtc?: timestamp_comparison_exp;
  readonly id?: Int_comparison_exp;
  readonly name?: String_comparison_exp;
  readonly napsterId?: String_comparison_exp;
}
// Columns incrementable by a tracks update mutation.
export interface tracks_inc_input {
  readonly id?: number;
}
// Columns settable when inserting a tracks row.
export interface tracks_insert_input {
  readonly createdUtc?: unknown;
  readonly id?: number;
  readonly name?: string;
  readonly napsterId?: string;
}
// Ordering options for max() aggregates over tracks columns.
export interface tracks_max_order_by {
  readonly createdUtc?: order_by;
  readonly id?: order_by;
  readonly name?: order_by;
  readonly napsterId?: order_by;
}
// Ordering options for min() aggregates over tracks columns.
export interface tracks_min_order_by {
  readonly createdUtc?: order_by;
  readonly id?: order_by;
  readonly name?: order_by;
  readonly napsterId?: order_by;
}
// Input for inserting a single tracks row through an object relationship.
export interface tracks_obj_rel_insert_input {
  readonly data: tracks_insert_input;
  readonly on_conflict?: tracks_on_conflict;
}
// Upsert conflict handling for tracks inserts.
export interface tracks_on_conflict {
  readonly constraint: tracks_constraint;
  readonly update_columns: tracks_update_column;
  readonly where?: tracks_bool_exp;
}
// Per-column ordering for tracks query results.
export interface tracks_order_by {
  readonly createdUtc?: order_by;
  readonly id?: order_by;
  readonly name?: order_by;
  readonly napsterId?: order_by;
}
// Primary-key lookup input for tracks mutations.
export interface tracks_pk_columns_input {
  readonly id: number;
}
// Columns settable by a tracks update mutation.
export interface tracks_set_input {
  readonly createdUtc?: unknown;
  readonly id?: number;
  readonly name?: string;
  readonly napsterId?: string;
}
// Ordering options for stddev() aggregates over tracks numeric columns.
export interface tracks_stddev_order_by {
  readonly id?: order_by;
}
// Ordering options for stddev_pop() aggregates over tracks numeric columns.
export interface tracks_stddev_pop_order_by {
  readonly id?: order_by;
}
// Ordering options for stddev_samp() aggregates over tracks numeric columns.
export interface tracks_stddev_samp_order_by {
  readonly id?: order_by;
}
// Ordering options for sum() aggregates over tracks numeric columns.
export interface tracks_sum_order_by {
  readonly id?: order_by;
}
// Ordering options for var_pop() aggregates over tracks numeric columns.
export interface tracks_var_pop_order_by {
  readonly id?: order_by;
}
// Ordering options for var_samp() aggregates over tracks numeric columns.
export interface tracks_var_samp_order_by {
  readonly id?: order_by;
}
// Ordering options for variance() aggregates over tracks numeric columns.
export interface tracks_variance_order_by {
  readonly id?: order_by;
}
// Ordering options for aggregates computed over the users relationship.
export interface users_aggregate_order_by {
  readonly avg?: users_avg_order_by;
  readonly count?: order_by;
  readonly max?: users_max_order_by;
  readonly min?: users_min_order_by;
  readonly stddev?: users_stddev_order_by;
  readonly stddev_pop?: users_stddev_pop_order_by;
  readonly stddev_samp?: users_stddev_samp_order_by;
  readonly sum?: users_sum_order_by;
  readonly var_pop?: users_var_pop_order_by;
  readonly var_samp?: users_var_samp_order_by;
  readonly variance?: users_variance_order_by;
}
// Input for inserting users rows through an array relationship.
export interface users_arr_rel_insert_input {
  readonly data: users_insert_input;
  readonly on_conflict?: users_on_conflict;
}
// Ordering options for avg() aggregates over users numeric columns.
export interface users_avg_order_by {
  readonly id?: order_by;
}
// Boolean filter expression over users rows; supports filtering through the
// bookmarks and playlists relationships.
export interface users_bool_exp {
  readonly _and?: users_bool_exp[];
  readonly _not?: users_bool_exp;
  readonly _or?: users_bool_exp[];
  readonly bookmarks?: bookmarks_bool_exp;
  readonly createdUtc?: timestamp_comparison_exp;
  readonly email?: String_comparison_exp;
  readonly id?: Int_comparison_exp;
  readonly playlists?: playlists_bool_exp;
}
// Columns incrementable by a users update mutation.
export interface users_inc_input {
  readonly id?: number;
}
// Columns settable when inserting a users row; nested bookmarks/playlists
// may be inserted in the same mutation.
export interface users_insert_input {
  readonly bookmarks?: bookmarks_arr_rel_insert_input;
  readonly createdUtc?: unknown;
  readonly email?: string;
  readonly id?: number;
  readonly playlists?: playlists_arr_rel_insert_input;
}
// Ordering options for max() aggregates over users columns.
export interface users_max_order_by {
  readonly createdUtc?: order_by;
  readonly email?: order_by;
  readonly id?: order_by;
}
// Ordering options for min() aggregates over users columns.
export interface users_min_order_by {
  readonly createdUtc?: order_by;
  readonly email?: order_by;
  readonly id?: order_by;
}
// Input for inserting a single users row through an object relationship.
export interface users_obj_rel_insert_input {
  readonly data: users_insert_input;
  readonly on_conflict?: users_on_conflict;
}
// Upsert conflict handling for users inserts.
export interface users_on_conflict {
  readonly constraint: users_constraint;
  readonly update_columns: users_update_column;
  readonly where?: users_bool_exp;
}
// Per-column ordering for users query results, including relationship aggregates.
export interface users_order_by {
  readonly bookmarks_aggregate?: bookmarks_aggregate_order_by;
  readonly createdUtc?: order_by;
  readonly email?: order_by;
  readonly id?: order_by;
  readonly playlists_aggregate?: playlists_aggregate_order_by;
}
// Primary-key lookup input for users mutations.
export interface users_pk_columns_input {
  readonly id: number;
}
// Columns settable by a users update mutation.
export interface users_set_input {
  readonly createdUtc?: unknown;
  readonly email?: string;
  readonly id?: number;
}
// Ordering options for stddev() aggregates over users numeric columns.
export interface users_stddev_order_by {
  readonly id?: order_by;
}
// Ordering options for stddev_pop() aggregates over users numeric columns.
export interface users_stddev_pop_order_by {
  readonly id?: order_by;
}
// Ordering options for stddev_samp() aggregates over users numeric columns.
export interface users_stddev_samp_order_by {
  readonly id?: order_by;
}
// Ordering options for sum() aggregates over users numeric columns.
export interface users_sum_order_by {
  readonly id?: order_by;
}
// Ordering options for var_pop() aggregates over users numeric columns.
export interface users_var_pop_order_by {
  readonly id?: order_by;
}
// Ordering options for var_samp() aggregates over users numeric columns.
export interface users_var_samp_order_by {
  readonly id?: order_by;
}
// Ordering options for variance() aggregates over users numeric columns.
export interface users_variance_order_by {
  readonly id?: order_by;
}
// Result shape of a `bookmarks` row, including the self-referential
// children/parentBookmark relationships and the owning user.
export interface Ibookmarks {
  readonly __typename: "bookmarks";
  readonly children: ReadonlyArray<Ibookmarks>;
  readonly children_aggregate: Ibookmarks_aggregate;
  readonly createdUtc: unknown | null;
  readonly id: number;
  readonly name: string;
  readonly owner: Iusers;
  readonly ownerUserId: number;
  readonly parent: number | null;
  readonly parentBookmark: Ibookmarks | null;
  readonly value: unknown;
}
// Typed field-selector contract for building `bookmarks` selections.
// Each property returns a Field describing one GraphQL field; relationship
// fields take a nested selector callback.
interface bookmarksSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description An array relationship
   */
  readonly children: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | bookmarks_order_by;
      where?: Variable<"where"> | bookmarks_bool_exp;
    },
    select: (t: bookmarksSelector) => T
  ) => Field<
    "children",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | bookmarks_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
      Argument<"where", Variable<"where"> | bookmarks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description An aggregated array relationship
   */
  readonly children_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | bookmarks_order_by;
      where?: Variable<"where"> | bookmarks_bool_exp;
    },
    select: (t: bookmarks_aggregateSelector) => T
  ) => Field<
    "children_aggregate",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | bookmarks_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
      Argument<"where", Variable<"where"> | bookmarks_bool_exp>
    ],
    SelectionSet<T>
  >;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  /**
   * @description An object relationship
   */
  readonly owner: <T extends Array<Selection>>(
    select: (t: usersSelector) => T
  ) => Field<"owner", never, SelectionSet<T>>;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
  /**
   * @description An object relationship
   */
  readonly parentBookmark: <T extends Array<Selection>>(
    select: (t: bookmarksSelector) => T
  ) => Field<"parentBookmark", never, SelectionSet<T>>;
  // JSON column accessor; `path` extracts a sub-path of the JSON value.
  readonly value: (variables: {
    path?: Variable<"path"> | string;
  }) => Field<"value", [Argument<"path", Variable<"path"> | string>]>;
}
/**
 * Runtime selector for the `bookmarks` table. Builds Field/Argument trees
 * matching the bookmarksSelector contract above.
 */
export const bookmarks: bookmarksSelector = {
  __typename: () => new Field("__typename"),
  /**
   * @description An array relationship
   */
  children: (variables, select) =>
    new Field(
      "children",
      [
        new Argument(
          "distinct_on",
          variables.distinct_on,
          bookmarks_select_column
        ),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(bookmarks))
    ),
  /**
   * @description An aggregated array relationship
   */
  children_aggregate: (variables, select) =>
    new Field(
      "children_aggregate",
      [
        new Argument(
          "distinct_on",
          variables.distinct_on,
          bookmarks_select_column
        ),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(bookmarks_aggregate))
    ),
  createdUtc: () => new Field("createdUtc"),
  id: () => new Field("id"),
  name: () => new Field("name"),
  /**
   * @description An object relationship
   */
  owner: (select) =>
    new Field("owner", undefined as never, new SelectionSet(select(users))),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
  /**
   * @description An object relationship
   */
  parentBookmark: (select) =>
    new Field(
      "parentBookmark",
      undefined as never,
      new SelectionSet(select(bookmarks))
    ),
  // FIX: the original discarded `variables`, so a caller-supplied JSON `path`
  // never reached the emitted query even though the selector type declares
  // Argument<"path", ...>. Forward it, mirroring the other argument builders.
  value: (variables) =>
    new Field("value", [new Argument("path", variables.path)]),
};
// Result shape of a bookmarks aggregate query: aggregate metrics plus the
// underlying matching rows.
export interface Ibookmarks_aggregate {
  readonly __typename: "bookmarks_aggregate";
  readonly aggregate: Ibookmarks_aggregate_fields | null;
  readonly nodes: ReadonlyArray<Ibookmarks>;
}
// Field-selector contract for bookmarks aggregate results.
interface bookmarks_aggregateSelector {
  readonly __typename: () => Field<"__typename">;
  readonly aggregate: <T extends Array<Selection>>(
    select: (t: bookmarks_aggregate_fieldsSelector) => T
  ) => Field<"aggregate", never, SelectionSet<T>>;
  readonly nodes: <T extends Array<Selection>>(
    select: (t: bookmarksSelector) => T
  ) => Field<"nodes", never, SelectionSet<T>>;
}
// Runtime selector for bookmarks aggregate results.
export const bookmarks_aggregate: bookmarks_aggregateSelector = {
  __typename: () => new Field("__typename"),
  aggregate: (select) =>
    new Field(
      "aggregate",
      undefined as never,
      new SelectionSet(select(bookmarks_aggregate_fields))
    ),
  nodes: (select) =>
    new Field("nodes", undefined as never, new SelectionSet(select(bookmarks))),
};
// Result shape of the aggregate metrics available over bookmarks rows.
export interface Ibookmarks_aggregate_fields {
  readonly __typename: "bookmarks_aggregate_fields";
  readonly avg: Ibookmarks_avg_fields | null;
  readonly count: number | null;
  readonly max: Ibookmarks_max_fields | null;
  readonly min: Ibookmarks_min_fields | null;
  readonly stddev: Ibookmarks_stddev_fields | null;
  readonly stddev_pop: Ibookmarks_stddev_pop_fields | null;
  readonly stddev_samp: Ibookmarks_stddev_samp_fields | null;
  readonly sum: Ibookmarks_sum_fields | null;
  readonly var_pop: Ibookmarks_var_pop_fields | null;
  readonly var_samp: Ibookmarks_var_samp_fields | null;
  readonly variance: Ibookmarks_variance_fields | null;
}
// Field-selector contract for bookmarks aggregate metrics; count() accepts
// optional columns/distinct arguments.
interface bookmarks_aggregate_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly avg: <T extends Array<Selection>>(
    select: (t: bookmarks_avg_fieldsSelector) => T
  ) => Field<"avg", never, SelectionSet<T>>;
  readonly count: (variables: {
    columns?: Variable<"columns"> | bookmarks_select_column;
    distinct?: Variable<"distinct"> | boolean;
  }) => Field<
    "count",
    [
      Argument<"columns", Variable<"columns"> | bookmarks_select_column>,
      Argument<"distinct", Variable<"distinct"> | boolean>
    ]
  >;
  readonly max: <T extends Array<Selection>>(
    select: (t: bookmarks_max_fieldsSelector) => T
  ) => Field<"max", never, SelectionSet<T>>;
  readonly min: <T extends Array<Selection>>(
    select: (t: bookmarks_min_fieldsSelector) => T
  ) => Field<"min", never, SelectionSet<T>>;
  readonly stddev: <T extends Array<Selection>>(
    select: (t: bookmarks_stddev_fieldsSelector) => T
  ) => Field<"stddev", never, SelectionSet<T>>;
  readonly stddev_pop: <T extends Array<Selection>>(
    select: (t: bookmarks_stddev_pop_fieldsSelector) => T
  ) => Field<"stddev_pop", never, SelectionSet<T>>;
  readonly stddev_samp: <T extends Array<Selection>>(
    select: (t: bookmarks_stddev_samp_fieldsSelector) => T
  ) => Field<"stddev_samp", never, SelectionSet<T>>;
  readonly sum: <T extends Array<Selection>>(
    select: (t: bookmarks_sum_fieldsSelector) => T
  ) => Field<"sum", never, SelectionSet<T>>;
  readonly var_pop: <T extends Array<Selection>>(
    select: (t: bookmarks_var_pop_fieldsSelector) => T
  ) => Field<"var_pop", never, SelectionSet<T>>;
  readonly var_samp: <T extends Array<Selection>>(
    select: (t: bookmarks_var_samp_fieldsSelector) => T
  ) => Field<"var_samp", never, SelectionSet<T>>;
  readonly variance: <T extends Array<Selection>>(
    select: (t: bookmarks_variance_fieldsSelector) => T
  ) => Field<"variance", never, SelectionSet<T>>;
}
export const bookmarks_aggregate_fields: bookmarks_aggregate_fieldsSelector = {
__typename: () => new Field("__typename"),
avg: (select) =>
new Field(
"avg",
undefined as never,
new SelectionSet(select(bookmarks_avg_fields))
),
count: (variables) => new Field("count"),
max: (select) =>
new Field(
"max",
undefined as never,
new SelectionSet(select(bookmarks_max_fields))
),
min: (select) =>
new Field(
"min",
undefined as never,
new SelectionSet(select(bookmarks_min_fields))
),
stddev: (select) =>
new Field(
"stddev",
undefined as never,
new SelectionSet(select(bookmarks_stddev_fields))
),
stddev_pop: (select) =>
new Field(
"stddev_pop",
undefined as never,
new SelectionSet(select(bookmarks_stddev_pop_fields))
),
stddev_samp: (select) =>
new Field(
"stddev_samp",
undefined as never,
new SelectionSet(select(bookmarks_stddev_samp_fields))
),
sum: (select) =>
new Field(
"sum",
undefined as never,
new SelectionSet(select(bookmarks_sum_fields))
),
var_pop: (select) =>
new Field(
"var_pop",
undefined as never,
new SelectionSet(select(bookmarks_var_pop_fields))
),
var_samp: (select) =>
new Field(
"var_samp",
undefined as never,
new SelectionSet(select(bookmarks_var_samp_fields))
),
variance: (select) =>
new Field(
"variance",
undefined as never,
new SelectionSet(select(bookmarks_variance_fields))
),
};
// Result shape of avg() over bookmarks numeric columns.
export interface Ibookmarks_avg_fields {
  readonly __typename: "bookmarks_avg_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks avg() results.
interface bookmarks_avg_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks avg() results.
export const bookmarks_avg_fields: bookmarks_avg_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of max() over bookmarks columns.
export interface Ibookmarks_max_fields {
  readonly __typename: "bookmarks_max_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly name: string | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks max() results.
interface bookmarks_max_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks max() results.
export const bookmarks_max_fields: bookmarks_max_fieldsSelector = {
  __typename: () => new Field("__typename"),
  createdUtc: () => new Field("createdUtc"),
  id: () => new Field("id"),
  name: () => new Field("name"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of min() over bookmarks columns.
export interface Ibookmarks_min_fields {
  readonly __typename: "bookmarks_min_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly name: string | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks min() results.
interface bookmarks_min_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks min() results.
export const bookmarks_min_fields: bookmarks_min_fieldsSelector = {
  __typename: () => new Field("__typename"),
  createdUtc: () => new Field("createdUtc"),
  id: () => new Field("id"),
  name: () => new Field("name"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of a bookmarks mutation: affected row count plus the rows.
export interface Ibookmarks_mutation_response {
  readonly __typename: "bookmarks_mutation_response";
  readonly affected_rows: number;
  readonly returning: ReadonlyArray<Ibookmarks>;
}
// Field-selector contract for bookmarks mutation responses.
interface bookmarks_mutation_responseSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description number of affected rows by the mutation
   */
  readonly affected_rows: () => Field<"affected_rows">;
  /**
   * @description data of the affected rows by the mutation
   */
  readonly returning: <T extends Array<Selection>>(
    select: (t: bookmarksSelector) => T
  ) => Field<"returning", never, SelectionSet<T>>;
}
// Runtime selector for bookmarks mutation responses.
export const bookmarks_mutation_response: bookmarks_mutation_responseSelector = {
  __typename: () => new Field("__typename"),
  /**
   * @description number of affected rows by the mutation
   */
  affected_rows: () => new Field("affected_rows"),
  /**
   * @description data of the affected rows by the mutation
   */
  returning: (select) =>
    new Field(
      "returning",
      undefined as never,
      new SelectionSet(select(bookmarks))
    ),
};
// Result shape of stddev() over bookmarks numeric columns.
export interface Ibookmarks_stddev_fields {
  readonly __typename: "bookmarks_stddev_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks stddev() results.
interface bookmarks_stddev_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks stddev() results.
export const bookmarks_stddev_fields: bookmarks_stddev_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of stddev_pop() over bookmarks numeric columns.
export interface Ibookmarks_stddev_pop_fields {
  readonly __typename: "bookmarks_stddev_pop_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks stddev_pop() results.
interface bookmarks_stddev_pop_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks stddev_pop() results.
export const bookmarks_stddev_pop_fields: bookmarks_stddev_pop_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of stddev_samp() over bookmarks numeric columns.
export interface Ibookmarks_stddev_samp_fields {
  readonly __typename: "bookmarks_stddev_samp_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks stddev_samp() results.
interface bookmarks_stddev_samp_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks stddev_samp() results.
export const bookmarks_stddev_samp_fields: bookmarks_stddev_samp_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of sum() over bookmarks numeric columns.
export interface Ibookmarks_sum_fields {
  readonly __typename: "bookmarks_sum_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks sum() results.
interface bookmarks_sum_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks sum() results.
export const bookmarks_sum_fields: bookmarks_sum_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of var_pop() over bookmarks numeric columns.
export interface Ibookmarks_var_pop_fields {
  readonly __typename: "bookmarks_var_pop_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks var_pop() results.
interface bookmarks_var_pop_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks var_pop() results.
export const bookmarks_var_pop_fields: bookmarks_var_pop_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of var_samp() over bookmarks numeric columns.
export interface Ibookmarks_var_samp_fields {
  readonly __typename: "bookmarks_var_samp_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks var_samp() results.
interface bookmarks_var_samp_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks var_samp() results.
export const bookmarks_var_samp_fields: bookmarks_var_samp_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of variance() over bookmarks numeric columns.
export interface Ibookmarks_variance_fields {
  readonly __typename: "bookmarks_variance_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
  readonly parent: number | null;
}
// Field-selector contract for bookmarks variance() results.
interface bookmarks_variance_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
  readonly parent: () => Field<"parent">;
}
// Runtime selector for bookmarks variance() results.
export const bookmarks_variance_fields: bookmarks_variance_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
  parent: () => new Field("parent"),
};
// Result shape of the schema's mutation root: delete/insert/update entry
// points for every table, each returning either a mutation_response
// (bulk form) or a single row (by_pk / _one form), null when nothing matched.
export interface Imutation_root {
  readonly __typename: "mutation_root";
  readonly delete_bookmarks: Ibookmarks_mutation_response | null;
  readonly delete_bookmarks_by_pk: Ibookmarks | null;
  readonly delete_playlist_items: Iplaylist_items_mutation_response | null;
  readonly delete_playlist_items_by_pk: Iplaylist_items | null;
  readonly delete_playlists: Iplaylists_mutation_response | null;
  readonly delete_playlists_by_pk: Iplaylists | null;
  readonly delete_tracks: Itracks_mutation_response | null;
  readonly delete_tracks_by_pk: Itracks | null;
  readonly delete_users: Iusers_mutation_response | null;
  readonly delete_users_by_pk: Iusers | null;
  readonly insert_bookmarks: Ibookmarks_mutation_response | null;
  readonly insert_bookmarks_one: Ibookmarks | null;
  readonly insert_playlist_items: Iplaylist_items_mutation_response | null;
  readonly insert_playlist_items_one: Iplaylist_items | null;
  readonly insert_playlists: Iplaylists_mutation_response | null;
  readonly insert_playlists_one: Iplaylists | null;
  readonly insert_tracks: Itracks_mutation_response | null;
  readonly insert_tracks_one: Itracks | null;
  readonly insert_users: Iusers_mutation_response | null;
  readonly insert_users_one: Iusers | null;
  readonly update_bookmarks: Ibookmarks_mutation_response | null;
  readonly update_bookmarks_by_pk: Ibookmarks | null;
  readonly update_playlist_items: Iplaylist_items_mutation_response | null;
  readonly update_playlist_items_by_pk: Iplaylist_items | null;
  readonly update_playlists: Iplaylists_mutation_response | null;
  readonly update_playlists_by_pk: Iplaylists | null;
  readonly update_tracks: Itracks_mutation_response | null;
  readonly update_tracks_by_pk: Itracks | null;
  readonly update_users: Iusers_mutation_response | null;
  readonly update_users_by_pk: Iusers | null;
}
/**
 * Typed builder contract for the mutation root. Each member takes the
 * mutation's variables (every argument accepts either a concrete input
 * object or a `Variable` placeholder) plus a `select` callback over the
 * result's selector, and yields a `Field` whose generic parameters record
 * the field name, its `Argument` list, and the nested `SelectionSet`.
 */
interface mutation_rootSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description delete data from the table: "bookmarks"
   */
  readonly delete_bookmarks: <T extends Array<Selection>>(
    variables: { where?: Variable<"where"> | bookmarks_bool_exp },
    select: (t: bookmarks_mutation_responseSelector) => T
  ) => Field<
    "delete_bookmarks",
    [Argument<"where", Variable<"where"> | bookmarks_bool_exp>],
    SelectionSet<T>
  >;
  /**
   * @description delete single row from the table: "bookmarks"
   */
  readonly delete_bookmarks_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: bookmarksSelector) => T
  ) => Field<
    "delete_bookmarks_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description delete data from the table: "playlist_items"
   */
  readonly delete_playlist_items: <T extends Array<Selection>>(
    variables: { where?: Variable<"where"> | playlist_items_bool_exp },
    select: (t: playlist_items_mutation_responseSelector) => T
  ) => Field<
    "delete_playlist_items",
    [Argument<"where", Variable<"where"> | playlist_items_bool_exp>],
    SelectionSet<T>
  >;
  /**
   * @description delete single row from the table: "playlist_items"
   */
  readonly delete_playlist_items_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: playlist_itemsSelector) => T
  ) => Field<
    "delete_playlist_items_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description delete data from the table: "playlists"
   */
  readonly delete_playlists: <T extends Array<Selection>>(
    variables: { where?: Variable<"where"> | playlists_bool_exp },
    select: (t: playlists_mutation_responseSelector) => T
  ) => Field<
    "delete_playlists",
    [Argument<"where", Variable<"where"> | playlists_bool_exp>],
    SelectionSet<T>
  >;
  /**
   * @description delete single row from the table: "playlists"
   */
  readonly delete_playlists_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: playlistsSelector) => T
  ) => Field<
    "delete_playlists_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description delete data from the table: "tracks"
   */
  readonly delete_tracks: <T extends Array<Selection>>(
    variables: { where?: Variable<"where"> | tracks_bool_exp },
    select: (t: tracks_mutation_responseSelector) => T
  ) => Field<
    "delete_tracks",
    [Argument<"where", Variable<"where"> | tracks_bool_exp>],
    SelectionSet<T>
  >;
  /**
   * @description delete single row from the table: "tracks"
   */
  readonly delete_tracks_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: tracksSelector) => T
  ) => Field<
    "delete_tracks_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description delete data from the table: "users"
   */
  readonly delete_users: <T extends Array<Selection>>(
    variables: { where?: Variable<"where"> | users_bool_exp },
    select: (t: users_mutation_responseSelector) => T
  ) => Field<
    "delete_users",
    [Argument<"where", Variable<"where"> | users_bool_exp>],
    SelectionSet<T>
  >;
  /**
   * @description delete single row from the table: "users"
   */
  readonly delete_users_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: usersSelector) => T
  ) => Field<
    "delete_users_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description insert data into the table: "bookmarks"
   */
  readonly insert_bookmarks: <T extends Array<Selection>>(
    variables: {
      objects?: Variable<"objects"> | bookmarks_insert_input;
      on_conflict?: Variable<"on_conflict"> | bookmarks_on_conflict;
    },
    select: (t: bookmarks_mutation_responseSelector) => T
  ) => Field<
    "insert_bookmarks",
    [
      Argument<"objects", Variable<"objects"> | bookmarks_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | bookmarks_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert a single row into the table: "bookmarks"
   */
  readonly insert_bookmarks_one: <T extends Array<Selection>>(
    variables: {
      object?: Variable<"object"> | bookmarks_insert_input;
      on_conflict?: Variable<"on_conflict"> | bookmarks_on_conflict;
    },
    select: (t: bookmarksSelector) => T
  ) => Field<
    "insert_bookmarks_one",
    [
      Argument<"object", Variable<"object"> | bookmarks_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | bookmarks_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert data into the table: "playlist_items"
   */
  readonly insert_playlist_items: <T extends Array<Selection>>(
    variables: {
      objects?: Variable<"objects"> | playlist_items_insert_input;
      on_conflict?: Variable<"on_conflict"> | playlist_items_on_conflict;
    },
    select: (t: playlist_items_mutation_responseSelector) => T
  ) => Field<
    "insert_playlist_items",
    [
      Argument<"objects", Variable<"objects"> | playlist_items_insert_input>,
      Argument<
        "on_conflict",
        Variable<"on_conflict"> | playlist_items_on_conflict
      >
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert a single row into the table: "playlist_items"
   */
  readonly insert_playlist_items_one: <T extends Array<Selection>>(
    variables: {
      object?: Variable<"object"> | playlist_items_insert_input;
      on_conflict?: Variable<"on_conflict"> | playlist_items_on_conflict;
    },
    select: (t: playlist_itemsSelector) => T
  ) => Field<
    "insert_playlist_items_one",
    [
      Argument<"object", Variable<"object"> | playlist_items_insert_input>,
      Argument<
        "on_conflict",
        Variable<"on_conflict"> | playlist_items_on_conflict
      >
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert data into the table: "playlists"
   */
  readonly insert_playlists: <T extends Array<Selection>>(
    variables: {
      objects?: Variable<"objects"> | playlists_insert_input;
      on_conflict?: Variable<"on_conflict"> | playlists_on_conflict;
    },
    select: (t: playlists_mutation_responseSelector) => T
  ) => Field<
    "insert_playlists",
    [
      Argument<"objects", Variable<"objects"> | playlists_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | playlists_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert a single row into the table: "playlists"
   */
  readonly insert_playlists_one: <T extends Array<Selection>>(
    variables: {
      object?: Variable<"object"> | playlists_insert_input;
      on_conflict?: Variable<"on_conflict"> | playlists_on_conflict;
    },
    select: (t: playlistsSelector) => T
  ) => Field<
    "insert_playlists_one",
    [
      Argument<"object", Variable<"object"> | playlists_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | playlists_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert data into the table: "tracks"
   */
  readonly insert_tracks: <T extends Array<Selection>>(
    variables: {
      objects?: Variable<"objects"> | tracks_insert_input;
      on_conflict?: Variable<"on_conflict"> | tracks_on_conflict;
    },
    select: (t: tracks_mutation_responseSelector) => T
  ) => Field<
    "insert_tracks",
    [
      Argument<"objects", Variable<"objects"> | tracks_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | tracks_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert a single row into the table: "tracks"
   */
  readonly insert_tracks_one: <T extends Array<Selection>>(
    variables: {
      object?: Variable<"object"> | tracks_insert_input;
      on_conflict?: Variable<"on_conflict"> | tracks_on_conflict;
    },
    select: (t: tracksSelector) => T
  ) => Field<
    "insert_tracks_one",
    [
      Argument<"object", Variable<"object"> | tracks_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | tracks_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert data into the table: "users"
   */
  readonly insert_users: <T extends Array<Selection>>(
    variables: {
      objects?: Variable<"objects"> | users_insert_input;
      on_conflict?: Variable<"on_conflict"> | users_on_conflict;
    },
    select: (t: users_mutation_responseSelector) => T
  ) => Field<
    "insert_users",
    [
      Argument<"objects", Variable<"objects"> | users_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | users_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description insert a single row into the table: "users"
   */
  readonly insert_users_one: <T extends Array<Selection>>(
    variables: {
      object?: Variable<"object"> | users_insert_input;
      on_conflict?: Variable<"on_conflict"> | users_on_conflict;
    },
    select: (t: usersSelector) => T
  ) => Field<
    "insert_users_one",
    [
      Argument<"object", Variable<"object"> | users_insert_input>,
      Argument<"on_conflict", Variable<"on_conflict"> | users_on_conflict>
    ],
    SelectionSet<T>
  >;
  /**
   * @description update data of the table: "bookmarks"
   */
  readonly update_bookmarks: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | bookmarks_inc_input;
      _set?: Variable<"_set"> | bookmarks_set_input;
      where?: Variable<"where"> | bookmarks_bool_exp;
    },
    select: (t: bookmarks_mutation_responseSelector) => T
  ) => Field<
    "update_bookmarks",
    [
      Argument<"_inc", Variable<"_inc"> | bookmarks_inc_input>,
      Argument<"_set", Variable<"_set"> | bookmarks_set_input>,
      Argument<"where", Variable<"where"> | bookmarks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description update single row of the table: "bookmarks"
   */
  readonly update_bookmarks_by_pk: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | bookmarks_inc_input;
      _set?: Variable<"_set"> | bookmarks_set_input;
      pk_columns?: Variable<"pk_columns"> | bookmarks_pk_columns_input;
    },
    select: (t: bookmarksSelector) => T
  ) => Field<
    "update_bookmarks_by_pk",
    [
      Argument<"_inc", Variable<"_inc"> | bookmarks_inc_input>,
      Argument<"_set", Variable<"_set"> | bookmarks_set_input>,
      Argument<
        "pk_columns",
        Variable<"pk_columns"> | bookmarks_pk_columns_input
      >
    ],
    SelectionSet<T>
  >;
  /**
   * @description update data of the table: "playlist_items"
   */
  readonly update_playlist_items: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | playlist_items_inc_input;
      _set?: Variable<"_set"> | playlist_items_set_input;
      where?: Variable<"where"> | playlist_items_bool_exp;
    },
    select: (t: playlist_items_mutation_responseSelector) => T
  ) => Field<
    "update_playlist_items",
    [
      Argument<"_inc", Variable<"_inc"> | playlist_items_inc_input>,
      Argument<"_set", Variable<"_set"> | playlist_items_set_input>,
      Argument<"where", Variable<"where"> | playlist_items_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description update single row of the table: "playlist_items"
   */
  readonly update_playlist_items_by_pk: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | playlist_items_inc_input;
      _set?: Variable<"_set"> | playlist_items_set_input;
      pk_columns?: Variable<"pk_columns"> | playlist_items_pk_columns_input;
    },
    select: (t: playlist_itemsSelector) => T
  ) => Field<
    "update_playlist_items_by_pk",
    [
      Argument<"_inc", Variable<"_inc"> | playlist_items_inc_input>,
      Argument<"_set", Variable<"_set"> | playlist_items_set_input>,
      Argument<
        "pk_columns",
        Variable<"pk_columns"> | playlist_items_pk_columns_input
      >
    ],
    SelectionSet<T>
  >;
  /**
   * @description update data of the table: "playlists"
   */
  readonly update_playlists: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | playlists_inc_input;
      _set?: Variable<"_set"> | playlists_set_input;
      where?: Variable<"where"> | playlists_bool_exp;
    },
    select: (t: playlists_mutation_responseSelector) => T
  ) => Field<
    "update_playlists",
    [
      Argument<"_inc", Variable<"_inc"> | playlists_inc_input>,
      Argument<"_set", Variable<"_set"> | playlists_set_input>,
      Argument<"where", Variable<"where"> | playlists_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description update single row of the table: "playlists"
   */
  readonly update_playlists_by_pk: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | playlists_inc_input;
      _set?: Variable<"_set"> | playlists_set_input;
      pk_columns?: Variable<"pk_columns"> | playlists_pk_columns_input;
    },
    select: (t: playlistsSelector) => T
  ) => Field<
    "update_playlists_by_pk",
    [
      Argument<"_inc", Variable<"_inc"> | playlists_inc_input>,
      Argument<"_set", Variable<"_set"> | playlists_set_input>,
      Argument<
        "pk_columns",
        Variable<"pk_columns"> | playlists_pk_columns_input
      >
    ],
    SelectionSet<T>
  >;
  /**
   * @description update data of the table: "tracks"
   */
  readonly update_tracks: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | tracks_inc_input;
      _set?: Variable<"_set"> | tracks_set_input;
      where?: Variable<"where"> | tracks_bool_exp;
    },
    select: (t: tracks_mutation_responseSelector) => T
  ) => Field<
    "update_tracks",
    [
      Argument<"_inc", Variable<"_inc"> | tracks_inc_input>,
      Argument<"_set", Variable<"_set"> | tracks_set_input>,
      Argument<"where", Variable<"where"> | tracks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description update single row of the table: "tracks"
   */
  readonly update_tracks_by_pk: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | tracks_inc_input;
      _set?: Variable<"_set"> | tracks_set_input;
      pk_columns?: Variable<"pk_columns"> | tracks_pk_columns_input;
    },
    select: (t: tracksSelector) => T
  ) => Field<
    "update_tracks_by_pk",
    [
      Argument<"_inc", Variable<"_inc"> | tracks_inc_input>,
      Argument<"_set", Variable<"_set"> | tracks_set_input>,
      Argument<"pk_columns", Variable<"pk_columns"> | tracks_pk_columns_input>
    ],
    SelectionSet<T>
  >;
  /**
   * @description update data of the table: "users"
   */
  readonly update_users: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | users_inc_input;
      _set?: Variable<"_set"> | users_set_input;
      where?: Variable<"where"> | users_bool_exp;
    },
    select: (t: users_mutation_responseSelector) => T
  ) => Field<
    "update_users",
    [
      Argument<"_inc", Variable<"_inc"> | users_inc_input>,
      Argument<"_set", Variable<"_set"> | users_set_input>,
      Argument<"where", Variable<"where"> | users_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description update single row of the table: "users"
   */
  readonly update_users_by_pk: <T extends Array<Selection>>(
    variables: {
      _inc?: Variable<"_inc"> | users_inc_input;
      _set?: Variable<"_set"> | users_set_input;
      pk_columns?: Variable<"pk_columns"> | users_pk_columns_input;
    },
    select: (t: usersSelector) => T
  ) => Field<
    "update_users_by_pk",
    [
      Argument<"_inc", Variable<"_inc"> | users_inc_input>,
      Argument<"_set", Variable<"_set"> | users_set_input>,
      Argument<"pk_columns", Variable<"pk_columns"> | users_pk_columns_input>
    ],
    SelectionSet<T>
  >;
}
/**
 * Runtime implementation of `mutation_rootSelector`. Every member forwards
 * each entry of `variables` into a `new Argument(name, value)` (argument
 * order mirrors the selector's declared tuple) and wraps the caller's
 * selection in a `SelectionSet` built from the matching table selector.
 */
export const mutation_root: mutation_rootSelector = {
  __typename: () => new Field("__typename"),
  /**
   * @description delete data from the table: "bookmarks"
   */
  delete_bookmarks: (variables, select) =>
    new Field(
      "delete_bookmarks",
      [new Argument("where", variables.where)],
      new SelectionSet(select(bookmarks_mutation_response))
    ),
  /**
   * @description delete single row from the table: "bookmarks"
   */
  delete_bookmarks_by_pk: (variables, select) =>
    new Field(
      "delete_bookmarks_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(bookmarks))
    ),
  /**
   * @description delete data from the table: "playlist_items"
   */
  delete_playlist_items: (variables, select) =>
    new Field(
      "delete_playlist_items",
      [new Argument("where", variables.where)],
      new SelectionSet(select(playlist_items_mutation_response))
    ),
  /**
   * @description delete single row from the table: "playlist_items"
   */
  delete_playlist_items_by_pk: (variables, select) =>
    new Field(
      "delete_playlist_items_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(playlist_items))
    ),
  /**
   * @description delete data from the table: "playlists"
   */
  delete_playlists: (variables, select) =>
    new Field(
      "delete_playlists",
      [new Argument("where", variables.where)],
      new SelectionSet(select(playlists_mutation_response))
    ),
  /**
   * @description delete single row from the table: "playlists"
   */
  delete_playlists_by_pk: (variables, select) =>
    new Field(
      "delete_playlists_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(playlists))
    ),
  /**
   * @description delete data from the table: "tracks"
   */
  delete_tracks: (variables, select) =>
    new Field(
      "delete_tracks",
      [new Argument("where", variables.where)],
      new SelectionSet(select(tracks_mutation_response))
    ),
  /**
   * @description delete single row from the table: "tracks"
   */
  delete_tracks_by_pk: (variables, select) =>
    new Field(
      "delete_tracks_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(tracks))
    ),
  /**
   * @description delete data from the table: "users"
   */
  delete_users: (variables, select) =>
    new Field(
      "delete_users",
      [new Argument("where", variables.where)],
      new SelectionSet(select(users_mutation_response))
    ),
  /**
   * @description delete single row from the table: "users"
   */
  delete_users_by_pk: (variables, select) =>
    new Field(
      "delete_users_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(users))
    ),
  /**
   * @description insert data into the table: "bookmarks"
   */
  insert_bookmarks: (variables, select) =>
    new Field(
      "insert_bookmarks",
      [
        new Argument("objects", variables.objects),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(bookmarks_mutation_response))
    ),
  /**
   * @description insert a single row into the table: "bookmarks"
   */
  insert_bookmarks_one: (variables, select) =>
    new Field(
      "insert_bookmarks_one",
      [
        new Argument("object", variables.object),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(bookmarks))
    ),
  /**
   * @description insert data into the table: "playlist_items"
   */
  insert_playlist_items: (variables, select) =>
    new Field(
      "insert_playlist_items",
      [
        new Argument("objects", variables.objects),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(playlist_items_mutation_response))
    ),
  /**
   * @description insert a single row into the table: "playlist_items"
   */
  insert_playlist_items_one: (variables, select) =>
    new Field(
      "insert_playlist_items_one",
      [
        new Argument("object", variables.object),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(playlist_items))
    ),
  /**
   * @description insert data into the table: "playlists"
   */
  insert_playlists: (variables, select) =>
    new Field(
      "insert_playlists",
      [
        new Argument("objects", variables.objects),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(playlists_mutation_response))
    ),
  /**
   * @description insert a single row into the table: "playlists"
   */
  insert_playlists_one: (variables, select) =>
    new Field(
      "insert_playlists_one",
      [
        new Argument("object", variables.object),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(playlists))
    ),
  /**
   * @description insert data into the table: "tracks"
   */
  insert_tracks: (variables, select) =>
    new Field(
      "insert_tracks",
      [
        new Argument("objects", variables.objects),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(tracks_mutation_response))
    ),
  /**
   * @description insert a single row into the table: "tracks"
   */
  insert_tracks_one: (variables, select) =>
    new Field(
      "insert_tracks_one",
      [
        new Argument("object", variables.object),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(tracks))
    ),
  /**
   * @description insert data into the table: "users"
   */
  insert_users: (variables, select) =>
    new Field(
      "insert_users",
      [
        new Argument("objects", variables.objects),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(users_mutation_response))
    ),
  /**
   * @description insert a single row into the table: "users"
   */
  insert_users_one: (variables, select) =>
    new Field(
      "insert_users_one",
      [
        new Argument("object", variables.object),
        new Argument("on_conflict", variables.on_conflict),
      ],
      new SelectionSet(select(users))
    ),
  /**
   * @description update data of the table: "bookmarks"
   */
  update_bookmarks: (variables, select) =>
    new Field(
      "update_bookmarks",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(bookmarks_mutation_response))
    ),
  /**
   * @description update single row of the table: "bookmarks"
   */
  update_bookmarks_by_pk: (variables, select) =>
    new Field(
      "update_bookmarks_by_pk",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("pk_columns", variables.pk_columns),
      ],
      new SelectionSet(select(bookmarks))
    ),
  /**
   * @description update data of the table: "playlist_items"
   */
  update_playlist_items: (variables, select) =>
    new Field(
      "update_playlist_items",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(playlist_items_mutation_response))
    ),
  /**
   * @description update single row of the table: "playlist_items"
   */
  update_playlist_items_by_pk: (variables, select) =>
    new Field(
      "update_playlist_items_by_pk",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("pk_columns", variables.pk_columns),
      ],
      new SelectionSet(select(playlist_items))
    ),
  /**
   * @description update data of the table: "playlists"
   */
  update_playlists: (variables, select) =>
    new Field(
      "update_playlists",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(playlists_mutation_response))
    ),
  /**
   * @description update single row of the table: "playlists"
   */
  update_playlists_by_pk: (variables, select) =>
    new Field(
      "update_playlists_by_pk",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("pk_columns", variables.pk_columns),
      ],
      new SelectionSet(select(playlists))
    ),
  /**
   * @description update data of the table: "tracks"
   */
  update_tracks: (variables, select) =>
    new Field(
      "update_tracks",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(tracks_mutation_response))
    ),
  /**
   * @description update single row of the table: "tracks"
   */
  update_tracks_by_pk: (variables, select) =>
    new Field(
      "update_tracks_by_pk",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("pk_columns", variables.pk_columns),
      ],
      new SelectionSet(select(tracks))
    ),
  /**
   * @description update data of the table: "users"
   */
  update_users: (variables, select) =>
    new Field(
      "update_users",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(users_mutation_response))
    ),
  /**
   * @description update single row of the table: "users"
   */
  update_users_by_pk: (variables, select) =>
    new Field(
      "update_users_by_pk",
      [
        new Argument("_inc", variables._inc),
        new Argument("_set", variables._set),
        new Argument("pk_columns", variables.pk_columns),
      ],
      new SelectionSet(select(users))
    ),
};
/**
 * Row shape of the "playlist_items" table. `createdUtc` is typed `unknown`
 * (a custom scalar in the schema — presumably a timestamp; confirm against
 * the server's scalar mapping).
 */
export interface Iplaylist_items {
  readonly __typename: "playlist_items";
  readonly createdUtc: unknown | null;
  readonly id: number;
  readonly playlistId: number;
  readonly position: number;
  readonly trackId: number;
}
/**
 * Field builders for "playlist_items"; every column is a leaf scalar, so
 * each member is a zero-argument factory returning a named Field.
 */
interface playlist_itemsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly playlistId: () => Field<"playlistId">;
  readonly position: () => Field<"position">;
  readonly trackId: () => Field<"trackId">;
}
/**
 * Selector instance for the "playlist_items" table: one leaf-Field factory
 * per column.
 */
export const playlist_items: playlist_itemsSelector = {
  __typename: function () {
    return new Field("__typename");
  },
  createdUtc: function () {
    return new Field("createdUtc");
  },
  id: function () {
    return new Field("id");
  },
  playlistId: function () {
    return new Field("playlistId");
  },
  position: function () {
    return new Field("position");
  },
  trackId: function () {
    return new Field("trackId");
  },
};
/**
 * Aggregate query result over "playlist_items": optional aggregate stats
 * plus the matching rows in `nodes`.
 */
export interface Iplaylist_items_aggregate {
  readonly __typename: "playlist_items_aggregate";
  readonly aggregate: Iplaylist_items_aggregate_fields | null;
  readonly nodes: ReadonlyArray<Iplaylist_items>;
}
/**
 * Builders for the "playlist_items" aggregate object: `aggregate` and
 * `nodes` both take a nested selection callback and take no GraphQL
 * arguments (the Field's argument slot is `never`).
 */
interface playlist_items_aggregateSelector {
  readonly __typename: () => Field<"__typename">;
  readonly aggregate: <T extends Array<Selection>>(
    select: (t: playlist_items_aggregate_fieldsSelector) => T
  ) => Field<"aggregate", never, SelectionSet<T>>;
  readonly nodes: <T extends Array<Selection>>(
    select: (t: playlist_itemsSelector) => T
  ) => Field<"nodes", never, SelectionSet<T>>;
}
/**
 * Selector instance for the "playlist_items" aggregate object. The nested
 * builders pass `undefined as never` for the (argument-less) Field argument
 * slot and wrap the chosen sub-selection in a SelectionSet.
 */
export const playlist_items_aggregate: playlist_items_aggregateSelector = {
  __typename: function () {
    return new Field("__typename");
  },
  aggregate: function (select) {
    const chosen = select(playlist_items_aggregate_fields);
    return new Field("aggregate", undefined as never, new SelectionSet(chosen));
  },
  nodes: function (select) {
    const chosen = select(playlist_items);
    return new Field("nodes", undefined as never, new SelectionSet(chosen));
  },
};
/**
 * Statistical aggregates available for "playlist_items": count plus the
 * standard numeric reductions (avg/max/min/stddev variants/sum/variance
 * variants), each nullable.
 */
export interface Iplaylist_items_aggregate_fields {
  readonly __typename: "playlist_items_aggregate_fields";
  readonly avg: Iplaylist_items_avg_fields | null;
  readonly count: number | null;
  readonly max: Iplaylist_items_max_fields | null;
  readonly min: Iplaylist_items_min_fields | null;
  readonly stddev: Iplaylist_items_stddev_fields | null;
  readonly stddev_pop: Iplaylist_items_stddev_pop_fields | null;
  readonly stddev_samp: Iplaylist_items_stddev_samp_fields | null;
  readonly sum: Iplaylist_items_sum_fields | null;
  readonly var_pop: Iplaylist_items_var_pop_fields | null;
  readonly var_samp: Iplaylist_items_var_samp_fields | null;
  readonly variance: Iplaylist_items_variance_fields | null;
}
/**
 * Builders for "playlist_items" aggregate fields. All members except
 * `count` take a nested selection; `count` instead takes `columns` and
 * `distinct` arguments and yields a leaf Field carrying those Arguments.
 */
interface playlist_items_aggregate_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly avg: <T extends Array<Selection>>(
    select: (t: playlist_items_avg_fieldsSelector) => T
  ) => Field<"avg", never, SelectionSet<T>>;
  readonly count: (variables: {
    columns?: Variable<"columns"> | playlist_items_select_column;
    distinct?: Variable<"distinct"> | boolean;
  }) => Field<
    "count",
    [
      Argument<"columns", Variable<"columns"> | playlist_items_select_column>,
      Argument<"distinct", Variable<"distinct"> | boolean>
    ]
  >;
  readonly max: <T extends Array<Selection>>(
    select: (t: playlist_items_max_fieldsSelector) => T
  ) => Field<"max", never, SelectionSet<T>>;
  readonly min: <T extends Array<Selection>>(
    select: (t: playlist_items_min_fieldsSelector) => T
  ) => Field<"min", never, SelectionSet<T>>;
  readonly stddev: <T extends Array<Selection>>(
    select: (t: playlist_items_stddev_fieldsSelector) => T
  ) => Field<"stddev", never, SelectionSet<T>>;
  readonly stddev_pop: <T extends Array<Selection>>(
    select: (t: playlist_items_stddev_pop_fieldsSelector) => T
  ) => Field<"stddev_pop", never, SelectionSet<T>>;
  readonly stddev_samp: <T extends Array<Selection>>(
    select: (t: playlist_items_stddev_samp_fieldsSelector) => T
  ) => Field<"stddev_samp", never, SelectionSet<T>>;
  readonly sum: <T extends Array<Selection>>(
    select: (t: playlist_items_sum_fieldsSelector) => T
  ) => Field<"sum", never, SelectionSet<T>>;
  readonly var_pop: <T extends Array<Selection>>(
    select: (t: playlist_items_var_pop_fieldsSelector) => T
  ) => Field<"var_pop", never, SelectionSet<T>>;
  readonly var_samp: <T extends Array<Selection>>(
    select: (t: playlist_items_var_samp_fieldsSelector) => T
  ) => Field<"var_samp", never, SelectionSet<T>>;
  readonly variance: <T extends Array<Selection>>(
    select: (t: playlist_items_variance_fieldsSelector) => T
  ) => Field<"variance", never, SelectionSet<T>>;
}
export const playlist_items_aggregate_fields: playlist_items_aggregate_fieldsSelector = {
__typename: () => new Field("__typename"),
avg: (select) =>
new Field(
"avg",
undefined as never,
new SelectionSet(select(playlist_items_avg_fields))
),
count: (variables) => new Field("count"),
max: (select) =>
new Field(
"max",
undefined as never,
new SelectionSet(select(playlist_items_max_fields))
),
min: (select) =>
new Field(
"min",
undefined as never,
new SelectionSet(select(playlist_items_min_fields))
),
stddev: (select) =>
new Field(
"stddev",
undefined as never,
new SelectionSet(select(playlist_items_stddev_fields))
),
stddev_pop: (select) =>
new Field(
"stddev_pop",
undefined as never,
new SelectionSet(select(playlist_items_stddev_pop_fields))
),
stddev_samp: (select) =>
new Field(
"stddev_samp",
undefined as never,
new SelectionSet(select(playlist_items_stddev_samp_fields))
),
sum: (select) =>
new Field(
"sum",
undefined as never,
new SelectionSet(select(playlist_items_sum_fields))
),
var_pop: (select) =>
new Field(
"var_pop",
undefined as never,
new SelectionSet(select(playlist_items_var_pop_fields))
),
var_samp: (select) =>
new Field(
"var_samp",
undefined as never,
new SelectionSet(select(playlist_items_var_samp_fields))
),
variance: (select) =>
new Field(
"variance",
undefined as never,
new SelectionSet(select(playlist_items_variance_fields))
),
};
/** Per-column averages over "playlist_items" numeric columns (nullable). */
export interface Iplaylist_items_avg_fields {
  readonly __typename: "playlist_items_avg_fields";
  readonly id: number | null;
  readonly playlistId: number | null;
  readonly position: number | null;
  readonly trackId: number | null;
}
/** Leaf-Field builders for the "playlist_items" avg aggregate. */
interface playlist_items_avg_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly playlistId: () => Field<"playlistId">;
  readonly position: () => Field<"position">;
  readonly trackId: () => Field<"trackId">;
}
/** Selector instance for the "playlist_items" avg aggregate fields. */
export const playlist_items_avg_fields: playlist_items_avg_fieldsSelector = {
  __typename: function () {
    return new Field("__typename");
  },
  id: function () {
    return new Field("id");
  },
  playlistId: function () {
    return new Field("playlistId");
  },
  position: function () {
    return new Field("position");
  },
  trackId: function () {
    return new Field("trackId");
  },
};
/** Per-column maxima over "playlist_items" (includes createdUtc; nullable). */
export interface Iplaylist_items_max_fields {
  readonly __typename: "playlist_items_max_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly playlistId: number | null;
  readonly position: number | null;
  readonly trackId: number | null;
}
/** Leaf-Field builders for the "playlist_items" max aggregate. */
interface playlist_items_max_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly playlistId: () => Field<"playlistId">;
  readonly position: () => Field<"position">;
  readonly trackId: () => Field<"trackId">;
}
/** Selector instance for the "playlist_items" max aggregate fields. */
export const playlist_items_max_fields: playlist_items_max_fieldsSelector = {
  __typename: function () {
    return new Field("__typename");
  },
  createdUtc: function () {
    return new Field("createdUtc");
  },
  id: function () {
    return new Field("id");
  },
  playlistId: function () {
    return new Field("playlistId");
  },
  position: function () {
    return new Field("position");
  },
  trackId: function () {
    return new Field("trackId");
  },
};
/** Per-column minima over "playlist_items" (includes createdUtc; nullable). */
export interface Iplaylist_items_min_fields {
  readonly __typename: "playlist_items_min_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly playlistId: number | null;
  readonly position: number | null;
  readonly trackId: number | null;
}
/** Leaf-Field builders for the "playlist_items" min aggregate. */
interface playlist_items_min_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly playlistId: () => Field<"playlistId">;
  readonly position: () => Field<"position">;
  readonly trackId: () => Field<"trackId">;
}
/** Selector instance for the "playlist_items" min aggregate fields. */
export const playlist_items_min_fields: playlist_items_min_fieldsSelector = {
  __typename: function () {
    return new Field("__typename");
  },
  createdUtc: function () {
    return new Field("createdUtc");
  },
  id: function () {
    return new Field("id");
  },
  playlistId: function () {
    return new Field("playlistId");
  },
  position: function () {
    return new Field("position");
  },
  trackId: function () {
    return new Field("trackId");
  },
};
/**
 * Mutation response for "playlist_items": the affected-row count and the
 * affected rows themselves.
 */
export interface Iplaylist_items_mutation_response {
  readonly __typename: "playlist_items_mutation_response";
  readonly affected_rows: number;
  readonly returning: ReadonlyArray<Iplaylist_items>;
}
/** Builders for the "playlist_items" mutation response fields. */
interface playlist_items_mutation_responseSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description number of affected rows by the mutation
   */
  readonly affected_rows: () => Field<"affected_rows">;
  /**
   * @description data of the affected rows by the mutation
   */
  readonly returning: <T extends Array<Selection>>(
    select: (t: playlist_itemsSelector) => T
  ) => Field<"returning", never, SelectionSet<T>>;
}
/** Selector instance for the "playlist_items" mutation response. */
export const playlist_items_mutation_response: playlist_items_mutation_responseSelector = {
  __typename: function () {
    return new Field("__typename");
  },
  /**
   * @description number of affected rows by the mutation
   */
  affected_rows: function () {
    return new Field("affected_rows");
  },
  /**
   * @description data of the affected rows by the mutation
   */
  returning: function (select) {
    const chosen = select(playlist_items);
    return new Field("returning", undefined as never, new SelectionSet(chosen));
  },
};
/** Per-column standard deviation over "playlist_items" (nullable). */
export interface Iplaylist_items_stddev_fields {
  readonly __typename: "playlist_items_stddev_fields";
  readonly id: number | null;
  readonly playlistId: number | null;
  readonly position: number | null;
  readonly trackId: number | null;
}
interface playlist_items_stddev_fieldsSelector {
readonly __typename: () => Field<"__typename">;
readonly id: () => Field<"id">;
readonly playlistId: () => Field<"playlistId">;
readonly position: () => Field<"position">;
readonly trackId: () => Field<"trackId">;
}
export const playlist_items_stddev_fields: playlist_items_stddev_fieldsSelector = {
__typename: () => new Field("__typename"),
id: () => new Field("id"),
playlistId: () => new Field("playlistId"),
position: () => new Field("position"),
trackId: () => new Field("trackId"),
};
export interface Iplaylist_items_stddev_pop_fields {
readonly __typename: "playlist_items_stddev_pop_fields";
readonly id: number | null;
readonly playlistId: number | null;
readonly position: number | null;
readonly trackId: number | null;
}
interface playlist_items_stddev_pop_fieldsSelector {
readonly __typename: () => Field<"__typename">;
readonly id: () => Field<"id">;
readonly playlistId: () => Field<"playlistId">;
readonly position: () => Field<"position">;
readonly trackId: () => Field<"trackId">;
}
export const playlist_items_stddev_pop_fields: playlist_items_stddev_pop_fieldsSelector = {
__typename: () => new Field("__typename"),
id: () => new Field("id"),
playlistId: () => new Field("playlistId"),
position: () => new Field("position"),
trackId: () => new Field("trackId"),
};
export interface Iplaylist_items_stddev_samp_fields {
readonly __typename: "playlist_items_stddev_samp_fields";
readonly id: number | null;
readonly playlistId: number | null;
readonly position: number | null;
readonly trackId: number | null;
}
interface playlist_items_stddev_samp_fieldsSelector {
readonly __typename: () => Field<"__typename">;
readonly id: () => Field<"id">;
readonly playlistId: () => Field<"playlistId">;
readonly position: () => Field<"position">;
readonly trackId: () => Field<"trackId">;
}
export const playlist_items_stddev_samp_fields: playlist_items_stddev_samp_fieldsSelector = {
__typename: () => new Field("__typename"),
id: () => new Field("id"),
playlistId: () => new Field("playlistId"),
position: () => new Field("position"),
trackId: () => new Field("trackId"),
};
export interface Iplaylist_items_sum_fields {
readonly __typename: "playlist_items_sum_fields";
readonly id: number | null;
readonly playlistId: number | null;
readonly position: number | null;
readonly trackId: number | null;
}
interface playlist_items_sum_fieldsSelector {
readonly __typename: () => Field<"__typename">;
readonly id: () => Field<"id">;
readonly playlistId: () => Field<"playlistId">;
readonly position: () => Field<"position">;
readonly trackId: () => Field<"trackId">;
}
export const playlist_items_sum_fields: playlist_items_sum_fieldsSelector = {
__typename: () => new Field("__typename"),
id: () => new Field("id"),
playlistId: () => new Field("playlistId"),
position: () => new Field("position"),
trackId: () => new Field("trackId"),
};
export interface Iplaylist_items_var_pop_fields {
readonly __typename: "playlist_items_var_pop_fields";
readonly id: number | null;
readonly playlistId: number | null;
readonly position: number | null;
readonly trackId: number | null;
}
interface playlist_items_var_pop_fieldsSelector {
readonly __typename: () => Field<"__typename">;
readonly id: () => Field<"id">;
readonly playlistId: () => Field<"playlistId">;
readonly position: () => Field<"position">;
readonly trackId: () => Field<"trackId">;
}
export const playlist_items_var_pop_fields: playlist_items_var_pop_fieldsSelector = {
__typename: () => new Field("__typename"),
id: () => new Field("id"),
playlistId: () => new Field("playlistId"),
position: () => new Field("position"),
trackId: () => new Field("trackId"),
};
export interface Iplaylist_items_var_samp_fields {
readonly __typename: "playlist_items_var_samp_fields";
readonly id: number | null;
readonly playlistId: number | null;
readonly position: number | null;
readonly trackId: number | null;
}
interface playlist_items_var_samp_fieldsSelector {
readonly __typename: () => Field<"__typename">;
readonly id: () => Field<"id">;
readonly playlistId: () => Field<"playlistId">;
readonly position: () => Field<"position">;
readonly trackId: () => Field<"trackId">;
}
export const playlist_items_var_samp_fields: playlist_items_var_samp_fieldsSelector = {
__typename: () => new Field("__typename"),
id: () => new Field("id"),
playlistId: () => new Field("playlistId"),
position: () => new Field("position"),
trackId: () => new Field("trackId"),
};
export interface Iplaylist_items_variance_fields {
readonly __typename: "playlist_items_variance_fields";
readonly id: number | null;
readonly playlistId: number | null;
readonly position: number | null;
readonly trackId: number | null;
}
interface playlist_items_variance_fieldsSelector {
readonly __typename: () => Field<"__typename">;
readonly id: () => Field<"id">;
readonly playlistId: () => Field<"playlistId">;
readonly position: () => Field<"position">;
readonly trackId: () => Field<"trackId">;
}
export const playlist_items_variance_fields: playlist_items_variance_fieldsSelector = {
__typename: () => new Field("__typename"),
id: () => new Field("id"),
playlistId: () => new Field("playlistId"),
position: () => new Field("position"),
trackId: () => new Field("trackId"),
};
/** A row of the "playlists" table. */
export interface Iplaylists {
  readonly __typename: "playlists";
  readonly createdUtc: unknown | null;
  readonly id: number;
  readonly name: string;
  readonly ownerUserId: number;
}
interface playlistsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly ownerUserId: () => Field<"ownerUserId">;
}
// Scalar column selectors for "playlists"; each call mints a fresh Field.
export const playlists: playlistsSelector = (() => {
  const column = <N extends string>(name: N) => () => new Field(name);
  return {
    __typename: column("__typename"),
    createdUtc: column("createdUtc"),
    id: column("id"),
    name: column("name"),
    ownerUserId: column("ownerUserId"),
  };
})();
/** Aggregation wrapper over "playlists": computed fields plus the raw rows. */
export interface Iplaylists_aggregate {
  readonly __typename: "playlists_aggregate";
  readonly aggregate: Iplaylists_aggregate_fields | null;
  readonly nodes: ReadonlyArray<Iplaylists>;
}
interface playlists_aggregateSelector {
  readonly __typename: () => Field<"__typename">;
  readonly aggregate: <T extends Array<Selection>>(
    select: (t: playlists_aggregate_fieldsSelector) => T
  ) => Field<"aggregate", never, SelectionSet<T>>;
  readonly nodes: <T extends Array<Selection>>(
    select: (t: playlistsSelector) => T
  ) => Field<"nodes", never, SelectionSet<T>>;
}
export const playlists_aggregate: playlists_aggregateSelector = {
  __typename: () => new Field("__typename"),
  // Sub-select the aggregate computations (count/avg/...).
  aggregate: (select) => {
    const picked = new SelectionSet(select(playlists_aggregate_fields));
    return new Field("aggregate", undefined as never, picked);
  },
  // Sub-select the concrete playlist rows backing the aggregation.
  nodes: (select) => {
    const picked = new SelectionSet(select(playlists));
    return new Field("nodes", undefined as never, picked);
  },
};
/** Aggregate computations available over "playlists". */
export interface Iplaylists_aggregate_fields {
  readonly __typename: "playlists_aggregate_fields";
  readonly avg: Iplaylists_avg_fields | null;
  readonly count: number | null;
  readonly max: Iplaylists_max_fields | null;
  readonly min: Iplaylists_min_fields | null;
  readonly stddev: Iplaylists_stddev_fields | null;
  readonly stddev_pop: Iplaylists_stddev_pop_fields | null;
  readonly stddev_samp: Iplaylists_stddev_samp_fields | null;
  readonly sum: Iplaylists_sum_fields | null;
  readonly var_pop: Iplaylists_var_pop_fields | null;
  readonly var_samp: Iplaylists_var_samp_fields | null;
  readonly variance: Iplaylists_variance_fields | null;
}
/**
 * Shared signature of an argument-less aggregate sub-field (avg, max, ...):
 * takes only a sub-selection against the given selector. Structurally
 * identical to the hand-expanded form it replaces.
 */
type PlaylistsAggSubField<Name extends string, Sel> = <T extends Array<Selection>>(
  select: (t: Sel) => T
) => Field<Name, never, SelectionSet<T>>;
interface playlists_aggregate_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly avg: PlaylistsAggSubField<"avg", playlists_avg_fieldsSelector>;
  // `count` is the one sub-field that takes arguments instead of a selection.
  readonly count: (variables: {
    columns?: Variable<"columns"> | playlists_select_column;
    distinct?: Variable<"distinct"> | boolean;
  }) => Field<
    "count",
    [
      Argument<"columns", Variable<"columns"> | playlists_select_column>,
      Argument<"distinct", Variable<"distinct"> | boolean>
    ]
  >;
  readonly max: PlaylistsAggSubField<"max", playlists_max_fieldsSelector>;
  readonly min: PlaylistsAggSubField<"min", playlists_min_fieldsSelector>;
  readonly stddev: PlaylistsAggSubField<"stddev", playlists_stddev_fieldsSelector>;
  readonly stddev_pop: PlaylistsAggSubField<"stddev_pop", playlists_stddev_pop_fieldsSelector>;
  readonly stddev_samp: PlaylistsAggSubField<"stddev_samp", playlists_stddev_samp_fieldsSelector>;
  readonly sum: PlaylistsAggSubField<"sum", playlists_sum_fieldsSelector>;
  readonly var_pop: PlaylistsAggSubField<"var_pop", playlists_var_pop_fieldsSelector>;
  readonly var_samp: PlaylistsAggSubField<"var_samp", playlists_var_samp_fieldsSelector>;
  readonly variance: PlaylistsAggSubField<"variance", playlists_variance_fieldsSelector>;
}
export const playlists_aggregate_fields: playlists_aggregate_fieldsSelector = {
__typename: () => new Field("__typename"),
avg: (select) =>
new Field(
"avg",
undefined as never,
new SelectionSet(select(playlists_avg_fields))
),
count: (variables) => new Field("count"),
max: (select) =>
new Field(
"max",
undefined as never,
new SelectionSet(select(playlists_max_fields))
),
min: (select) =>
new Field(
"min",
undefined as never,
new SelectionSet(select(playlists_min_fields))
),
stddev: (select) =>
new Field(
"stddev",
undefined as never,
new SelectionSet(select(playlists_stddev_fields))
),
stddev_pop: (select) =>
new Field(
"stddev_pop",
undefined as never,
new SelectionSet(select(playlists_stddev_pop_fields))
),
stddev_samp: (select) =>
new Field(
"stddev_samp",
undefined as never,
new SelectionSet(select(playlists_stddev_samp_fields))
),
sum: (select) =>
new Field(
"sum",
undefined as never,
new SelectionSet(select(playlists_sum_fields))
),
var_pop: (select) =>
new Field(
"var_pop",
undefined as never,
new SelectionSet(select(playlists_var_pop_fields))
),
var_samp: (select) =>
new Field(
"var_samp",
undefined as never,
new SelectionSet(select(playlists_var_samp_fields))
),
variance: (select) =>
new Field(
"variance",
undefined as never,
new SelectionSet(select(playlists_variance_fields))
),
};
/** avg() aggregate over the numeric columns of "playlists". */
export interface Iplaylists_avg_fields {
  readonly __typename: "playlists_avg_fields";
  readonly id: number | null;
  readonly ownerUserId: number | null;
}
interface playlists_avg_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
}
export const playlists_avg_fields: playlists_avg_fieldsSelector = (() => {
  const scalar = <N extends string>(name: N) => () => new Field(name);
  return {
    __typename: scalar("__typename"),
    id: scalar("id"),
    ownerUserId: scalar("ownerUserId"),
  };
})();
/** max() aggregate over "playlists" columns (nullable: empty-set result). */
export interface Iplaylists_max_fields {
  readonly __typename: "playlists_max_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly name: string | null;
  readonly ownerUserId: number | null;
}
interface playlists_max_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly ownerUserId: () => Field<"ownerUserId">;
}
export const playlists_max_fields: playlists_max_fieldsSelector = (() => {
  const scalar = <N extends string>(name: N) => () => new Field(name);
  return {
    __typename: scalar("__typename"),
    createdUtc: scalar("createdUtc"),
    id: scalar("id"),
    name: scalar("name"),
    ownerUserId: scalar("ownerUserId"),
  };
})();
/** min() aggregate over "playlists" columns (nullable: empty-set result). */
export interface Iplaylists_min_fields {
  readonly __typename: "playlists_min_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly name: string | null;
  readonly ownerUserId: number | null;
}
interface playlists_min_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly ownerUserId: () => Field<"ownerUserId">;
}
export const playlists_min_fields: playlists_min_fieldsSelector = (() => {
  const scalar = <N extends string>(name: N) => () => new Field(name);
  return {
    __typename: scalar("__typename"),
    createdUtc: scalar("createdUtc"),
    id: scalar("id"),
    name: scalar("name"),
    ownerUserId: scalar("ownerUserId"),
  };
})();
/** Payload returned by mutations on "playlists". */
export interface Iplaylists_mutation_response {
  readonly __typename: "playlists_mutation_response";
  readonly affected_rows: number;
  readonly returning: ReadonlyArray<Iplaylists>;
}
interface playlists_mutation_responseSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description number of affected rows by the mutation
   */
  readonly affected_rows: () => Field<"affected_rows">;
  /**
   * @description data of the affected rows by the mutation
   */
  readonly returning: <T extends Array<Selection>>(
    select: (t: playlistsSelector) => T
  ) => Field<"returning", never, SelectionSet<T>>;
}
export const playlists_mutation_response: playlists_mutation_responseSelector = {
  __typename: () => new Field("__typename"),
  /** @description number of affected rows by the mutation */
  affected_rows: () => new Field("affected_rows"),
  /** @description data of the affected rows by the mutation */
  returning: (select) => {
    // Expand the caller's sub-selection against the row selector, then wrap
    // it as the `returning` field of the mutation payload.
    const rows = new SelectionSet(select(playlists));
    return new Field("returning", undefined as never, rows);
  },
};
/**
 * All numeric aggregate results over "playlists" (stddev, stddev_pop,
 * stddev_samp, sum, var_pop, var_samp, variance) share the same nullable
 * numeric columns; only the `__typename` literal differs. The shape is
 * factored once and instantiated per aggregate below — each exported
 * interface is structurally identical to its original hand-expanded form.
 */
type PlaylistsNumericAgg<TName extends string> = {
  readonly __typename: TName;
  readonly id: number | null;
  readonly ownerUserId: number | null;
};
export interface Iplaylists_stddev_fields
  extends PlaylistsNumericAgg<"playlists_stddev_fields"> {}
export interface Iplaylists_stddev_pop_fields
  extends PlaylistsNumericAgg<"playlists_stddev_pop_fields"> {}
export interface Iplaylists_stddev_samp_fields
  extends PlaylistsNumericAgg<"playlists_stddev_samp_fields"> {}
export interface Iplaylists_sum_fields
  extends PlaylistsNumericAgg<"playlists_sum_fields"> {}
export interface Iplaylists_var_pop_fields
  extends PlaylistsNumericAgg<"playlists_var_pop_fields"> {}
export interface Iplaylists_var_samp_fields
  extends PlaylistsNumericAgg<"playlists_var_samp_fields"> {}
export interface Iplaylists_variance_fields
  extends PlaylistsNumericAgg<"playlists_variance_fields"> {}
/** Common selector shape for the numeric aggregates above. */
type PlaylistsNumericAggSelector = {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
  readonly ownerUserId: () => Field<"ownerUserId">;
};
type playlists_stddev_fieldsSelector = PlaylistsNumericAggSelector;
type playlists_stddev_pop_fieldsSelector = PlaylistsNumericAggSelector;
type playlists_stddev_samp_fieldsSelector = PlaylistsNumericAggSelector;
type playlists_sum_fieldsSelector = PlaylistsNumericAggSelector;
type playlists_var_pop_fieldsSelector = PlaylistsNumericAggSelector;
type playlists_var_samp_fieldsSelector = PlaylistsNumericAggSelector;
type playlists_variance_fieldsSelector = PlaylistsNumericAggSelector;
// Factory rather than a shared constant so every exported selector keeps its
// own object identity, matching the original one-literal-per-export layout.
const makePlaylistsNumericAggSelector = (): PlaylistsNumericAggSelector => ({
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
  ownerUserId: () => new Field("ownerUserId"),
});
export const playlists_stddev_fields: playlists_stddev_fieldsSelector =
  makePlaylistsNumericAggSelector();
export const playlists_stddev_pop_fields: playlists_stddev_pop_fieldsSelector =
  makePlaylistsNumericAggSelector();
export const playlists_stddev_samp_fields: playlists_stddev_samp_fieldsSelector =
  makePlaylistsNumericAggSelector();
export const playlists_sum_fields: playlists_sum_fieldsSelector =
  makePlaylistsNumericAggSelector();
export const playlists_var_pop_fields: playlists_var_pop_fieldsSelector =
  makePlaylistsNumericAggSelector();
export const playlists_var_samp_fields: playlists_var_samp_fieldsSelector =
  makePlaylistsNumericAggSelector();
export const playlists_variance_fields: playlists_variance_fieldsSelector =
  makePlaylistsNumericAggSelector();
/**
 * Shape of the GraphQL query root: for each table (bookmarks, playlist_items,
 * playlists, tracks, users) there is a list field, an aggregate field, and a
 * nullable primary-key lookup field.
 */
export interface Iquery_root {
  readonly __typename: "query_root";
  readonly bookmarks: ReadonlyArray<Ibookmarks>;
  readonly bookmarks_aggregate: Ibookmarks_aggregate;
  readonly bookmarks_by_pk: Ibookmarks | null;
  readonly playlist_items: ReadonlyArray<Iplaylist_items>;
  readonly playlist_items_aggregate: Iplaylist_items_aggregate;
  readonly playlist_items_by_pk: Iplaylist_items | null;
  readonly playlists: ReadonlyArray<Iplaylists>;
  readonly playlists_aggregate: Iplaylists_aggregate;
  readonly playlists_by_pk: Iplaylists | null;
  readonly tracks: ReadonlyArray<Itracks>;
  readonly tracks_aggregate: Itracks_aggregate;
  readonly tracks_by_pk: Itracks | null;
  readonly users: ReadonlyArray<Iusers>;
  readonly users_aggregate: Iusers_aggregate;
  readonly users_by_pk: Iusers | null;
}
interface query_rootSelector {
readonly __typename: () => Field<"__typename">;
/**
* @description fetch data from the table: "bookmarks"
*/
readonly bookmarks: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | bookmarks_order_by;
where?: Variable<"where"> | bookmarks_bool_exp;
},
select: (t: bookmarksSelector) => T
) => Field<
"bookmarks",
[
Argument<
"distinct_on",
Variable<"distinct_on"> | bookmarks_select_column
>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
Argument<"where", Variable<"where"> | bookmarks_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch aggregated fields from the table: "bookmarks"
*/
readonly bookmarks_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | bookmarks_order_by;
where?: Variable<"where"> | bookmarks_bool_exp;
},
select: (t: bookmarks_aggregateSelector) => T
) => Field<
"bookmarks_aggregate",
[
Argument<
"distinct_on",
Variable<"distinct_on"> | bookmarks_select_column
>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
Argument<"where", Variable<"where"> | bookmarks_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "bookmarks" using primary key columns
*/
readonly bookmarks_by_pk: <T extends Array<Selection>>(
variables: { id?: Variable<"id"> | number },
select: (t: bookmarksSelector) => T
) => Field<
"bookmarks_by_pk",
[Argument<"id", Variable<"id"> | number>],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "playlist_items"
*/
readonly playlist_items: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | playlist_items_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | playlist_items_order_by;
where?: Variable<"where"> | playlist_items_bool_exp;
},
select: (t: playlist_itemsSelector) => T
) => Field<
"playlist_items",
[
Argument<
"distinct_on",
Variable<"distinct_on"> | playlist_items_select_column
>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | playlist_items_order_by>,
Argument<"where", Variable<"where"> | playlist_items_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch aggregated fields from the table: "playlist_items"
*/
readonly playlist_items_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | playlist_items_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | playlist_items_order_by;
where?: Variable<"where"> | playlist_items_bool_exp;
},
select: (t: playlist_items_aggregateSelector) => T
) => Field<
"playlist_items_aggregate",
[
Argument<
"distinct_on",
Variable<"distinct_on"> | playlist_items_select_column
>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | playlist_items_order_by>,
Argument<"where", Variable<"where"> | playlist_items_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "playlist_items" using primary key columns
*/
readonly playlist_items_by_pk: <T extends Array<Selection>>(
variables: { id?: Variable<"id"> | number },
select: (t: playlist_itemsSelector) => T
) => Field<
"playlist_items_by_pk",
[Argument<"id", Variable<"id"> | number>],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "playlists"
*/
readonly playlists: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | playlists_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | playlists_order_by;
where?: Variable<"where"> | playlists_bool_exp;
},
select: (t: playlistsSelector) => T
) => Field<
"playlists",
[
Argument<
"distinct_on",
Variable<"distinct_on"> | playlists_select_column
>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | playlists_order_by>,
Argument<"where", Variable<"where"> | playlists_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch aggregated fields from the table: "playlists"
*/
readonly playlists_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | playlists_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | playlists_order_by;
where?: Variable<"where"> | playlists_bool_exp;
},
select: (t: playlists_aggregateSelector) => T
) => Field<
"playlists_aggregate",
[
Argument<
"distinct_on",
Variable<"distinct_on"> | playlists_select_column
>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | playlists_order_by>,
Argument<"where", Variable<"where"> | playlists_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "playlists" using primary key columns
*/
readonly playlists_by_pk: <T extends Array<Selection>>(
variables: { id?: Variable<"id"> | number },
select: (t: playlistsSelector) => T
) => Field<
"playlists_by_pk",
[Argument<"id", Variable<"id"> | number>],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "tracks"
*/
readonly tracks: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | tracks_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | tracks_order_by;
where?: Variable<"where"> | tracks_bool_exp;
},
select: (t: tracksSelector) => T
) => Field<
"tracks",
[
Argument<"distinct_on", Variable<"distinct_on"> | tracks_select_column>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | tracks_order_by>,
Argument<"where", Variable<"where"> | tracks_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch aggregated fields from the table: "tracks"
*/
readonly tracks_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | tracks_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | tracks_order_by;
where?: Variable<"where"> | tracks_bool_exp;
},
select: (t: tracks_aggregateSelector) => T
) => Field<
"tracks_aggregate",
[
Argument<"distinct_on", Variable<"distinct_on"> | tracks_select_column>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | tracks_order_by>,
Argument<"where", Variable<"where"> | tracks_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "tracks" using primary key columns
*/
readonly tracks_by_pk: <T extends Array<Selection>>(
variables: { id?: Variable<"id"> | number },
select: (t: tracksSelector) => T
) => Field<
"tracks_by_pk",
[Argument<"id", Variable<"id"> | number>],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "users"
*/
readonly users: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | users_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | users_order_by;
where?: Variable<"where"> | users_bool_exp;
},
select: (t: usersSelector) => T
) => Field<
"users",
[
Argument<"distinct_on", Variable<"distinct_on"> | users_select_column>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | users_order_by>,
Argument<"where", Variable<"where"> | users_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch aggregated fields from the table: "users"
*/
readonly users_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: Variable<"distinct_on"> | users_select_column;
limit?: Variable<"limit"> | number;
offset?: Variable<"offset"> | number;
order_by?: Variable<"order_by"> | users_order_by;
where?: Variable<"where"> | users_bool_exp;
},
select: (t: users_aggregateSelector) => T
) => Field<
"users_aggregate",
[
Argument<"distinct_on", Variable<"distinct_on"> | users_select_column>,
Argument<"limit", Variable<"limit"> | number>,
Argument<"offset", Variable<"offset"> | number>,
Argument<"order_by", Variable<"order_by"> | users_order_by>,
Argument<"where", Variable<"where"> | users_bool_exp>
],
SelectionSet<T>
>;
/**
* @description fetch data from the table: "users" using primary key columns
*/
readonly users_by_pk: <T extends Array<Selection>>(
variables: { id?: Variable<"id"> | number },
select: (t: usersSelector) => T
) => Field<
"users_by_pk",
[Argument<"id", Variable<"id"> | number>],
SelectionSet<T>
>;
}
/**
 * Selector implementation of the query root. Every list/aggregate field
 * serializes the same five arguments (distinct_on with its column-enum map,
 * limit, offset, order_by, where); every *_by_pk field serializes a lone
 * `id`. Argument arrays are kept inline in the Field constructor calls so
 * tuple-type inference matches the declared selector signatures.
 */
export const query_root: query_rootSelector = {
  __typename: () => new Field("__typename"),
  /** @description fetch data from the table: "bookmarks" */
  bookmarks: (variables, select) =>
    new Field(
      "bookmarks",
      [
        new Argument("distinct_on", variables.distinct_on, bookmarks_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(bookmarks))
    ),
  /** @description fetch aggregated fields from the table: "bookmarks" */
  bookmarks_aggregate: (variables, select) =>
    new Field(
      "bookmarks_aggregate",
      [
        new Argument("distinct_on", variables.distinct_on, bookmarks_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(bookmarks_aggregate))
    ),
  /** @description fetch data from the table: "bookmarks" using primary key columns */
  bookmarks_by_pk: (variables, select) =>
    new Field(
      "bookmarks_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(bookmarks))
    ),
  /** @description fetch data from the table: "playlist_items" */
  playlist_items: (variables, select) =>
    new Field(
      "playlist_items",
      [
        new Argument("distinct_on", variables.distinct_on, playlist_items_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(playlist_items))
    ),
  /** @description fetch aggregated fields from the table: "playlist_items" */
  playlist_items_aggregate: (variables, select) =>
    new Field(
      "playlist_items_aggregate",
      [
        new Argument("distinct_on", variables.distinct_on, playlist_items_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(playlist_items_aggregate))
    ),
  /** @description fetch data from the table: "playlist_items" using primary key columns */
  playlist_items_by_pk: (variables, select) =>
    new Field(
      "playlist_items_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(playlist_items))
    ),
  /** @description fetch data from the table: "playlists" */
  playlists: (variables, select) =>
    new Field(
      "playlists",
      [
        new Argument("distinct_on", variables.distinct_on, playlists_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(playlists))
    ),
  /** @description fetch aggregated fields from the table: "playlists" */
  playlists_aggregate: (variables, select) =>
    new Field(
      "playlists_aggregate",
      [
        new Argument("distinct_on", variables.distinct_on, playlists_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(playlists_aggregate))
    ),
  /** @description fetch data from the table: "playlists" using primary key columns */
  playlists_by_pk: (variables, select) =>
    new Field(
      "playlists_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(playlists))
    ),
  /** @description fetch data from the table: "tracks" */
  tracks: (variables, select) =>
    new Field(
      "tracks",
      [
        new Argument("distinct_on", variables.distinct_on, tracks_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(tracks))
    ),
  /** @description fetch aggregated fields from the table: "tracks" */
  tracks_aggregate: (variables, select) =>
    new Field(
      "tracks_aggregate",
      [
        new Argument("distinct_on", variables.distinct_on, tracks_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(tracks_aggregate))
    ),
  /** @description fetch data from the table: "tracks" using primary key columns */
  tracks_by_pk: (variables, select) =>
    new Field(
      "tracks_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(tracks))
    ),
  /** @description fetch data from the table: "users" */
  users: (variables, select) =>
    new Field(
      "users",
      [
        new Argument("distinct_on", variables.distinct_on, users_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(users))
    ),
  /** @description fetch aggregated fields from the table: "users" */
  users_aggregate: (variables, select) =>
    new Field(
      "users_aggregate",
      [
        new Argument("distinct_on", variables.distinct_on, users_select_column),
        new Argument("limit", variables.limit),
        new Argument("offset", variables.offset),
        new Argument("order_by", variables.order_by),
        new Argument("where", variables.where),
      ],
      new SelectionSet(select(users_aggregate))
    ),
  /** @description fetch data from the table: "users" using primary key columns */
  users_by_pk: (variables, select) =>
    new Field(
      "users_by_pk",
      [new Argument("id", variables.id)],
      new SelectionSet(select(users))
    ),
};
/**
 * Result shape of the GraphQL `subscription_root` type: one property per
 * subscribable table, with a list field, an aggregate field, and a
 * primary-key lookup (nullable — the row may not exist) for each.
 */
export interface Isubscription_root {
  readonly __typename: "subscription_root";
  readonly bookmarks: ReadonlyArray<Ibookmarks>;
  readonly bookmarks_aggregate: Ibookmarks_aggregate;
  readonly bookmarks_by_pk: Ibookmarks | null;
  readonly playlist_items: ReadonlyArray<Iplaylist_items>;
  readonly playlist_items_aggregate: Iplaylist_items_aggregate;
  readonly playlist_items_by_pk: Iplaylist_items | null;
  readonly playlists: ReadonlyArray<Iplaylists>;
  readonly playlists_aggregate: Iplaylists_aggregate;
  readonly playlists_by_pk: Iplaylists | null;
  readonly tracks: ReadonlyArray<Itracks>;
  readonly tracks_aggregate: Itracks_aggregate;
  readonly tracks_by_pk: Itracks | null;
  readonly users: ReadonlyArray<Iusers>;
  readonly users_aggregate: Iusers_aggregate;
  readonly users_by_pk: Iusers | null;
}
/**
 * Builder interface for `subscription_root` fields. Each method accepts the
 * field's GraphQL arguments (`variables` — each value may be a literal or a
 * `Variable` placeholder for a document-level variable) plus a sub-selection
 * callback, and returns a typed `Field` AST node whose argument tuple and
 * `SelectionSet` mirror exactly what was passed in.
 */
interface subscription_rootSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description fetch data from the table: "bookmarks"
   */
  readonly bookmarks: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | bookmarks_order_by;
      where?: Variable<"where"> | bookmarks_bool_exp;
    },
    select: (t: bookmarksSelector) => T
  ) => Field<
    "bookmarks",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | bookmarks_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
      Argument<"where", Variable<"where"> | bookmarks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch aggregated fields from the table: "bookmarks"
   */
  readonly bookmarks_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | bookmarks_order_by;
      where?: Variable<"where"> | bookmarks_bool_exp;
    },
    select: (t: bookmarks_aggregateSelector) => T
  ) => Field<
    "bookmarks_aggregate",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | bookmarks_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
      Argument<"where", Variable<"where"> | bookmarks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "bookmarks" using primary key columns
   */
  readonly bookmarks_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: bookmarksSelector) => T
  ) => Field<
    "bookmarks_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "playlist_items"
   */
  readonly playlist_items: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | playlist_items_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | playlist_items_order_by;
      where?: Variable<"where"> | playlist_items_bool_exp;
    },
    select: (t: playlist_itemsSelector) => T
  ) => Field<
    "playlist_items",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | playlist_items_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | playlist_items_order_by>,
      Argument<"where", Variable<"where"> | playlist_items_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch aggregated fields from the table: "playlist_items"
   */
  readonly playlist_items_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | playlist_items_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | playlist_items_order_by;
      where?: Variable<"where"> | playlist_items_bool_exp;
    },
    select: (t: playlist_items_aggregateSelector) => T
  ) => Field<
    "playlist_items_aggregate",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | playlist_items_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | playlist_items_order_by>,
      Argument<"where", Variable<"where"> | playlist_items_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "playlist_items" using primary key columns
   */
  readonly playlist_items_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: playlist_itemsSelector) => T
  ) => Field<
    "playlist_items_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "playlists"
   */
  readonly playlists: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | playlists_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | playlists_order_by;
      where?: Variable<"where"> | playlists_bool_exp;
    },
    select: (t: playlistsSelector) => T
  ) => Field<
    "playlists",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | playlists_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | playlists_order_by>,
      Argument<"where", Variable<"where"> | playlists_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch aggregated fields from the table: "playlists"
   */
  readonly playlists_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | playlists_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | playlists_order_by;
      where?: Variable<"where"> | playlists_bool_exp;
    },
    select: (t: playlists_aggregateSelector) => T
  ) => Field<
    "playlists_aggregate",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | playlists_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | playlists_order_by>,
      Argument<"where", Variable<"where"> | playlists_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "playlists" using primary key columns
   */
  readonly playlists_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: playlistsSelector) => T
  ) => Field<
    "playlists_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "tracks"
   */
  readonly tracks: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | tracks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | tracks_order_by;
      where?: Variable<"where"> | tracks_bool_exp;
    },
    select: (t: tracksSelector) => T
  ) => Field<
    "tracks",
    [
      Argument<"distinct_on", Variable<"distinct_on"> | tracks_select_column>,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | tracks_order_by>,
      Argument<"where", Variable<"where"> | tracks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch aggregated fields from the table: "tracks"
   */
  readonly tracks_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | tracks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | tracks_order_by;
      where?: Variable<"where"> | tracks_bool_exp;
    },
    select: (t: tracks_aggregateSelector) => T
  ) => Field<
    "tracks_aggregate",
    [
      Argument<"distinct_on", Variable<"distinct_on"> | tracks_select_column>,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | tracks_order_by>,
      Argument<"where", Variable<"where"> | tracks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "tracks" using primary key columns
   */
  readonly tracks_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: tracksSelector) => T
  ) => Field<
    "tracks_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "users"
   */
  readonly users: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | users_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | users_order_by;
      where?: Variable<"where"> | users_bool_exp;
    },
    select: (t: usersSelector) => T
  ) => Field<
    "users",
    [
      Argument<"distinct_on", Variable<"distinct_on"> | users_select_column>,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | users_order_by>,
      Argument<"where", Variable<"where"> | users_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch aggregated fields from the table: "users"
   */
  readonly users_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | users_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | users_order_by;
      where?: Variable<"where"> | users_bool_exp;
    },
    select: (t: users_aggregateSelector) => T
  ) => Field<
    "users_aggregate",
    [
      Argument<"distinct_on", Variable<"distinct_on"> | users_select_column>,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | users_order_by>,
      Argument<"where", Variable<"where"> | users_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description fetch data from the table: "users" using primary key columns
   */
  readonly users_by_pk: <T extends Array<Selection>>(
    variables: { id?: Variable<"id"> | number },
    select: (t: usersSelector) => T
  ) => Field<
    "users_by_pk",
    [Argument<"id", Variable<"id"> | number>],
    SelectionSet<T>
  >;
}
/**
 * Concrete `subscription_rootSelector` implementation. Every collection
 * field builds the same five-argument list (distinct_on / limit / offset /
 * order_by / where); `*_by_pk` fields take only `id`. Parameter `v` holds
 * the GraphQL arguments, `s` the sub-selection callback.
 */
export const subscription_root: subscription_rootSelector = {
  __typename: () => new Field("__typename"),
  /**
   * @description fetch data from the table: "bookmarks"
   */
  bookmarks: (v, s) =>
    new Field(
      "bookmarks",
      [
        new Argument("distinct_on", v.distinct_on, bookmarks_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(bookmarks))
    ),
  /**
   * @description fetch aggregated fields from the table: "bookmarks"
   */
  bookmarks_aggregate: (v, s) =>
    new Field(
      "bookmarks_aggregate",
      [
        new Argument("distinct_on", v.distinct_on, bookmarks_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(bookmarks_aggregate))
    ),
  /**
   * @description fetch data from the table: "bookmarks" using primary key columns
   */
  bookmarks_by_pk: (v, s) =>
    new Field(
      "bookmarks_by_pk",
      [new Argument("id", v.id)],
      new SelectionSet(s(bookmarks))
    ),
  /**
   * @description fetch data from the table: "playlist_items"
   */
  playlist_items: (v, s) =>
    new Field(
      "playlist_items",
      [
        new Argument(
          "distinct_on",
          v.distinct_on,
          playlist_items_select_column
        ),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(playlist_items))
    ),
  /**
   * @description fetch aggregated fields from the table: "playlist_items"
   */
  playlist_items_aggregate: (v, s) =>
    new Field(
      "playlist_items_aggregate",
      [
        new Argument(
          "distinct_on",
          v.distinct_on,
          playlist_items_select_column
        ),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(playlist_items_aggregate))
    ),
  /**
   * @description fetch data from the table: "playlist_items" using primary key columns
   */
  playlist_items_by_pk: (v, s) =>
    new Field(
      "playlist_items_by_pk",
      [new Argument("id", v.id)],
      new SelectionSet(s(playlist_items))
    ),
  /**
   * @description fetch data from the table: "playlists"
   */
  playlists: (v, s) =>
    new Field(
      "playlists",
      [
        new Argument("distinct_on", v.distinct_on, playlists_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(playlists))
    ),
  /**
   * @description fetch aggregated fields from the table: "playlists"
   */
  playlists_aggregate: (v, s) =>
    new Field(
      "playlists_aggregate",
      [
        new Argument("distinct_on", v.distinct_on, playlists_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(playlists_aggregate))
    ),
  /**
   * @description fetch data from the table: "playlists" using primary key columns
   */
  playlists_by_pk: (v, s) =>
    new Field(
      "playlists_by_pk",
      [new Argument("id", v.id)],
      new SelectionSet(s(playlists))
    ),
  /**
   * @description fetch data from the table: "tracks"
   */
  tracks: (v, s) =>
    new Field(
      "tracks",
      [
        new Argument("distinct_on", v.distinct_on, tracks_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(tracks))
    ),
  /**
   * @description fetch aggregated fields from the table: "tracks"
   */
  tracks_aggregate: (v, s) =>
    new Field(
      "tracks_aggregate",
      [
        new Argument("distinct_on", v.distinct_on, tracks_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(tracks_aggregate))
    ),
  /**
   * @description fetch data from the table: "tracks" using primary key columns
   */
  tracks_by_pk: (v, s) =>
    new Field(
      "tracks_by_pk",
      [new Argument("id", v.id)],
      new SelectionSet(s(tracks))
    ),
  /**
   * @description fetch data from the table: "users"
   */
  users: (v, s) =>
    new Field(
      "users",
      [
        new Argument("distinct_on", v.distinct_on, users_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(users))
    ),
  /**
   * @description fetch aggregated fields from the table: "users"
   */
  users_aggregate: (v, s) =>
    new Field(
      "users_aggregate",
      [
        new Argument("distinct_on", v.distinct_on, users_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(users_aggregate))
    ),
  /**
   * @description fetch data from the table: "users" using primary key columns
   */
  users_by_pk: (v, s) =>
    new Field(
      "users_by_pk",
      [new Argument("id", v.id)],
      new SelectionSet(s(users))
    ),
};
/**
 * Result row shape for the "tracks" table.
 */
export interface Itracks {
  readonly __typename: "tracks";
  // NOTE(review): custom scalar — presumably a timestamp; confirm against schema.
  readonly createdUtc: unknown;
  readonly id: number;
  readonly name: string;
  readonly napsterId: string | null;
}
/** Builder interface for selecting scalar columns of "tracks". */
interface tracksSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly napsterId: () => Field<"napsterId">;
}
/** Selector instance for "tracks": each property yields a bare Field node. */
export const tracks: tracksSelector = {
  __typename: () => new Field("__typename"),
  createdUtc: () => new Field("createdUtc"),
  id: () => new Field("id"),
  name: () => new Field("name"),
  napsterId: () => new Field("napsterId"),
};
/**
 * Result shape of the "tracks" aggregate wrapper: aggregate stats plus the
 * matching row nodes.
 */
export interface Itracks_aggregate {
  readonly __typename: "tracks_aggregate";
  readonly aggregate: Itracks_aggregate_fields | null;
  readonly nodes: ReadonlyArray<Itracks>;
}
/** Builder interface for the "tracks_aggregate" wrapper object. */
interface tracks_aggregateSelector {
  readonly __typename: () => Field<"__typename">;
  readonly aggregate: <T extends Array<Selection>>(
    select: (t: tracks_aggregate_fieldsSelector) => T
  ) => Field<"aggregate", never, SelectionSet<T>>;
  readonly nodes: <T extends Array<Selection>>(
    select: (t: tracksSelector) => T
  ) => Field<"nodes", never, SelectionSet<T>>;
}
/** Selector instance for "tracks_aggregate" (no arguments on sub-fields). */
export const tracks_aggregate: tracks_aggregateSelector = {
  __typename: () => new Field("__typename"),
  aggregate: (select) =>
    new Field(
      "aggregate",
      undefined as never,
      new SelectionSet(select(tracks_aggregate_fields))
    ),
  nodes: (select) =>
    new Field("nodes", undefined as never, new SelectionSet(select(tracks))),
};
/**
 * Result shape of the aggregate statistics computed over "tracks" rows
 * (each member is null when not requested / not computable).
 */
export interface Itracks_aggregate_fields {
  readonly __typename: "tracks_aggregate_fields";
  readonly avg: Itracks_avg_fields | null;
  readonly count: number | null;
  readonly max: Itracks_max_fields | null;
  readonly min: Itracks_min_fields | null;
  readonly stddev: Itracks_stddev_fields | null;
  readonly stddev_pop: Itracks_stddev_pop_fields | null;
  readonly stddev_samp: Itracks_stddev_samp_fields | null;
  readonly sum: Itracks_sum_fields | null;
  readonly var_pop: Itracks_var_pop_fields | null;
  readonly var_samp: Itracks_var_samp_fields | null;
  readonly variance: Itracks_variance_fields | null;
}
/**
 * Builder interface for "tracks_aggregate_fields". All members except
 * `count` take a sub-selection; `count` instead takes the optional
 * `columns` / `distinct` GraphQL arguments.
 */
interface tracks_aggregate_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly avg: <T extends Array<Selection>>(
    select: (t: tracks_avg_fieldsSelector) => T
  ) => Field<"avg", never, SelectionSet<T>>;
  readonly count: (variables: {
    columns?: Variable<"columns"> | tracks_select_column;
    distinct?: Variable<"distinct"> | boolean;
  }) => Field<
    "count",
    [
      Argument<"columns", Variable<"columns"> | tracks_select_column>,
      Argument<"distinct", Variable<"distinct"> | boolean>
    ]
  >;
  readonly max: <T extends Array<Selection>>(
    select: (t: tracks_max_fieldsSelector) => T
  ) => Field<"max", never, SelectionSet<T>>;
  readonly min: <T extends Array<Selection>>(
    select: (t: tracks_min_fieldsSelector) => T
  ) => Field<"min", never, SelectionSet<T>>;
  readonly stddev: <T extends Array<Selection>>(
    select: (t: tracks_stddev_fieldsSelector) => T
  ) => Field<"stddev", never, SelectionSet<T>>;
  readonly stddev_pop: <T extends Array<Selection>>(
    select: (t: tracks_stddev_pop_fieldsSelector) => T
  ) => Field<"stddev_pop", never, SelectionSet<T>>;
  readonly stddev_samp: <T extends Array<Selection>>(
    select: (t: tracks_stddev_samp_fieldsSelector) => T
  ) => Field<"stddev_samp", never, SelectionSet<T>>;
  readonly sum: <T extends Array<Selection>>(
    select: (t: tracks_sum_fieldsSelector) => T
  ) => Field<"sum", never, SelectionSet<T>>;
  readonly var_pop: <T extends Array<Selection>>(
    select: (t: tracks_var_pop_fieldsSelector) => T
  ) => Field<"var_pop", never, SelectionSet<T>>;
  readonly var_samp: <T extends Array<Selection>>(
    select: (t: tracks_var_samp_fieldsSelector) => T
  ) => Field<"var_samp", never, SelectionSet<T>>;
  readonly variance: <T extends Array<Selection>>(
    select: (t: tracks_variance_fieldsSelector) => T
  ) => Field<"variance", never, SelectionSet<T>>;
}
export const tracks_aggregate_fields: tracks_aggregate_fieldsSelector = {
__typename: () => new Field("__typename"),
avg: (select) =>
new Field(
"avg",
undefined as never,
new SelectionSet(select(tracks_avg_fields))
),
count: (variables) => new Field("count"),
max: (select) =>
new Field(
"max",
undefined as never,
new SelectionSet(select(tracks_max_fields))
),
min: (select) =>
new Field(
"min",
undefined as never,
new SelectionSet(select(tracks_min_fields))
),
stddev: (select) =>
new Field(
"stddev",
undefined as never,
new SelectionSet(select(tracks_stddev_fields))
),
stddev_pop: (select) =>
new Field(
"stddev_pop",
undefined as never,
new SelectionSet(select(tracks_stddev_pop_fields))
),
stddev_samp: (select) =>
new Field(
"stddev_samp",
undefined as never,
new SelectionSet(select(tracks_stddev_samp_fields))
),
sum: (select) =>
new Field(
"sum",
undefined as never,
new SelectionSet(select(tracks_sum_fields))
),
var_pop: (select) =>
new Field(
"var_pop",
undefined as never,
new SelectionSet(select(tracks_var_pop_fields))
),
var_samp: (select) =>
new Field(
"var_samp",
undefined as never,
new SelectionSet(select(tracks_var_samp_fields))
),
variance: (select) =>
new Field(
"variance",
undefined as never,
new SelectionSet(select(tracks_variance_fields))
),
};
// ---------------------------------------------------------------------------
// Per-statistic result shapes and selectors for "tracks" aggregates.
// Numeric statistics (avg/stddev/sum/variance…) expose only the numeric `id`
// column; max/min also expose the non-numeric columns.
// ---------------------------------------------------------------------------
export interface Itracks_avg_fields {
  readonly __typename: "tracks_avg_fields";
  readonly id: number | null;
}
interface tracks_avg_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_avg_fields: tracks_avg_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
export interface Itracks_max_fields {
  readonly __typename: "tracks_max_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly name: string | null;
  readonly napsterId: string | null;
}
interface tracks_max_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly napsterId: () => Field<"napsterId">;
}
export const tracks_max_fields: tracks_max_fieldsSelector = {
  __typename: () => new Field("__typename"),
  createdUtc: () => new Field("createdUtc"),
  id: () => new Field("id"),
  name: () => new Field("name"),
  napsterId: () => new Field("napsterId"),
};
export interface Itracks_min_fields {
  readonly __typename: "tracks_min_fields";
  readonly createdUtc: unknown | null;
  readonly id: number | null;
  readonly name: string | null;
  readonly napsterId: string | null;
}
interface tracks_min_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly id: () => Field<"id">;
  readonly name: () => Field<"name">;
  readonly napsterId: () => Field<"napsterId">;
}
export const tracks_min_fields: tracks_min_fieldsSelector = {
  __typename: () => new Field("__typename"),
  createdUtc: () => new Field("createdUtc"),
  id: () => new Field("id"),
  name: () => new Field("name"),
  napsterId: () => new Field("napsterId"),
};
/** Result shape returned by "tracks" mutations (row count + affected rows). */
export interface Itracks_mutation_response {
  readonly __typename: "tracks_mutation_response";
  readonly affected_rows: number;
  readonly returning: ReadonlyArray<Itracks>;
}
interface tracks_mutation_responseSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description number of affected rows by the mutation
   */
  readonly affected_rows: () => Field<"affected_rows">;
  /**
   * @description data of the affected rows by the mutation
   */
  readonly returning: <T extends Array<Selection>>(
    select: (t: tracksSelector) => T
  ) => Field<"returning", never, SelectionSet<T>>;
}
export const tracks_mutation_response: tracks_mutation_responseSelector = {
  __typename: () => new Field("__typename"),
  /**
   * @description number of affected rows by the mutation
   */
  affected_rows: () => new Field("affected_rows"),
  /**
   * @description data of the affected rows by the mutation
   */
  returning: (select) =>
    new Field(
      "returning",
      undefined as never,
      new SelectionSet(select(tracks))
    ),
};
export interface Itracks_stddev_fields {
  readonly __typename: "tracks_stddev_fields";
  readonly id: number | null;
}
interface tracks_stddev_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_stddev_fields: tracks_stddev_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
export interface Itracks_stddev_pop_fields {
  readonly __typename: "tracks_stddev_pop_fields";
  readonly id: number | null;
}
interface tracks_stddev_pop_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_stddev_pop_fields: tracks_stddev_pop_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
export interface Itracks_stddev_samp_fields {
  readonly __typename: "tracks_stddev_samp_fields";
  readonly id: number | null;
}
interface tracks_stddev_samp_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_stddev_samp_fields: tracks_stddev_samp_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
export interface Itracks_sum_fields {
  readonly __typename: "tracks_sum_fields";
  readonly id: number | null;
}
interface tracks_sum_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_sum_fields: tracks_sum_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
export interface Itracks_var_pop_fields {
  readonly __typename: "tracks_var_pop_fields";
  readonly id: number | null;
}
interface tracks_var_pop_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_var_pop_fields: tracks_var_pop_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
export interface Itracks_var_samp_fields {
  readonly __typename: "tracks_var_samp_fields";
  readonly id: number | null;
}
interface tracks_var_samp_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_var_samp_fields: tracks_var_samp_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
export interface Itracks_variance_fields {
  readonly __typename: "tracks_variance_fields";
  readonly id: number | null;
}
interface tracks_variance_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
export const tracks_variance_fields: tracks_variance_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/**
 * Result row shape for the "users" table, including its array relationships
 * to "bookmarks" and "playlists".
 */
export interface Iusers {
  readonly __typename: "users";
  readonly bookmarks: ReadonlyArray<Ibookmarks>;
  readonly bookmarks_aggregate: Ibookmarks_aggregate;
  // NOTE(review): custom scalar — presumably a timestamp; confirm against schema.
  readonly createdUtc: unknown | null;
  readonly email: string | null;
  readonly id: number;
  readonly playlists: ReadonlyArray<Iplaylists>;
  readonly playlists_aggregate: Iplaylists_aggregate;
}
/**
 * Builder interface for "users" fields: scalar columns plus the filtered /
 * paginated relationship fields (each argument accepts a literal value or a
 * `Variable` placeholder).
 */
interface usersSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description An array relationship
   */
  readonly bookmarks: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | bookmarks_order_by;
      where?: Variable<"where"> | bookmarks_bool_exp;
    },
    select: (t: bookmarksSelector) => T
  ) => Field<
    "bookmarks",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | bookmarks_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
      Argument<"where", Variable<"where"> | bookmarks_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description An aggregated array relationship
   */
  readonly bookmarks_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | bookmarks_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | bookmarks_order_by;
      where?: Variable<"where"> | bookmarks_bool_exp;
    },
    select: (t: bookmarks_aggregateSelector) => T
  ) => Field<
    "bookmarks_aggregate",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | bookmarks_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | bookmarks_order_by>,
      Argument<"where", Variable<"where"> | bookmarks_bool_exp>
    ],
    SelectionSet<T>
  >;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly email: () => Field<"email">;
  readonly id: () => Field<"id">;
  /**
   * @description An array relationship
   */
  readonly playlists: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | playlists_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | playlists_order_by;
      where?: Variable<"where"> | playlists_bool_exp;
    },
    select: (t: playlistsSelector) => T
  ) => Field<
    "playlists",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | playlists_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | playlists_order_by>,
      Argument<"where", Variable<"where"> | playlists_bool_exp>
    ],
    SelectionSet<T>
  >;
  /**
   * @description An aggregated array relationship
   */
  readonly playlists_aggregate: <T extends Array<Selection>>(
    variables: {
      distinct_on?: Variable<"distinct_on"> | playlists_select_column;
      limit?: Variable<"limit"> | number;
      offset?: Variable<"offset"> | number;
      order_by?: Variable<"order_by"> | playlists_order_by;
      where?: Variable<"where"> | playlists_bool_exp;
    },
    select: (t: playlists_aggregateSelector) => T
  ) => Field<
    "playlists_aggregate",
    [
      Argument<
        "distinct_on",
        Variable<"distinct_on"> | playlists_select_column
      >,
      Argument<"limit", Variable<"limit"> | number>,
      Argument<"offset", Variable<"offset"> | number>,
      Argument<"order_by", Variable<"order_by"> | playlists_order_by>,
      Argument<"where", Variable<"where"> | playlists_bool_exp>
    ],
    SelectionSet<T>
  >;
}
/**
 * Selector instance for "users": scalar columns yield bare Field nodes,
 * relationship fields build the standard five-argument list from `v` and
 * wrap the sub-selection produced by `s`.
 */
export const users: usersSelector = {
  __typename: () => new Field("__typename"),
  /**
   * @description An array relationship
   */
  bookmarks: (v, s) =>
    new Field(
      "bookmarks",
      [
        new Argument("distinct_on", v.distinct_on, bookmarks_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(bookmarks))
    ),
  /**
   * @description An aggregated array relationship
   */
  bookmarks_aggregate: (v, s) =>
    new Field(
      "bookmarks_aggregate",
      [
        new Argument("distinct_on", v.distinct_on, bookmarks_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(bookmarks_aggregate))
    ),
  createdUtc: () => new Field("createdUtc"),
  email: () => new Field("email"),
  id: () => new Field("id"),
  /**
   * @description An array relationship
   */
  playlists: (v, s) =>
    new Field(
      "playlists",
      [
        new Argument("distinct_on", v.distinct_on, playlists_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(playlists))
    ),
  /**
   * @description An aggregated array relationship
   */
  playlists_aggregate: (v, s) =>
    new Field(
      "playlists_aggregate",
      [
        new Argument("distinct_on", v.distinct_on, playlists_select_column),
        new Argument("limit", v.limit),
        new Argument("offset", v.offset),
        new Argument("order_by", v.order_by),
        new Argument("where", v.where),
      ],
      new SelectionSet(s(playlists_aggregate))
    ),
};
/** Result shape of a users_aggregate query: aggregate stats plus rows. */
export interface Iusers_aggregate {
  readonly __typename: "users_aggregate";
  readonly aggregate: Iusers_aggregate_fields | null;
  readonly nodes: ReadonlyArray<Iusers>;
}
/** Selector mirror of Iusers_aggregate; each member builds a Field node. */
interface users_aggregateSelector {
  readonly __typename: () => Field<"__typename">;
  readonly aggregate: <T extends Array<Selection>>(
    select: (t: users_aggregate_fieldsSelector) => T
  ) => Field<"aggregate", never, SelectionSet<T>>;
  readonly nodes: <T extends Array<Selection>>(
    select: (t: usersSelector) => T
  ) => Field<"nodes", never, SelectionSet<T>>;
}
/** Singleton selector for the users_aggregate object. */
export const users_aggregate: users_aggregateSelector = {
  __typename: () => new Field("__typename"),
  aggregate: (select) =>
    new Field(
      "aggregate",
      undefined as never,
      new SelectionSet(select(users_aggregate_fields))
    ),
  nodes: (select) =>
    new Field("nodes", undefined as never, new SelectionSet(select(users))),
};
/**
 * Result shape of users aggregate computations: row count plus the
 * standard per-column statistical aggregates (each null when not selected
 * or not applicable).
 */
export interface Iusers_aggregate_fields {
  readonly __typename: "users_aggregate_fields";
  readonly avg: Iusers_avg_fields | null;
  readonly count: number | null;
  readonly max: Iusers_max_fields | null;
  readonly min: Iusers_min_fields | null;
  readonly stddev: Iusers_stddev_fields | null;
  readonly stddev_pop: Iusers_stddev_pop_fields | null;
  readonly stddev_samp: Iusers_stddev_samp_fields | null;
  readonly sum: Iusers_sum_fields | null;
  readonly var_pop: Iusers_var_pop_fields | null;
  readonly var_samp: Iusers_var_samp_fields | null;
  readonly variance: Iusers_variance_fields | null;
}
/**
 * Selector mirror of Iusers_aggregate_fields. Note that `count` is the
 * one member taking variables: its return type requires `columns` and
 * `distinct` Argument nodes in the built Field.
 */
interface users_aggregate_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly avg: <T extends Array<Selection>>(
    select: (t: users_avg_fieldsSelector) => T
  ) => Field<"avg", never, SelectionSet<T>>;
  readonly count: (variables: {
    columns?: Variable<"columns"> | users_select_column;
    distinct?: Variable<"distinct"> | boolean;
  }) => Field<
    "count",
    [
      Argument<"columns", Variable<"columns"> | users_select_column>,
      Argument<"distinct", Variable<"distinct"> | boolean>
    ]
  >;
  readonly max: <T extends Array<Selection>>(
    select: (t: users_max_fieldsSelector) => T
  ) => Field<"max", never, SelectionSet<T>>;
  readonly min: <T extends Array<Selection>>(
    select: (t: users_min_fieldsSelector) => T
  ) => Field<"min", never, SelectionSet<T>>;
  readonly stddev: <T extends Array<Selection>>(
    select: (t: users_stddev_fieldsSelector) => T
  ) => Field<"stddev", never, SelectionSet<T>>;
  readonly stddev_pop: <T extends Array<Selection>>(
    select: (t: users_stddev_pop_fieldsSelector) => T
  ) => Field<"stddev_pop", never, SelectionSet<T>>;
  readonly stddev_samp: <T extends Array<Selection>>(
    select: (t: users_stddev_samp_fieldsSelector) => T
  ) => Field<"stddev_samp", never, SelectionSet<T>>;
  readonly sum: <T extends Array<Selection>>(
    select: (t: users_sum_fieldsSelector) => T
  ) => Field<"sum", never, SelectionSet<T>>;
  readonly var_pop: <T extends Array<Selection>>(
    select: (t: users_var_pop_fieldsSelector) => T
  ) => Field<"var_pop", never, SelectionSet<T>>;
  readonly var_samp: <T extends Array<Selection>>(
    select: (t: users_var_samp_fieldsSelector) => T
  ) => Field<"var_samp", never, SelectionSet<T>>;
  readonly variance: <T extends Array<Selection>>(
    select: (t: users_variance_fieldsSelector) => T
  ) => Field<"variance", never, SelectionSet<T>>;
}
export const users_aggregate_fields: users_aggregate_fieldsSelector = {
__typename: () => new Field("__typename"),
avg: (select) =>
new Field(
"avg",
undefined as never,
new SelectionSet(select(users_avg_fields))
),
count: (variables) => new Field("count"),
max: (select) =>
new Field(
"max",
undefined as never,
new SelectionSet(select(users_max_fields))
),
min: (select) =>
new Field(
"min",
undefined as never,
new SelectionSet(select(users_min_fields))
),
stddev: (select) =>
new Field(
"stddev",
undefined as never,
new SelectionSet(select(users_stddev_fields))
),
stddev_pop: (select) =>
new Field(
"stddev_pop",
undefined as never,
new SelectionSet(select(users_stddev_pop_fields))
),
stddev_samp: (select) =>
new Field(
"stddev_samp",
undefined as never,
new SelectionSet(select(users_stddev_samp_fields))
),
sum: (select) =>
new Field(
"sum",
undefined as never,
new SelectionSet(select(users_sum_fields))
),
var_pop: (select) =>
new Field(
"var_pop",
undefined as never,
new SelectionSet(select(users_var_pop_fields))
),
var_samp: (select) =>
new Field(
"var_samp",
undefined as never,
new SelectionSet(select(users_var_samp_fields))
),
variance: (select) =>
new Field(
"variance",
undefined as never,
new SelectionSet(select(users_variance_fields))
),
};
/** Result shape of avg() over the numeric "users" columns. */
export interface Iusers_avg_fields {
  readonly __typename: "users_avg_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_avg_fields. */
interface users_avg_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users avg() aggregate. */
export const users_avg_fields: users_avg_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/**
 * Result shape of max() over comparable "users" columns.
 * createdUtc is a custom scalar, hence `unknown`.
 */
export interface Iusers_max_fields {
  readonly __typename: "users_max_fields";
  readonly createdUtc: unknown | null;
  readonly email: string | null;
  readonly id: number | null;
}
/** Selector mirror of Iusers_max_fields. */
interface users_max_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly email: () => Field<"email">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users max() aggregate. */
export const users_max_fields: users_max_fieldsSelector = {
  __typename: () => new Field("__typename"),
  createdUtc: () => new Field("createdUtc"),
  email: () => new Field("email"),
  id: () => new Field("id"),
};
/**
 * Result shape of min() over comparable "users" columns.
 * createdUtc is a custom scalar, hence `unknown`.
 */
export interface Iusers_min_fields {
  readonly __typename: "users_min_fields";
  readonly createdUtc: unknown | null;
  readonly email: string | null;
  readonly id: number | null;
}
/** Selector mirror of Iusers_min_fields. */
interface users_min_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly createdUtc: () => Field<"createdUtc">;
  readonly email: () => Field<"email">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users min() aggregate. */
export const users_min_fields: users_min_fieldsSelector = {
  __typename: () => new Field("__typename"),
  createdUtc: () => new Field("createdUtc"),
  email: () => new Field("email"),
  id: () => new Field("id"),
};
/** Result shape returned by insert/update/delete mutations on "users". */
export interface Iusers_mutation_response {
  readonly __typename: "users_mutation_response";
  readonly affected_rows: number;
  readonly returning: ReadonlyArray<Iusers>;
}
/** Selector mirror of Iusers_mutation_response. */
interface users_mutation_responseSelector {
  readonly __typename: () => Field<"__typename">;
  /**
   * @description number of affected rows by the mutation
   */
  readonly affected_rows: () => Field<"affected_rows">;
  /**
   * @description data of the affected rows by the mutation
   */
  readonly returning: <T extends Array<Selection>>(
    select: (t: usersSelector) => T
  ) => Field<"returning", never, SelectionSet<T>>;
}
/** Singleton selector for users mutation responses. */
export const users_mutation_response: users_mutation_responseSelector = {
  __typename: () => new Field("__typename"),
  /**
   * @description number of affected rows by the mutation
   */
  affected_rows: () => new Field("affected_rows"),
  /**
   * @description data of the affected rows by the mutation
   */
  returning: (select) =>
    new Field("returning", undefined as never, new SelectionSet(select(users))),
};
/** Result shape of stddev() over the numeric "users" columns. */
export interface Iusers_stddev_fields {
  readonly __typename: "users_stddev_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_stddev_fields. */
interface users_stddev_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users stddev() aggregate. */
export const users_stddev_fields: users_stddev_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/** Result shape of stddev_pop() over the numeric "users" columns. */
export interface Iusers_stddev_pop_fields {
  readonly __typename: "users_stddev_pop_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_stddev_pop_fields. */
interface users_stddev_pop_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users stddev_pop() aggregate. */
export const users_stddev_pop_fields: users_stddev_pop_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/** Result shape of stddev_samp() over the numeric "users" columns. */
export interface Iusers_stddev_samp_fields {
  readonly __typename: "users_stddev_samp_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_stddev_samp_fields. */
interface users_stddev_samp_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users stddev_samp() aggregate. */
export const users_stddev_samp_fields: users_stddev_samp_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/** Result shape of sum() over the numeric "users" columns. */
export interface Iusers_sum_fields {
  readonly __typename: "users_sum_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_sum_fields. */
interface users_sum_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users sum() aggregate. */
export const users_sum_fields: users_sum_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/** Result shape of var_pop() over the numeric "users" columns. */
export interface Iusers_var_pop_fields {
  readonly __typename: "users_var_pop_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_var_pop_fields. */
interface users_var_pop_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users var_pop() aggregate. */
export const users_var_pop_fields: users_var_pop_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/** Result shape of var_samp() over the numeric "users" columns. */
export interface Iusers_var_samp_fields {
  readonly __typename: "users_var_samp_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_var_samp_fields. */
interface users_var_samp_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users var_samp() aggregate. */
export const users_var_samp_fields: users_var_samp_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/** Result shape of variance() over the numeric "users" columns. */
export interface Iusers_variance_fields {
  readonly __typename: "users_variance_fields";
  readonly id: number | null;
}
/** Selector mirror of Iusers_variance_fields. */
interface users_variance_fieldsSelector {
  readonly __typename: () => Field<"__typename">;
  readonly id: () => Field<"id">;
}
/** Singleton selector for the users variance() aggregate. */
export const users_variance_fields: users_variance_fieldsSelector = {
  __typename: () => new Field("__typename"),
  id: () => new Field("id"),
};
/**
 * Build a named top-level GraphQL "query" operation. The `select`
 * callback picks root fields from `query_root`; the resulting selection
 * becomes the operation's body.
 */
export function query<T extends Array<Selection>>(
  name: string,
  select: (t: typeof query_root) => T
): Operation<SelectionSet<T>> {
  const rootSelection = new SelectionSet(select(query_root));
  return new Operation(name, "query", rootSelection);
}
/**
 * Build a named top-level GraphQL "mutation" operation. The `select`
 * callback picks root fields from `mutation_root`; the resulting
 * selection becomes the operation's body.
 */
export function mutation<T extends Array<Selection>>(
  name: string,
  select: (t: typeof mutation_root) => T
): Operation<SelectionSet<T>> {
  const rootSelection = new SelectionSet(select(mutation_root));
  return new Operation(name, "mutation", rootSelection);
}
/**
 * Build a named top-level GraphQL "subscription" operation. The `select`
 * callback picks root fields from `subscription_root`; the resulting
 * selection becomes the operation's body.
 */
export function subscription<T extends Array<Selection>>(
  name: string,
  select: (t: typeof subscription_root) => T
): Operation<SelectionSet<T>> {
  const rootSelection = new SelectionSet(select(subscription_root));
  return new Operation(name, "subscription", rootSelection);
}
export class Hasura implements Client {
public static readonly VERSION = VERSION;
public static readonly SCHEMA_SHA = SCHEMA_SHA;
constructor(public readonly executor: Executor) {}
public readonly query = {
/**
* @description fetch data from the table: "bookmarks"
*/
bookmarks: <T extends Array<Selection>>(
variables: {
distinct_on?: bookmarks_select_column;
limit?: number;
offset?: number;
order_by?: bookmarks_order_by;
where?: bookmarks_bool_exp;
},
select: (t: bookmarksSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<SelectionSet<[Field<"bookmarks", any, SelectionSet<T>>]>>
>(
new Operation(
"bookmarks",
"query",
new SelectionSet([query_root.bookmarks<T>(variables, select)])
)
),
/**
* @description fetch aggregated fields from the table: "bookmarks"
*/
bookmarks_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: bookmarks_select_column;
limit?: number;
offset?: number;
order_by?: bookmarks_order_by;
where?: bookmarks_bool_exp;
},
select: (t: bookmarks_aggregateSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<[Field<"bookmarks_aggregate", any, SelectionSet<T>>]>
>
>(
new Operation(
"bookmarks_aggregate",
"query",
new SelectionSet([
query_root.bookmarks_aggregate<T>(variables, select),
])
)
),
/**
* @description fetch data from the table: "bookmarks" using primary key columns
*/
bookmarks_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: bookmarksSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<[Field<"bookmarks_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"bookmarks_by_pk",
"query",
new SelectionSet([query_root.bookmarks_by_pk<T>(variables, select)])
)
),
/**
* @description fetch data from the table: "playlist_items"
*/
playlist_items: <T extends Array<Selection>>(
variables: {
distinct_on?: playlist_items_select_column;
limit?: number;
offset?: number;
order_by?: playlist_items_order_by;
where?: playlist_items_bool_exp;
},
select: (t: playlist_itemsSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<SelectionSet<[Field<"playlist_items", any, SelectionSet<T>>]>>
>(
new Operation(
"playlist_items",
"query",
new SelectionSet([query_root.playlist_items<T>(variables, select)])
)
),
/**
* @description fetch aggregated fields from the table: "playlist_items"
*/
playlist_items_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: playlist_items_select_column;
limit?: number;
offset?: number;
order_by?: playlist_items_order_by;
where?: playlist_items_bool_exp;
},
select: (t: playlist_items_aggregateSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<
[Field<"playlist_items_aggregate", any, SelectionSet<T>>]
>
>
>(
new Operation(
"playlist_items_aggregate",
"query",
new SelectionSet([
query_root.playlist_items_aggregate<T>(variables, select),
])
)
),
/**
* @description fetch data from the table: "playlist_items" using primary key columns
*/
playlist_items_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: playlist_itemsSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<[Field<"playlist_items_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"playlist_items_by_pk",
"query",
new SelectionSet([
query_root.playlist_items_by_pk<T>(variables, select),
])
)
),
/**
* @description fetch data from the table: "playlists"
*/
playlists: <T extends Array<Selection>>(
variables: {
distinct_on?: playlists_select_column;
limit?: number;
offset?: number;
order_by?: playlists_order_by;
where?: playlists_bool_exp;
},
select: (t: playlistsSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<SelectionSet<[Field<"playlists", any, SelectionSet<T>>]>>
>(
new Operation(
"playlists",
"query",
new SelectionSet([query_root.playlists<T>(variables, select)])
)
),
/**
* @description fetch aggregated fields from the table: "playlists"
*/
playlists_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: playlists_select_column;
limit?: number;
offset?: number;
order_by?: playlists_order_by;
where?: playlists_bool_exp;
},
select: (t: playlists_aggregateSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<[Field<"playlists_aggregate", any, SelectionSet<T>>]>
>
>(
new Operation(
"playlists_aggregate",
"query",
new SelectionSet([
query_root.playlists_aggregate<T>(variables, select),
])
)
),
/**
* @description fetch data from the table: "playlists" using primary key columns
*/
playlists_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: playlistsSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<[Field<"playlists_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"playlists_by_pk",
"query",
new SelectionSet([query_root.playlists_by_pk<T>(variables, select)])
)
),
/**
* @description fetch data from the table: "tracks"
*/
tracks: <T extends Array<Selection>>(
variables: {
distinct_on?: tracks_select_column;
limit?: number;
offset?: number;
order_by?: tracks_order_by;
where?: tracks_bool_exp;
},
select: (t: tracksSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<SelectionSet<[Field<"tracks", any, SelectionSet<T>>]>>
>(
new Operation(
"tracks",
"query",
new SelectionSet([query_root.tracks<T>(variables, select)])
)
),
/**
* @description fetch aggregated fields from the table: "tracks"
*/
tracks_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: tracks_select_column;
limit?: number;
offset?: number;
order_by?: tracks_order_by;
where?: tracks_bool_exp;
},
select: (t: tracks_aggregateSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<[Field<"tracks_aggregate", any, SelectionSet<T>>]>
>
>(
new Operation(
"tracks_aggregate",
"query",
new SelectionSet([query_root.tracks_aggregate<T>(variables, select)])
)
),
/**
* @description fetch data from the table: "tracks" using primary key columns
*/
tracks_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: tracksSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<SelectionSet<[Field<"tracks_by_pk", any, SelectionSet<T>>]>>
>(
new Operation(
"tracks_by_pk",
"query",
new SelectionSet([query_root.tracks_by_pk<T>(variables, select)])
)
),
/**
* @description fetch data from the table: "users"
*/
users: <T extends Array<Selection>>(
variables: {
distinct_on?: users_select_column;
limit?: number;
offset?: number;
order_by?: users_order_by;
where?: users_bool_exp;
},
select: (t: usersSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<SelectionSet<[Field<"users", any, SelectionSet<T>>]>>
>(
new Operation(
"users",
"query",
new SelectionSet([query_root.users<T>(variables, select)])
)
),
/**
* @description fetch aggregated fields from the table: "users"
*/
users_aggregate: <T extends Array<Selection>>(
variables: {
distinct_on?: users_select_column;
limit?: number;
offset?: number;
order_by?: users_order_by;
where?: users_bool_exp;
},
select: (t: users_aggregateSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<
SelectionSet<[Field<"users_aggregate", any, SelectionSet<T>>]>
>
>(
new Operation(
"users_aggregate",
"query",
new SelectionSet([query_root.users_aggregate<T>(variables, select)])
)
),
/**
* @description fetch data from the table: "users" using primary key columns
*/
users_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: usersSelector) => T
) =>
this.executor.execute<
Iquery_root,
Operation<SelectionSet<[Field<"users_by_pk", any, SelectionSet<T>>]>>
>(
new Operation(
"users_by_pk",
"query",
new SelectionSet([query_root.users_by_pk<T>(variables, select)])
)
),
};
public readonly mutate = {
/**
* @description delete data from the table: "bookmarks"
*/
delete_bookmarks: <T extends Array<Selection>>(
variables: { where?: bookmarks_bool_exp },
select: (t: bookmarks_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"delete_bookmarks", any, SelectionSet<T>>]>
>
>(
new Operation(
"delete_bookmarks",
"mutation",
new SelectionSet([
mutation_root.delete_bookmarks<T>(variables, select),
])
)
),
/**
* @description delete single row from the table: "bookmarks"
*/
delete_bookmarks_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: bookmarksSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"delete_bookmarks_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"delete_bookmarks_by_pk",
"mutation",
new SelectionSet([
mutation_root.delete_bookmarks_by_pk<T>(variables, select),
])
)
),
/**
* @description delete data from the table: "playlist_items"
*/
delete_playlist_items: <T extends Array<Selection>>(
variables: { where?: playlist_items_bool_exp },
select: (t: playlist_items_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"delete_playlist_items", any, SelectionSet<T>>]>
>
>(
new Operation(
"delete_playlist_items",
"mutation",
new SelectionSet([
mutation_root.delete_playlist_items<T>(variables, select),
])
)
),
/**
* @description delete single row from the table: "playlist_items"
*/
delete_playlist_items_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: playlist_itemsSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<
[Field<"delete_playlist_items_by_pk", any, SelectionSet<T>>]
>
>
>(
new Operation(
"delete_playlist_items_by_pk",
"mutation",
new SelectionSet([
mutation_root.delete_playlist_items_by_pk<T>(variables, select),
])
)
),
/**
* @description delete data from the table: "playlists"
*/
delete_playlists: <T extends Array<Selection>>(
variables: { where?: playlists_bool_exp },
select: (t: playlists_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"delete_playlists", any, SelectionSet<T>>]>
>
>(
new Operation(
"delete_playlists",
"mutation",
new SelectionSet([
mutation_root.delete_playlists<T>(variables, select),
])
)
),
/**
* @description delete single row from the table: "playlists"
*/
delete_playlists_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: playlistsSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"delete_playlists_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"delete_playlists_by_pk",
"mutation",
new SelectionSet([
mutation_root.delete_playlists_by_pk<T>(variables, select),
])
)
),
/**
* @description delete data from the table: "tracks"
*/
delete_tracks: <T extends Array<Selection>>(
variables: { where?: tracks_bool_exp },
select: (t: tracks_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<SelectionSet<[Field<"delete_tracks", any, SelectionSet<T>>]>>
>(
new Operation(
"delete_tracks",
"mutation",
new SelectionSet([mutation_root.delete_tracks<T>(variables, select)])
)
),
/**
* @description delete single row from the table: "tracks"
*/
delete_tracks_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: tracksSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"delete_tracks_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"delete_tracks_by_pk",
"mutation",
new SelectionSet([
mutation_root.delete_tracks_by_pk<T>(variables, select),
])
)
),
/**
* @description delete data from the table: "users"
*/
delete_users: <T extends Array<Selection>>(
variables: { where?: users_bool_exp },
select: (t: users_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<SelectionSet<[Field<"delete_users", any, SelectionSet<T>>]>>
>(
new Operation(
"delete_users",
"mutation",
new SelectionSet([mutation_root.delete_users<T>(variables, select)])
)
),
/**
* @description delete single row from the table: "users"
*/
delete_users_by_pk: <T extends Array<Selection>>(
variables: { id?: number },
select: (t: usersSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"delete_users_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"delete_users_by_pk",
"mutation",
new SelectionSet([
mutation_root.delete_users_by_pk<T>(variables, select),
])
)
),
/**
* @description insert data into the table: "bookmarks"
*/
insert_bookmarks: <T extends Array<Selection>>(
variables: {
objects?: bookmarks_insert_input;
on_conflict?: bookmarks_on_conflict;
},
select: (t: bookmarks_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"insert_bookmarks", any, SelectionSet<T>>]>
>
>(
new Operation(
"insert_bookmarks",
"mutation",
new SelectionSet([
mutation_root.insert_bookmarks<T>(variables, select),
])
)
),
/**
* @description insert a single row into the table: "bookmarks"
*/
insert_bookmarks_one: <T extends Array<Selection>>(
variables: {
object?: bookmarks_insert_input;
on_conflict?: bookmarks_on_conflict;
},
select: (t: bookmarksSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"insert_bookmarks_one", any, SelectionSet<T>>]>
>
>(
new Operation(
"insert_bookmarks_one",
"mutation",
new SelectionSet([
mutation_root.insert_bookmarks_one<T>(variables, select),
])
)
),
/**
* @description insert data into the table: "playlist_items"
*/
insert_playlist_items: <T extends Array<Selection>>(
variables: {
objects?: playlist_items_insert_input;
on_conflict?: playlist_items_on_conflict;
},
select: (t: playlist_items_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"insert_playlist_items", any, SelectionSet<T>>]>
>
>(
new Operation(
"insert_playlist_items",
"mutation",
new SelectionSet([
mutation_root.insert_playlist_items<T>(variables, select),
])
)
),
/**
* @description insert a single row into the table: "playlist_items"
*/
insert_playlist_items_one: <T extends Array<Selection>>(
variables: {
object?: playlist_items_insert_input;
on_conflict?: playlist_items_on_conflict;
},
select: (t: playlist_itemsSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<
[Field<"insert_playlist_items_one", any, SelectionSet<T>>]
>
>
>(
new Operation(
"insert_playlist_items_one",
"mutation",
new SelectionSet([
mutation_root.insert_playlist_items_one<T>(variables, select),
])
)
),
/**
* @description insert data into the table: "playlists"
*/
insert_playlists: <T extends Array<Selection>>(
variables: {
objects?: playlists_insert_input;
on_conflict?: playlists_on_conflict;
},
select: (t: playlists_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"insert_playlists", any, SelectionSet<T>>]>
>
>(
new Operation(
"insert_playlists",
"mutation",
new SelectionSet([
mutation_root.insert_playlists<T>(variables, select),
])
)
),
/**
* @description insert a single row into the table: "playlists"
*/
insert_playlists_one: <T extends Array<Selection>>(
variables: {
object?: playlists_insert_input;
on_conflict?: playlists_on_conflict;
},
select: (t: playlistsSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"insert_playlists_one", any, SelectionSet<T>>]>
>
>(
new Operation(
"insert_playlists_one",
"mutation",
new SelectionSet([
mutation_root.insert_playlists_one<T>(variables, select),
])
)
),
/**
* @description insert data into the table: "tracks"
*/
insert_tracks: <T extends Array<Selection>>(
variables: {
objects?: tracks_insert_input;
on_conflict?: tracks_on_conflict;
},
select: (t: tracks_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<SelectionSet<[Field<"insert_tracks", any, SelectionSet<T>>]>>
>(
new Operation(
"insert_tracks",
"mutation",
new SelectionSet([mutation_root.insert_tracks<T>(variables, select)])
)
),
/**
* @description insert a single row into the table: "tracks"
*/
insert_tracks_one: <T extends Array<Selection>>(
variables: {
object?: tracks_insert_input;
on_conflict?: tracks_on_conflict;
},
select: (t: tracksSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"insert_tracks_one", any, SelectionSet<T>>]>
>
>(
new Operation(
"insert_tracks_one",
"mutation",
new SelectionSet([
mutation_root.insert_tracks_one<T>(variables, select),
])
)
),
/**
* @description insert data into the table: "users"
*/
insert_users: <T extends Array<Selection>>(
variables: {
objects?: users_insert_input;
on_conflict?: users_on_conflict;
},
select: (t: users_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<SelectionSet<[Field<"insert_users", any, SelectionSet<T>>]>>
>(
new Operation(
"insert_users",
"mutation",
new SelectionSet([mutation_root.insert_users<T>(variables, select)])
)
),
/**
* @description insert a single row into the table: "users"
*/
insert_users_one: <T extends Array<Selection>>(
variables: {
object?: users_insert_input;
on_conflict?: users_on_conflict;
},
select: (t: usersSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"insert_users_one", any, SelectionSet<T>>]>
>
>(
new Operation(
"insert_users_one",
"mutation",
new SelectionSet([
mutation_root.insert_users_one<T>(variables, select),
])
)
),
/**
* @description update data of the table: "bookmarks"
*/
update_bookmarks: <T extends Array<Selection>>(
variables: {
_inc?: bookmarks_inc_input;
_set?: bookmarks_set_input;
where?: bookmarks_bool_exp;
},
select: (t: bookmarks_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"update_bookmarks", any, SelectionSet<T>>]>
>
>(
new Operation(
"update_bookmarks",
"mutation",
new SelectionSet([
mutation_root.update_bookmarks<T>(variables, select),
])
)
),
/**
* @description update single row of the table: "bookmarks"
*/
update_bookmarks_by_pk: <T extends Array<Selection>>(
variables: {
_inc?: bookmarks_inc_input;
_set?: bookmarks_set_input;
pk_columns?: bookmarks_pk_columns_input;
},
select: (t: bookmarksSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"update_bookmarks_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"update_bookmarks_by_pk",
"mutation",
new SelectionSet([
mutation_root.update_bookmarks_by_pk<T>(variables, select),
])
)
),
/**
* @description update data of the table: "playlist_items"
*/
update_playlist_items: <T extends Array<Selection>>(
variables: {
_inc?: playlist_items_inc_input;
_set?: playlist_items_set_input;
where?: playlist_items_bool_exp;
},
select: (t: playlist_items_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"update_playlist_items", any, SelectionSet<T>>]>
>
>(
new Operation(
"update_playlist_items",
"mutation",
new SelectionSet([
mutation_root.update_playlist_items<T>(variables, select),
])
)
),
/**
* @description update single row of the table: "playlist_items"
*/
update_playlist_items_by_pk: <T extends Array<Selection>>(
variables: {
_inc?: playlist_items_inc_input;
_set?: playlist_items_set_input;
pk_columns?: playlist_items_pk_columns_input;
},
select: (t: playlist_itemsSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<
[Field<"update_playlist_items_by_pk", any, SelectionSet<T>>]
>
>
>(
new Operation(
"update_playlist_items_by_pk",
"mutation",
new SelectionSet([
mutation_root.update_playlist_items_by_pk<T>(variables, select),
])
)
),
/**
* @description update data of the table: "playlists"
*/
update_playlists: <T extends Array<Selection>>(
variables: {
_inc?: playlists_inc_input;
_set?: playlists_set_input;
where?: playlists_bool_exp;
},
select: (t: playlists_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"update_playlists", any, SelectionSet<T>>]>
>
>(
new Operation(
"update_playlists",
"mutation",
new SelectionSet([
mutation_root.update_playlists<T>(variables, select),
])
)
),
/**
* @description update single row of the table: "playlists"
*/
update_playlists_by_pk: <T extends Array<Selection>>(
variables: {
_inc?: playlists_inc_input;
_set?: playlists_set_input;
pk_columns?: playlists_pk_columns_input;
},
select: (t: playlistsSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"update_playlists_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"update_playlists_by_pk",
"mutation",
new SelectionSet([
mutation_root.update_playlists_by_pk<T>(variables, select),
])
)
),
/**
* @description update data of the table: "tracks"
*/
update_tracks: <T extends Array<Selection>>(
variables: {
_inc?: tracks_inc_input;
_set?: tracks_set_input;
where?: tracks_bool_exp;
},
select: (t: tracks_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<SelectionSet<[Field<"update_tracks", any, SelectionSet<T>>]>>
>(
new Operation(
"update_tracks",
"mutation",
new SelectionSet([mutation_root.update_tracks<T>(variables, select)])
)
),
/**
* @description update single row of the table: "tracks"
*/
update_tracks_by_pk: <T extends Array<Selection>>(
variables: {
_inc?: tracks_inc_input;
_set?: tracks_set_input;
pk_columns?: tracks_pk_columns_input;
},
select: (t: tracksSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"update_tracks_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"update_tracks_by_pk",
"mutation",
new SelectionSet([
mutation_root.update_tracks_by_pk<T>(variables, select),
])
)
),
/**
* @description update data of the table: "users"
*/
update_users: <T extends Array<Selection>>(
variables: {
_inc?: users_inc_input;
_set?: users_set_input;
where?: users_bool_exp;
},
select: (t: users_mutation_responseSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<SelectionSet<[Field<"update_users", any, SelectionSet<T>>]>>
>(
new Operation(
"update_users",
"mutation",
new SelectionSet([mutation_root.update_users<T>(variables, select)])
)
),
/**
* @description update single row of the table: "users"
*/
update_users_by_pk: <T extends Array<Selection>>(
variables: {
_inc?: users_inc_input;
_set?: users_set_input;
pk_columns?: users_pk_columns_input;
},
select: (t: usersSelector) => T
) =>
this.executor.execute<
Imutation_root,
Operation<
SelectionSet<[Field<"update_users_by_pk", any, SelectionSet<T>>]>
>
>(
new Operation(
"update_users_by_pk",
"mutation",
new SelectionSet([
mutation_root.update_users_by_pk<T>(variables, select),
])
)
),
};
  // Typed wrappers that build and execute GraphQL *subscription* operations,
  // one per table in three variants: plain fetch, _aggregate, and _by_pk.
  // NOTE(review): appears machine-generated — TODO confirm the generator;
  // regenerate rather than hand-edit.
  public readonly subscribe = {
    /**
     * @description fetch data from the table: "bookmarks"
     */
    bookmarks: <T extends Array<Selection>>(
      variables: {
        distinct_on?: bookmarks_select_column;
        limit?: number;
        offset?: number;
        order_by?: bookmarks_order_by;
        where?: bookmarks_bool_exp;
      },
      select: (t: bookmarksSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<SelectionSet<[Field<"bookmarks", any, SelectionSet<T>>]>>
      >(
        new Operation(
          "bookmarks",
          "subscription",
          new SelectionSet([subscription_root.bookmarks<T>(variables, select)])
        )
      ),
    /**
     * @description fetch aggregated fields from the table: "bookmarks"
     */
    bookmarks_aggregate: <T extends Array<Selection>>(
      variables: {
        distinct_on?: bookmarks_select_column;
        limit?: number;
        offset?: number;
        order_by?: bookmarks_order_by;
        where?: bookmarks_bool_exp;
      },
      select: (t: bookmarks_aggregateSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<[Field<"bookmarks_aggregate", any, SelectionSet<T>>]>
        >
      >(
        new Operation(
          "bookmarks_aggregate",
          "subscription",
          new SelectionSet([
            subscription_root.bookmarks_aggregate<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "bookmarks" using primary key columns
     */
    bookmarks_by_pk: <T extends Array<Selection>>(
      variables: { id?: number },
      select: (t: bookmarksSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<[Field<"bookmarks_by_pk", any, SelectionSet<T>>]>
        >
      >(
        new Operation(
          "bookmarks_by_pk",
          "subscription",
          new SelectionSet([
            subscription_root.bookmarks_by_pk<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "playlist_items"
     */
    playlist_items: <T extends Array<Selection>>(
      variables: {
        distinct_on?: playlist_items_select_column;
        limit?: number;
        offset?: number;
        order_by?: playlist_items_order_by;
        where?: playlist_items_bool_exp;
      },
      select: (t: playlist_itemsSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<SelectionSet<[Field<"playlist_items", any, SelectionSet<T>>]>>
      >(
        new Operation(
          "playlist_items",
          "subscription",
          new SelectionSet([
            subscription_root.playlist_items<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch aggregated fields from the table: "playlist_items"
     */
    playlist_items_aggregate: <T extends Array<Selection>>(
      variables: {
        distinct_on?: playlist_items_select_column;
        limit?: number;
        offset?: number;
        order_by?: playlist_items_order_by;
        where?: playlist_items_bool_exp;
      },
      select: (t: playlist_items_aggregateSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<
            [Field<"playlist_items_aggregate", any, SelectionSet<T>>]
          >
        >
      >(
        new Operation(
          "playlist_items_aggregate",
          "subscription",
          new SelectionSet([
            subscription_root.playlist_items_aggregate<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "playlist_items" using primary key columns
     */
    playlist_items_by_pk: <T extends Array<Selection>>(
      variables: { id?: number },
      select: (t: playlist_itemsSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<[Field<"playlist_items_by_pk", any, SelectionSet<T>>]>
        >
      >(
        new Operation(
          "playlist_items_by_pk",
          "subscription",
          new SelectionSet([
            subscription_root.playlist_items_by_pk<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "playlists"
     */
    playlists: <T extends Array<Selection>>(
      variables: {
        distinct_on?: playlists_select_column;
        limit?: number;
        offset?: number;
        order_by?: playlists_order_by;
        where?: playlists_bool_exp;
      },
      select: (t: playlistsSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<SelectionSet<[Field<"playlists", any, SelectionSet<T>>]>>
      >(
        new Operation(
          "playlists",
          "subscription",
          new SelectionSet([subscription_root.playlists<T>(variables, select)])
        )
      ),
    /**
     * @description fetch aggregated fields from the table: "playlists"
     */
    playlists_aggregate: <T extends Array<Selection>>(
      variables: {
        distinct_on?: playlists_select_column;
        limit?: number;
        offset?: number;
        order_by?: playlists_order_by;
        where?: playlists_bool_exp;
      },
      select: (t: playlists_aggregateSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<[Field<"playlists_aggregate", any, SelectionSet<T>>]>
        >
      >(
        new Operation(
          "playlists_aggregate",
          "subscription",
          new SelectionSet([
            subscription_root.playlists_aggregate<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "playlists" using primary key columns
     */
    playlists_by_pk: <T extends Array<Selection>>(
      variables: { id?: number },
      select: (t: playlistsSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<[Field<"playlists_by_pk", any, SelectionSet<T>>]>
        >
      >(
        new Operation(
          "playlists_by_pk",
          "subscription",
          new SelectionSet([
            subscription_root.playlists_by_pk<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "tracks"
     */
    tracks: <T extends Array<Selection>>(
      variables: {
        distinct_on?: tracks_select_column;
        limit?: number;
        offset?: number;
        order_by?: tracks_order_by;
        where?: tracks_bool_exp;
      },
      select: (t: tracksSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<SelectionSet<[Field<"tracks", any, SelectionSet<T>>]>>
      >(
        new Operation(
          "tracks",
          "subscription",
          new SelectionSet([subscription_root.tracks<T>(variables, select)])
        )
      ),
    /**
     * @description fetch aggregated fields from the table: "tracks"
     */
    tracks_aggregate: <T extends Array<Selection>>(
      variables: {
        distinct_on?: tracks_select_column;
        limit?: number;
        offset?: number;
        order_by?: tracks_order_by;
        where?: tracks_bool_exp;
      },
      select: (t: tracks_aggregateSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<[Field<"tracks_aggregate", any, SelectionSet<T>>]>
        >
      >(
        new Operation(
          "tracks_aggregate",
          "subscription",
          new SelectionSet([
            subscription_root.tracks_aggregate<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "tracks" using primary key columns
     */
    tracks_by_pk: <T extends Array<Selection>>(
      variables: { id?: number },
      select: (t: tracksSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<SelectionSet<[Field<"tracks_by_pk", any, SelectionSet<T>>]>>
      >(
        new Operation(
          "tracks_by_pk",
          "subscription",
          new SelectionSet([
            subscription_root.tracks_by_pk<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "users"
     */
    users: <T extends Array<Selection>>(
      variables: {
        distinct_on?: users_select_column;
        limit?: number;
        offset?: number;
        order_by?: users_order_by;
        where?: users_bool_exp;
      },
      select: (t: usersSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<SelectionSet<[Field<"users", any, SelectionSet<T>>]>>
      >(
        new Operation(
          "users",
          "subscription",
          new SelectionSet([subscription_root.users<T>(variables, select)])
        )
      ),
    /**
     * @description fetch aggregated fields from the table: "users"
     */
    users_aggregate: <T extends Array<Selection>>(
      variables: {
        distinct_on?: users_select_column;
        limit?: number;
        offset?: number;
        order_by?: users_order_by;
        where?: users_bool_exp;
      },
      select: (t: users_aggregateSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<
          SelectionSet<[Field<"users_aggregate", any, SelectionSet<T>>]>
        >
      >(
        new Operation(
          "users_aggregate",
          "subscription",
          new SelectionSet([
            subscription_root.users_aggregate<T>(variables, select),
          ])
        )
      ),
    /**
     * @description fetch data from the table: "users" using primary key columns
     */
    users_by_pk: <T extends Array<Selection>>(
      variables: { id?: number },
      select: (t: usersSelector) => T
    ) =>
      this.executor.execute<
        Isubscription_root,
        Operation<SelectionSet<[Field<"users_by_pk", any, SelectionSet<T>>]>>
      >(
        new Operation(
          "users_by_pk",
          "subscription",
          new SelectionSet([
            subscription_root.users_by_pk<T>(variables, select),
          ])
        )
      ),
  };
} | the_stack |
import Dimensions = Utils.Measurements.Dimensions;
import DisplayObject = etch.drawing.DisplayObject;
import IDisplayContext = etch.drawing.IDisplayContext;
import Size = minerva.Size;
import {Device} from '../Device';
import {Commands} from './../Commands';
import {IApp} from '../IApp';
import {Recorder} from './../Blocks/Sources/Recorder';
declare var App: IApp;
export class SharePanel extends DisplayObject {
    public Open: boolean; // panel currently visible / accepting input
    public OffsetX: number; // horizontal slide (save view <-> share view)
    public OffsetY: number; // vertical slide (panel open/close)
    private _FirstSession: boolean; // no session id yet -> show "generate link" view
    private _CopyJson; // UI copy strings, populated in Init()
    public SessionTitle: string; // human-readable composition title
    public SessionURL: string; // full shareable URL (domain + id + "&t=" title)
    private _NameUrl: string; // "&t=<title>" querystring fragment
    private _UrlSelecting: boolean; // pointer went down inside the URL field
    private _RollOvers: boolean[]; // hover state per hit-area (indices in HitTests)
    private _CommandManager; // App.CommandManager, cached in Init()
    private _Blink:number = 0; // caret blink counter (only used by commented-out code in Draw)
    private _SessionId: string; // persisted composition id
    private _Saving: boolean; // a save command is in flight
    private _Warning: boolean; // Recorder blocks present (see GetWarning)
    private _NoBlocks: boolean; // nothing on the canvas to save
    public TitleInputContainer: HTMLElement;
    public URLInputContainer: HTMLElement;
    public TitleInput: HTMLInputElement;
    public URLInput: HTMLInputElement;
    /**
     * Sets up panel state, grabs the title/URL DOM inputs, registers input
     * listeners, and loads the UI copy strings.
     * Assumes #shareTitle/#shareUrl/#shareTitleInput/#shareUrlText exist in
     * the host page — TODO confirm against the HTML template.
     */
    Init(drawTo: IDisplayContext): void {
        super.Init(drawTo);
        // Default state: panel closed, parked above the viewport.
        this.Open = false;
        this._FirstSession = true;
        this._Saving = false;
        this._Warning = false;
        this._NoBlocks = true;
        this._NameUrl = "";
        this.SessionURL = "";
        this._SessionId = "";
        this.OffsetX = 0;
        this.OffsetY = -this.DrawTo.Height;
        this._UrlSelecting = false;
        this._RollOvers = [];
        // DOM ELEMENTS //
        this.TitleInputContainer = <HTMLElement>document.getElementById("shareTitle");
        this.URLInputContainer = <HTMLElement>document.getElementById("shareUrl");
        this.TitleInput = <HTMLInputElement>document.getElementById("shareTitleInput");
        this.URLInput = <HTMLInputElement>document.getElementById("shareUrlText");
        // Sanitize the title and refresh the share URL on every keystroke.
        this.TitleInput.addEventListener(
            'input',
            (event):void => {
                this.TestString(this.TitleInput);
                this.UpdateString(this.TitleInput);
            }
        );
        // Enter commits the title (see EnterCheck/Submit).
        this.TitleInput.addEventListener(
            'keydown',
            (event):void => {
                this.EnterCheck(event);
            }
        );
        this._CommandManager = App.CommandManager;
        // todo: add to config
        this._CopyJson = {
            genUrl: "Generate share link",
            shareLine: "Made something cool? Generate your own unique link to share it with the world (we'd love to see):",
            copyLine: "Share your creation with this unique URL:",
            titleLine: "Title",
            generateLine: "Randomise Title",
            domain: this.GetUrl() + "?c=",
            facebook: "post to facebook",
            twitter: "post to twitter",
            subreddit: "post to BD subreddit",
            bookmark: "bookmark creation",
            save: "overwrite",
            saveAs: "create new",
            saving: "saving...",
            tweetText: "I made this @blokdust creation: "
        };
        if (App.SessionId) {
            this._FirstSession = false;
        }
        if (App.CompositionId) {
            this._SessionId = App.CompositionId;
        }
        this.GetTitleFromUrl();
        this.Resize();
    }
// GET OUR START DOMAIN (localhost / blokdust.com) //
GetUrl() {
return [location.protocol, '//', location.host, location.pathname].join('');
}
//-------------------------------------------------------------------------------------------
// INPUT
//-------------------------------------------------------------------------------------------
// IF WE'VE LOADED A NEW COMP, SET THE TITLE FROM THE URL STRING //
GetTitleFromUrl() {
var decoded = decodeURI(window.location.href);
var getName = decoded.split("&t=");
// Set title from Url //
if (getName.length>1) {
this.UpdateFormText(this.TitleInput, getName[1]);
this.TestString(this.TitleInput);
this.UpdateString(this.TitleInput);
}
// Generate random title //
else {
this.SessionTitle = this.GenerateLabel();
}
}
// DOES INPUT STRING NEED CHARS REMOVED //
TestString(element: HTMLInputElement) {
var caretPos = element.selectionStart;
if (caretPos > 0) {
caretPos -= 1;
}
// [^A-Za-z0-9_] alpha-numeric
// [][!"#$%&'()*+,./:;<=>?@\^_`{|}~-] punctuation
// [.,\/#!$%\^&\*;:{}=\-_`~()] punctuation 2
if (/[.,\/#\?\"\'$£%\^&\*;:{|}<=>\\@\`\+~()]/.test(element.value)) {
element.value = element.value.replace(/[.,\/#\?\"\'$£%\^&\*;:{|}<=>\\@\`\+~()]/g, '');
element.selectionStart = caretPos;
element.selectionEnd = caretPos;
}
}
// TITLE INPUT HAS CHANGED, USE UPDATED INPUT VALUE //
UpdateString(element: HTMLInputElement) {
var string: string = element.value;
this.SessionTitle = string;
this.SetNameUrl(string);
this.UpdateUrlText();
}
// COMBINE DOMAIN, COMP ID & TITLE, AND UPDATE URL INPUT & ADDRESS BAR //
UpdateUrlText() {
this.SessionURL = "" + this._CopyJson.domain + this._SessionId + this._NameUrl;
this.UpdateFormText(this.URLInput,this.SessionURL);
if (this._SessionId) {
App.AddressBarManager.UpdateURL(this.SessionURL);
}
}
// FORMAT THE TITLE FOR USE IN THE URL //
SetNameUrl(string: string) {
this._NameUrl = "&t=" + encodeURI(this.Capitalise(string));
}
    // SET A PROVIDED DOM ELEMENT'S STRING //
    UpdateFormText(element: HTMLInputElement, str: string) {
        // Directly overwrite the input's current text.
        element.value = str;
    }
// ENTER PRESSED ON INPUT //
EnterCheck(e: any) {
var key = e.which || e.keyCode;
if (key === 13) {
this.Submit();
}
}
    // Commit the title: drop focus; ClearScroll() is defined outside this view.
    Submit() {
        this.TitleInput.blur();
        this.ClearScroll();
    }
//-------------------------------------------------------------------------------------------
// SAVE CHECK
//-------------------------------------------------------------------------------------------
// CHECK IF BLOCKS EXIST BEFORE ALLOWING SAVE/SHARE //
GetWarning() {
var warnBlocks = [];
for (var i = 0; i < App.Blocks.length; i++) {
var block:any = App.Blocks[i];
if (block instanceof Recorder) {
warnBlocks.push(block);
}
}
this._Warning = (warnBlocks.length > 0);
this._NoBlocks = (!App.Blocks.length);
}
    //-------------------------------------------------------------------------------------------
    // DRAW
    //-------------------------------------------------------------------------------------------
    /**
     * Renders the whole panel each frame. Layout is two horizontally
     * adjacent pages: the save/generate page at OffsetX and the share page
     * at shareX = OffsetX + (App.Width * 1.5); DelayTo tweens OffsetX to
     * slide between them. All metrics scale with App.Unit. Nothing is drawn
     * while the panel is closed.
     */
    Draw() {
        var ctx = this.Ctx;
        var midType = App.Metrics.TxtMid;
        var headType = App.Metrics.TxtHeader;
        var urlType = App.Metrics.TxtUrl2;
        var italicType = App.Metrics.TxtItalic2;
        var units = App.Unit;
        var centerY = this.OffsetY + (App.Height * 0.5);
        var shareX = this.OffsetX + (App.Width*1.5);
        var buttonY = centerY + (35*units);
        var appWidth = App.Width;
        var appHeight = App.Height;
        if (this.Open) {
            // BG //
            App.FillColor(ctx,App.Palette[2]);
            ctx.globalAlpha = 0.95;
            ctx.fillRect(0,this.OffsetY,appWidth,appHeight);
            // URL BOX //
            ctx.globalAlpha = 1;
            App.FillColor(ctx,App.Palette[1]);
            ctx.fillRect(shareX + (appWidth*0.5) - (210*units),centerY - (20*units),420*units,40*units); // solid
            // Device-dependent positions for the back/skip arrows.
            var arrowX = 275;
            var arrowY = 0;
            if (App.Metrics.Device === Device.tablet) {
                arrowX = 245;
            }
            if (App.Metrics.Device === Device.mobile) {
                arrowX = 190;
                arrowY = 110;
            }
            if (this._FirstSession) {
                // GENERATE URL //
                if (this._Saving || this._NoBlocks) {
                    App.FillColor(ctx,App.Palette[1]);
                } else {
                    App.FillColor(ctx,App.Palette[App.ThemeManager.MenuOrder[3]]);
                }
                ctx.fillRect(this.OffsetX + (appWidth * 0.5) - (210 * units), centerY - (20 * units), 420 * units, 40 * units);
                // Hover indicator (small triangle under the button).
                if (this._RollOvers[3] && !this._Saving) {
                    ctx.beginPath();
                    ctx.moveTo(this.OffsetX + (appWidth*0.5), centerY + (29*units));
                    ctx.lineTo(this.OffsetX + (appWidth*0.5) - (10*units), centerY + (19*units));
                    ctx.lineTo(this.OffsetX + (appWidth*0.5) + (10*units), centerY + (19*units));
                    ctx.closePath();
                    ctx.fill();
                }
                ctx.font = urlType;
                ctx.textAlign = "center";
                App.FillColor(ctx,App.Palette[App.ThemeManager.Txt]);
                ctx.fillText(this._CopyJson.genUrl.toUpperCase(), this.OffsetX + (appWidth * 0.5), centerY + (9 * units));
                ctx.font = italicType;
                ctx.textAlign = "left";
                this.WordWrap(ctx, this._CopyJson.shareLine, this.OffsetX + (appWidth * 0.5) - (210 * units), centerY - (59 * units), 14 * units, 210 * units);
            } else {
                // SAVE & SAVE AS //
                if (this._Saving || this._NoBlocks) {
                    App.FillColor(ctx,App.Palette[1]);
                } else {
                    App.FillColor(ctx,App.Palette[App.ThemeManager.MenuOrder[3]]);
                }
                ctx.fillRect(this.OffsetX + (appWidth * 0.5) - (210 * units), centerY - (20 * units), 202.5 * units, 40 * units);
                if (this._RollOvers[4] && !this._Saving) {
                    ctx.beginPath();
                    ctx.moveTo(this.OffsetX + (appWidth*0.5) - (108.75*units), centerY + (29*units));
                    ctx.lineTo(this.OffsetX + (appWidth*0.5) - (118.75*units), centerY + (19*units));
                    ctx.lineTo(this.OffsetX + (appWidth*0.5) - (98.75*units), centerY + (19*units));
                    ctx.closePath();
                    ctx.fill();
                }
                if (this._Saving || this._NoBlocks) {
                    App.FillColor(ctx,App.Palette[1]);
                } else {
                    App.FillColor(ctx,App.Palette[App.ThemeManager.MenuOrder[1]]);
                }
                ctx.fillRect(this.OffsetX + (appWidth * 0.5) + (7.5 * units), centerY - (20 * units), 202.5 * units, 40 * units);
                if (this._RollOvers[5] && !this._Saving) {
                    ctx.beginPath();
                    ctx.moveTo(this.OffsetX + (appWidth*0.5) + (108.75*units), centerY + (29*units));
                    ctx.lineTo(this.OffsetX + (appWidth*0.5) + (118.75*units), centerY + (19*units));
                    ctx.lineTo(this.OffsetX + (appWidth*0.5) + (98.75*units), centerY + (19*units));
                    ctx.closePath();
                    ctx.fill();
                }
                ctx.font = urlType;
                ctx.textAlign = "center";
                App.FillColor(ctx,App.Palette[App.ThemeManager.Txt]);
                ctx.fillText(this._CopyJson.save.toUpperCase(), this.OffsetX + (appWidth * 0.5) - (108.75 * units), centerY + (9 * units));
                ctx.fillText(this._CopyJson.saveAs.toUpperCase(), this.OffsetX + (appWidth * 0.5) + (108.75 * units), centerY + (9 * units));
                ctx.font = italicType;
                ctx.textAlign = "left";
                this.WordWrap(ctx, this._CopyJson.shareLine, this.OffsetX + (appWidth * 0.5) - (210 * units), centerY - (59 * units), 14 * units, 210 * units);
                // SKIP //
                ctx.lineWidth = 2;
                App.StrokeColor(ctx,App.Palette[App.ThemeManager.Txt]);
                ctx.beginPath();
                ctx.moveTo( this.OffsetX + (appWidth*0.5) + (arrowX * units), centerY + ((arrowY-20)*units));
                ctx.lineTo( this.OffsetX + (appWidth*0.5) + ((arrowX+20) * units), centerY + (arrowY*units));
                ctx.lineTo( this.OffsetX + (appWidth*0.5) + (arrowX * units), centerY + ((arrowY+20)*units));
                ctx.stroke();
                ctx.font = midType;
                ctx.fillText("SKIP", this.OffsetX + (appWidth*0.5) + (arrowX * units), centerY + ((arrowY+35)*units));
            }
            // SAVE MESSAGE //
            if (this._Saving) {
                ctx.font = midType;
                ctx.textAlign = "center";
                ctx.fillText(this._CopyJson.saving.toUpperCase(), this.OffsetX + (appWidth * 0.5), centerY + (75 * units));
                App.AnimationsLayer.DrawSprite(ctx,'loading',appWidth*0.5, centerY + (50 * units),16,true);
            } else {
                // WARNING MESSAGE //
                if (this._Warning) {
                    ctx.font = italicType;
                    ctx.textAlign = "left";
                    this.WordWrap(ctx, App.L10n.UI.SharePanel.SaveWarning, this.OffsetX + (appWidth * 0.5) - (210 * units), centerY + (75 * units), 14 * units, 420 * units);
                }
                if (this._NoBlocks) {
                    ctx.font = italicType;
                    ctx.textAlign = "left";
                    this.WordWrap(ctx, App.L10n.UI.SharePanel.NoBlocks, this.OffsetX + (appWidth * 0.5) - (210 * units), centerY + (75 * units), 14 * units, 420 * units);
                }
            }
            // BACK ARROW //
            ctx.lineWidth = 2;
            App.StrokeColor(ctx,App.Palette[App.ThemeManager.Txt]);
            ctx.beginPath();
            ctx.moveTo(shareX + (appWidth*0.5) - (arrowX * units), centerY + ((arrowY-20)*units));
            ctx.lineTo(shareX + (appWidth*0.5) - ((arrowX+20) * units), centerY + (arrowY*units));
            ctx.lineTo(shareX + (appWidth*0.5) - (arrowX * units), centerY + ((arrowY+20)*units));
            ctx.stroke();
            // SHARE BUTTONS //
            ctx.fillStyle = "#fc4742";// gp //TODO: Store these share colours somewhere
            ctx.fillRect(shareX + (appWidth*0.5) + (80*units),buttonY,130*units,30*units);
            if (this._RollOvers[8]) {
                ctx.beginPath();
                ctx.moveTo(shareX + (appWidth*0.5) + (145*units), buttonY + (39*units));
                ctx.lineTo(shareX + (appWidth*0.5) + (135*units), buttonY + (29*units));
                ctx.lineTo(shareX + (appWidth*0.5) + (155*units), buttonY + (29*units));
                ctx.closePath();
                ctx.fill();
            }
            ctx.fillStyle = "#2db0e7"; // tw
            ctx.fillRect(shareX + (appWidth*0.5) - (65*units),buttonY,130*units,30*units);
            if (this._RollOvers[7]) {
                ctx.beginPath();
                ctx.moveTo(shareX + (appWidth*0.5), buttonY + (39*units));
                ctx.lineTo(shareX + (appWidth*0.5) - (10*units), buttonY + (29*units));
                ctx.lineTo(shareX + (appWidth*0.5) + (10*units), buttonY + (29*units));
                ctx.closePath();
                ctx.fill();
            }
            ctx.fillStyle = "#2152ad"; // fb
            ctx.fillRect(shareX + (appWidth*0.5) - (210*units),buttonY,130*units,30*units);
            if (this._RollOvers[6]) {
                ctx.beginPath();
                ctx.moveTo(shareX + (appWidth*0.5) - (145*units), buttonY + (39*units));
                ctx.lineTo(shareX + (appWidth*0.5) - (135*units), buttonY + (29*units));
                ctx.lineTo(shareX + (appWidth*0.5) - (155*units), buttonY + (29*units));
                ctx.closePath();
                ctx.fill();
            }
            // SHARE COPY //
            App.FillColor(ctx,App.Palette[App.ThemeManager.Txt]);
            App.StrokeColor(ctx,App.Palette[App.ThemeManager.Txt]);
            ctx.textAlign = "left";
            ctx.font = midType;
            ctx.font = italicType;
            ctx.fillText(this._CopyJson.copyLine, shareX + (appWidth*0.5) - (210*units), centerY - (33*units) );
            ctx.textAlign = "center";
            ctx.font = midType;
            ctx.fillText(this._CopyJson.facebook.toUpperCase(), shareX + (appWidth*0.5) - (145*units), buttonY + (18.5*units) );
            ctx.fillText(this._CopyJson.twitter.toUpperCase(), shareX + (appWidth*0.5), buttonY + (18.5*units) );
            ctx.fillText(this._CopyJson.subreddit.toUpperCase(), shareX + (appWidth*0.5) + (145*units), buttonY + (18.5*units) );
            // TITLE //
            if (App.Metrics.Device === Device.mobile) {
                ctx.textAlign = "left";
                ctx.fillText(this._CopyJson.titleLine.toUpperCase(), (appWidth*0.5) - (210*units), centerY - (150*units) );
            } else {
                ctx.textAlign = "right";
                ctx.fillText(this._CopyJson.titleLine.toUpperCase(), (appWidth*0.5) - (225*units), centerY - (106*units) );
            }
            ctx.beginPath();
            ctx.moveTo((appWidth*0.5) - (210*units), centerY - (90*units));
            ctx.lineTo((appWidth*0.5) + (210*units), centerY - (90*units));
            ctx.stroke();
            ctx.textAlign = "left";
            ctx.font = headType;
            //ctx.fillText(this.SessionTitle, (appWidth*0.5) - (210*units), centerY - (100*units) );
            var titleW = ctx.measureText(this.SessionTitle.toUpperCase()).width;
            // TYPE BAR //
            /*if (App.FocusManager.IsActive()) {
                if (this._Blink > 50) {
                    ctx.fillRect((appWidth*0.5) - (210*units) + titleW + (5*units),centerY - (123*units),2*units,26*units);
                }
                this._Blink += 1;
                if (this._Blink == 100) {
                    this._Blink = 0;
                }
            }*/
            // PANEL TITLE //
            ctx.font = headType;
            ctx.fillText("SHARE",20*units,this.OffsetY + (30*units) + (11*units));
            // GEN TITLE //
            ctx.font = midType;
            var genW = ctx.measureText(this._CopyJson.generateLine.toUpperCase()).width;
            ctx.fillText(this._CopyJson.generateLine.toUpperCase(), (appWidth*0.5) + (205*units) - genW, centerY - (106*units) );
            ctx.beginPath();
            ctx.moveTo((appWidth*0.5) + (210*units), centerY - (120*units));
            ctx.lineTo((appWidth*0.5) + (200*units) - genW, centerY - (120*units));
            ctx.lineTo((appWidth*0.5) + (200*units) - genW, centerY - (100*units));
            ctx.lineTo((appWidth*0.5) + (210*units), centerY - (100*units));
            ctx.closePath();
            ctx.stroke();
            var clx = 230;
            var cly = 130;
            if (App.Metrics.Device === Device.mobile) {
                clx = 202.5;
                cly = 150;
            }
            // CLOSE BUTTON //
            ctx.beginPath();
            ctx.moveTo((appWidth*0.5) + ((clx-7.5)*units), centerY - ((cly-7.5)*units));
            ctx.lineTo((appWidth*0.5) + ((clx+7.5)*units), centerY - ((cly+7.5)*units));
            ctx.moveTo((appWidth*0.5) + ((clx+7.5)*units), centerY - ((cly-7.5)*units));
            ctx.lineTo((appWidth*0.5) + ((clx-7.5)*units), centerY - ((cly+7.5)*units));
            ctx.stroke();
        }
    }
//-------------------------------------------------------------------------------------------
// STRING FUNCTIONS
//-------------------------------------------------------------------------------------------
WordWrap( context , text, x, y, lineHeight, fitWidth) {
fitWidth = fitWidth || 0;
if (fitWidth <= 0)
{
context.fillText( text, x, y );
return;
}
var words = text.split(' ');
var currentLine = 0;
var idx = 1;
while (words.length > 0 && idx <= words.length)
{
var str = words.slice(0,idx).join(' ');
var w = context.measureText(str).width;
if ( w > fitWidth )
{
if (idx==1)
{
idx=2;
}
context.fillText( words.slice(0,idx-1).join(' '), x, y + (lineHeight*currentLine) );
currentLine++;
words = words.splice(idx-1);
idx = 1;
}
else
{idx++;}
}
if (idx > 0)
context.fillText( words.join(' '), x, y + (lineHeight*currentLine) );
}
Capitalise(string: string) {
var s = string.toLowerCase();
s = this.UppercaseAt(s,0);
for (var i = 0; i < s.length; i++) {
if (s.charAt(i)===" ") {
s = this.UppercaseAt(s,i+1);
}
}
return s;
}
UppercaseAt(str,index) {
if(index > str.length-1) return str;
var chr = str.substr(index,1).toUpperCase();
return str.substr(0,index) + chr + str.substr(index+1);
}
    //-------------------------------------------------------------------------------------------
    // TWEEN
    //-------------------------------------------------------------------------------------------
    /**
     * Tweens panel[v] (v is "OffsetX" or "OffsetY") to `destination` over
     * `t` ms after `delay` ms, keeping the DOM inputs positioned alongside
     * the canvas while animating. The onUpdate/onComplete callbacks are
     * deliberately plain functions: TWEEN invokes them with the tweened
     * value object as `this`, so `this.x` is the interpolated value.
     */
    DelayTo(panel,destination,t,delay,v){
        var offsetTween = new window.TWEEN.Tween({x: panel[""+v]});
        offsetTween.to({x: destination}, t);
        offsetTween.onUpdate(function () {
            panel[""+v] = this.x;
            // Keep the HTML inputs glued to the moving canvas layout.
            if (v=="OffsetX") {
                panel.TweenDom(panel.URLInputContainer, this.x, "x", 200, 1.5);
            }
            if (v=="OffsetY") {
                panel.TweenDom(panel.URLInputContainer, this.x, "y", 20, 0);
                panel.TweenDom(panel.TitleInputContainer, this.x, "y", 132, 0);
            }
        });
        offsetTween.onComplete(function() {
            if (v=="OffsetY") {
                // A non-zero destination means the panel slid off-screen: close it.
                if (destination!==0) {
                    panel.Open = false;
                    panel.HideDom();
                }
                panel.OffsetX = 0;
                var shareUrl = document.getElementById("shareUrl");
                shareUrl.style.left = "1000%";
            }
            // First horizontal slide completes the "first session" flow.
            if (v=="OffsetX" && panel._FirstSession) {
                panel._FirstSession = false;
            }
        });
        offsetTween.easing(window.TWEEN.Easing.Exponential.InOut);
        offsetTween.delay(delay);
        offsetTween.start(this.LastVisualTick);
    }
    //-------------------------------------------------------------------------------------------
    // TITLE GENERATOR
    //-------------------------------------------------------------------------------------------
    /**
     * Builds a random Scandinavian-sounding session title from syllable
     * tables in one of two "dialects" (Norwegian- vs Swedish-flavoured
     * characters), then pushes it into the title input and the share URL.
     * @returns the generated label
     */
    GenerateLabel() {
        var label = "";
        var diceRoll;
        // DIALECT 1 (Norwegian alphabet)
        var prefixA = ["al","aal","blok","bjør","brø","drø","du","ef","en","jen","ja","lek","lu","mal","svi","svar","sku","spru","kø","kin","kvi","kna","kvar","hof","tje","fja","ub","rø","vø","vol","va","ey","ly","sky","ske","skø","sji","yø","ø"];
        var syllableA = ["jen","ke","kil","kol","kof","nø","ken","ren","re","rol","sen","se","sa","then","tol","te","ty","ple","pa","ka","y"];
        var suffixA = ["berg","holm","sorg","fla","trad","stad","mark","dt","de","s","kla","ken","kjen","gen","gan","likt","tra","tet","tal","man","la","tt","bb","na","k","ka","bø","dø","gø","jø","kø","lø","mø","nø","pø","sø","slø","tø","vø","lok","vik","slik","dust"];
        var joinerA = ["berg","sorg","fla","lag","tra","tet","tal","du","na","bø","dø","gø","jø","kø","lø","mø","nø","pø","sø","slø","tø","vø","lok","vik","dust","dok","blok"];
        // DIALECT 2 (Swedish alphabet)
        var prefixB = ["al","aal","blok","björ","brö","drö","du","ef","en","jen","jä","lek","lü","mal","svi","svar","sku","spru","ko","kin","kvi","kna","kvär","höf","tje","fjä","ub","ro","vo","vol","vä","ey","ly","sky","ske","sko","sji","yö","ö"];
        var syllableB = ["jen","ke","kil","kol","kof","nö","ken","ren","re","rol","sen","se","sa","then","tol","te","ty","ple","pa","ka","y"];
        var suffixB = ["berg","holm","sorg","fla","träd","städ","mark","dt","de","s","kla","ken","kjen","gen","gan","likt","tra","tet","tal","man","la","tt","bb","na","k","ka","bö","dö","gö","jö","kö","lö","mö","nö","pö","sö","slö","tö","vö","lok","vik","slik","dust"];
        var joinerB = ["berg","sorg","fla","lag","tra","tet","tal","dü","na","bö","dö","gö","jö","kö","lö","mö","nö","pö","sö","slö","tö","vö","lok","vik","dust","dok","blok"];
        var prefixes = [prefixA,prefixB];
        var syllables = [syllableA,syllableB];
        var suffixes = [suffixA,suffixB];
        var joiners = [joinerA,joinerB];
        // randomly select dialect
        var dialect = Math.round(Math.random());
        var prefix = prefixes[dialect];
        var syllable = syllables[dialect];
        var suffix = suffixes[dialect];
        var joiner = joiners[dialect];
        // ALGORITHM //
        // FIRST WORD //
        label = "" + label + prefix[Math.floor(Math.random()*prefix.length)];
        diceRoll = Math.floor(Math.random()*8);
        // 1-in-8 chance of a middle syllable in the first word.
        if (diceRoll==0) { label = "" + label + syllable[Math.floor(Math.random()*syllable.length)]; }
        label = "" + label + suffix[Math.floor(Math.random()*suffix.length)];
        // JOINER //
        // 1-in-10 chance of a standalone joiner word.
        diceRoll = Math.floor(Math.random()*10);
        if (diceRoll==0) { label = "" + label + " " + joiner[Math.floor(Math.random()*joiner.length)]; }
        // SECOND WORD //
        // 50% chance of a second word (with its own 1-in-5 syllable roll).
        diceRoll = Math.floor(Math.random()*2);
        if (diceRoll!==0) {
            label = "" + label + " " + prefix[Math.floor(Math.random()*prefix.length)];
            diceRoll = Math.floor(Math.random()*5);
            if (diceRoll==0) { label = "" + label + syllable[Math.floor(Math.random()*syllable.length)]; }
            label = "" + label + suffix[Math.floor(Math.random()*suffix.length)];
        }
        // DONE //
        this.SetNameUrl(label);
        this.UpdateFormText(this.TitleInput, label);
        this.UpdateUrlText();
        return label;
    }
    //-------------------------------------------------------------------------------------------
    // INTERACTION
    //-------------------------------------------------------------------------------------------
    OpenPanel() {
        this.Open = true;
        // Start fully above the viewport, then tween OffsetY down to 0.
        this.OffsetY = -App.Height;
        //this.TitleInput.focus();
        this.GetWarning();
        this.ShowDom();
        this.DelayTo(this,0,500,0,"OffsetY");
    }
    ClosePanel() {
        this._Saving = false;
        // Slide the panel back up; the tween's onComplete clears Open/DOM.
        this.DelayTo(this,-App.Height,500,0,"OffsetY");
    }
GenerateLink() {
if (!this._NoBlocks) {
this._Saving = true;
this._CommandManager.ExecuteCommand(Commands.SAVE_AS);
}
}
UpdateLink() {
if (!this._NoBlocks) {
this._Saving = true;
this._CommandManager.ExecuteCommand(Commands.SAVE);
}
}
    // Receives the new session id (presumably once a save command finishes —
    // see GenerateLink/UpdateLink), refreshes the URL and slides to the share view.
    ReturnLink(id) {
        this._Saving = false;
        this._SessionId = id;
        this.UpdateUrlText();
        this.DelayTo(this,-(App.Width*1.5),500,0,"OffsetX");
    }
ShareFacebook() {
var href = "http://www.facebook.com/sharer.php?u=";
href = "" + href + encodeURIComponent(this.SessionURL);
window.open(href,'', 'menubar=no,toolbar=no,resizable=yes,scrollbars=yes,height=600,width=600');
}
ShareTwitter() {
var href = "https://twitter.com/intent/tweet?text=";
href = "" + href + encodeURIComponent(this._CopyJson.tweetText);
href = "" + href + "&url=" + encodeURIComponent(this.SessionURL);
window.open(href,'', 'menubar=no,toolbar=no,resizable=yes,scrollbars=yes,height=600,width=600');
}
ShareSubreddit() {
const title = encodeURIComponent(Utils.Urls.getQuerystringParameterFromString('t', this.SessionURL));
const url = encodeURIComponent(this.SessionURL);
const href = `https://www.reddit.com/r/blokdust/submit?title=${title}&url=${url}`;
window.open(href,'ShareToBlokDustSubreddit');
}
MouseDown(point) {
this.HitTests(point);
if (!this._RollOvers[0]) { // url
if (this._RollOvers[1]) { // close
this.ClosePanel();
return;
}
if (this._RollOvers[2]) { // gen title
this.SessionTitle = this.GenerateLabel();
return;
}
if (this._RollOvers[3]) { // gen URL
this.GenerateLink();
return;
}
if (this._RollOvers[4]) { // save
this.UpdateLink();
return;
}
if (this._RollOvers[5]) { // save as
this.GenerateLink();
return;
}
if (this._RollOvers[6]) { // fb
this.ShareFacebook();
return;
}
if (this._RollOvers[7]) { // tw
this.ShareTwitter();
return;
}
if (this._RollOvers[8]) { // subreddit
this.ShareSubreddit();
return;
}
if (this._RollOvers[9]) { // back arrow
this.DelayTo(this,0,500,0,"OffsetX");
return;
}
if (this._RollOvers[10]) { // skip
this.DelayTo(this,-(App.Width*1.5),500,0,"OffsetX");
return;
}
this._UrlSelecting = false;
} else {
this._UrlSelecting = true;
}
}
    // No-op: all interaction is resolved on MouseDown/MouseMove.
    MouseUp(point,isTouch?) {
    }
    // Refresh hover (roll-over) states as the pointer moves.
    MouseMove(point) {
        this.HitTests(point);
    }
    /**
     * Fills _RollOvers with hover state for each interactive region.
     * Index map: 0 url field, 1 close, 2 gen title, 3 gen URL (first
     * session), 4 save, 5 save-as, 6 facebook, 7 twitter, 8 subreddit,
     * 9 back arrow, 10 skip. Note arrowX here is 10 larger than in Draw()
     * — presumably to enlarge the arrow hit targets; verify intentional.
     */
    HitTests(point) {
        var units = App.Unit;
        var shareX = this.OffsetX + (App.Width*1.5);
        var centerY = this.OffsetY + (App.Height * 0.5);
        var buttonY = centerY + (35*units);
        var ctx = this.Ctx;
        var midType = App.Metrics.TxtMid;
        var appWidth = App.Width;
        // Measure the "Randomise Title" label so its hit box matches Draw().
        ctx.font = midType;
        var genW = ctx.measureText(this._CopyJson.generateLine.toUpperCase()).width;
        var clx = 230;
        var cly = 130;
        var arrowX = 285;
        var arrowY = 0;
        if (App.Metrics.Device === Device.tablet) {
            arrowX = 255;
        }
        if (App.Metrics.Device === Device.mobile) {
            clx = 202.5;
            cly = 150;
            arrowX = 200;
            arrowY = 110;
        }
        this._RollOvers[0] = Dimensions.hitRect(shareX + (appWidth*0.5) - (210*units), centerY - (20*units),420*units,40*units, point.x, point.y); // url
        this._RollOvers[1] = Dimensions.hitRect((appWidth*0.5) + ((clx-20)*units), centerY - ((cly+20)*units),40*units,40*units, point.x, point.y); // close
        this._RollOvers[2] = Dimensions.hitRect((appWidth*0.5) + (200*units) - genW, centerY - (130*units),genW + (10*units),40*units, point.x, point.y); // gen title
        if (this._FirstSession) {
            this._RollOvers[3] = Dimensions.hitRect(this.OffsetX + (appWidth*0.5) - (210*units), centerY - (20*units),420*units,40*units, point.x, point.y); // gen URL
            this._RollOvers[4] = false;
            this._RollOvers[5] = false;
            this._RollOvers[10] = false;
        } else {
            this._RollOvers[3] = false;
            this._RollOvers[4] = Dimensions.hitRect(this.OffsetX + (appWidth*0.5) - (210*units), centerY - (20*units),202.5*units,40*units, point.x, point.y); // save
            this._RollOvers[5] = Dimensions.hitRect(this.OffsetX + (appWidth*0.5) + (7.5*units), centerY - (20*units),202.5*units,40*units, point.x, point.y); // save as
            this._RollOvers[10] = Dimensions.hitRect(this.OffsetX + (appWidth*0.5) + ((arrowX-15)*units),centerY + (units*(arrowY-20)),30*units,40*units, point.x, point.y); // skip
        }
        this._RollOvers[6] = Dimensions.hitRect(shareX + (appWidth*0.5) - (210*units),buttonY,130*units,30*units, point.x, point.y); // fb
        this._RollOvers[7] = Dimensions.hitRect(shareX + (appWidth*0.5) - (65*units),buttonY,130*units,30*units, point.x, point.y); // tw
        this._RollOvers[8] = Dimensions.hitRect(shareX + (appWidth*0.5) + (80*units),buttonY,130*units,30*units, point.x, point.y); // gp
        this._RollOvers[9] = Dimensions.hitRect(shareX + (appWidth*0.5) - ((arrowX+15)*units),centerY + (units*(arrowY-20)),30*units,40*units, point.x, point.y); // back
    }
//-------------------------------------------------------------------------------------------
// GENERAL
//-------------------------------------------------------------------------------------------
/**
 * Resets the panel state for a new session.
 * A session counts as "first" only when no session id has been assigned yet;
 * without a composition id there is no stored title, so one is generated.
 */
Reset() {
    this._FirstSession = !App.SessionId;
    this._SessionId = App.CompositionId;
    if (!this._SessionId) {
        this.SessionTitle = this.GenerateLabel();
    }
}
/**
 * Recomputes panel placement after a viewport resize.
 * Keeps the panel parked off-screen when it is not at its resting position,
 * then re-applies DOM geometry to the two input containers (if present).
 */
Resize() {
    const offscreenX = -(App.Width * 1.5);
    if (this.OffsetX !== 0) {
        this.OffsetX = offscreenX;
    }
    this.ClearScroll();
    if (!this.URLInput) {
        return;
    }
    this.StyleDom(this.URLInputContainer, 400, 40, 200, 20, this.OffsetX + (App.Width * 1.5), App.Metrics.TxtUrl);
    this.StyleDom(this.TitleInputContainer, 300, 42, 210, 132, 0, App.Metrics.TxtHeaderPR);
}
//-------------------------------------------------------------------------------------------
// CSS / DOM
//-------------------------------------------------------------------------------------------
/**
 * Moves a DOM element along one axis during a tween.
 *
 * @param element  DOM node to reposition.
 * @param value    Current tween value in raw pixels.
 * @param mode     "x" updates style.left, "y" updates style.top.
 * @param position Offset from the app centre in design units.
 * @param offset   Extra horizontal offset as a fraction of App.Width
 *                 (only used in "x" mode).
 */
TweenDom(element: HTMLElement, value: number, mode: string, position: number, offset: number) {
    const units = App.Unit;
    const pr = App.Metrics.PixelRatio;
    if (mode === "x") {
        const left = (value + (App.Width * (0.5 + offset)) - (units * position)) / pr;
        element.style.left = `${left}px`;
    } else if (mode === "y") {
        const top = (value + (App.Height * 0.5) - (units * position)) / pr;
        element.style.top = `${top}px`;
    }
}
/**
 * Sizes, fonts, and positions an absolutely-placed DOM element relative to
 * the app centre point, hiding it when the panel is closed.
 *
 * @param element  DOM node to style.
 * @param width    Element width in design units (pre pixel-ratio).
 * @param height   Element height in design units.
 * @param x        Horizontal offset from the centre, in design units.
 * @param y        Vertical offset from the centre, in design units.
 * @param xOffset  Extra horizontal offset in raw pixels (e.g. slide-in tween).
 * @param font     CSS font shorthand to apply.
 */
StyleDom(element: HTMLElement, width: number, height: number, x: number, y: number, xOffset: number, font: string) {
    var units = (App.Unit);
    var pr = App.Metrics.PixelRatio;
    element.style.font = font;
    element.style.width = "" + (units*(width/pr)) + "px";
    element.style.height = "" + (units*(height/pr)) + "px";
    element.style.lineHeight = "" + (units*(height/pr)) + "px";
    element.style.display = "block";
    if (!this.Open) {
        this.OffsetY = -App.Height;
        element.style.display = "none";
        // Fix: "false" is not a valid CSS visibility keyword and was
        // silently ignored; "hidden" is the correct value (the element is
        // already display:none, so rendering is unchanged).
        element.style.visibility = "hidden";
    }
    var offsetX = xOffset/pr;
    var offsetY = this.OffsetY/pr;
    // Fix: these locals previously re-declared (and clobbered) the `width`
    // and `height` parameters; renamed to avoid the shadowing hazard.
    var appWidth = App.Width/pr;
    var appHeight = App.Height/pr;
    element.style.left = "" + (offsetX + (appWidth*0.5) - (units*(x/pr))) + "px";
    element.style.top = "" + (offsetY + (appHeight*0.5) - (units*(y/pr))) + "px";
}
/**
 * Makes the share-URL and title input containers visible.
 */
ShowDom() {
    var shareUrl = this.URLInputContainer;
    var shareTitle = this.TitleInputContainer;
    shareUrl.style.display = "block";
    // Fix: "true" is not a valid CSS visibility keyword and was silently
    // ignored; "visible" is the correct value.
    shareUrl.style.visibility = "visible";
    shareTitle.style.display = "block";
    shareTitle.style.visibility = "visible";
}
/**
 * Hides the share-URL and title input containers.
 */
HideDom() {
    var shareUrl = this.URLInputContainer;
    var shareTitle = this.TitleInputContainer;
    shareUrl.style.display = "none";
    // Fix: "false" is not a valid CSS visibility keyword and was silently
    // ignored; "hidden" is the correct value.
    shareUrl.style.visibility = "hidden";
    shareTitle.style.display = "none";
    shareTitle.style.visibility = "hidden";
}
// Resets the browser scroll position to the top-left corner.
ClearScroll() {
window.scrollTo(0,0);
}
}
import { incrementMockedDate } from "@labelflow/dev-utils/mockdate";
import { gql } from "@apollo/client";
import { processImage } from "../../repository/image-processing";
import { client } from "../../apollo-client/schema-client";
import { setupTestsWithLocalDatabase } from "../../../utils/setup-local-db-tests";
// Run every test in this file against a fresh local database.
setupTestsWithLocalDatabase();
// Stub out image processing so createImage never touches real image files.
jest.mock("../../repository/image-processing");
const mockedProcessImage = processImage as jest.Mock;
// Dataset id shared by most tests below.
const testDatasetId = "test dataset id";
/**
 * Creates a labelClass through the GraphQL API and returns its id.
 */
const createLabelClass = async (data: {
  name: string;
  color: string;
  datasetId: string;
  id?: string;
}) => {
  const result = await client.mutate({
    mutation: gql`
      mutation createLabelClass($data: LabelClassCreateInput!) {
        createLabelClass(data: $data) {
          id
        }
      }
    `,
    variables: { data },
  });
  return result.data.createLabelClass.id;
};
/**
 * Creates an image (with image processing mocked out) and attaches a new
 * unit-square polygon label to it, whose left edge sits at `x`.
 * Returns the raw createLabel mutation result.
 */
const createLabel = async (
  labelClassId: string,
  x: number,
  datasetId: string = testDatasetId,
  labelId: string = "myLabelId"
) => {
  // The image resolver calls processImage; stub its metadata result.
  mockedProcessImage.mockReturnValue({
    width: 42,
    height: 36,
    mime: "image/jpeg",
  });
  const imageResult = await client.mutate({
    mutation: gql`
      mutation createImage($file: Upload!, $name: String!, $datasetId: ID!) {
        createImage(data: { name: $name, file: $file, datasetId: $datasetId }) {
          id
        }
      }
    `,
    variables: {
      file: new Blob(),
      name: "someImageName",
      datasetId,
    },
  });
  const imageId = imageResult.data.createImage.id;
  // 1x1 square polygon with its left edge at x (closed ring).
  const geometry = {
    type: "Polygon",
    coordinates: [
      [
        [x, 0],
        [x + 1, 0],
        [x + 1, 1],
        [x, 1],
        [x, 0],
      ],
    ],
  };
  return await client.mutate({
    mutation: gql`
      mutation createLabel($data: LabelCreateInput!) {
        createLabel(data: $data) {
          id
        }
      }
    `,
    variables: {
      data: { id: labelId, imageId, labelClassId, geometry },
    },
  });
};
/**
 * Creates a dataset in the local workspace and returns the raw mutation
 * result (no-cache so the test always hits the resolver).
 */
const createDataset = async (
  name: string,
  datasetId: string = testDatasetId
) => {
  const mutation = gql`
    mutation createDataset($datasetId: String, $name: String!) {
      createDataset(
        data: { id: $datasetId, name: $name, workspaceSlug: "local" }
      ) {
        id
        name
      }
    }
  `;
  return await client.mutate({
    mutation,
    variables: { name, datasetId },
    fetchPolicy: "no-cache",
  });
};
// Integration tests for the LabelClass GraphQL resolver: CRUD, chronological
// index management (creation order via a mocked clock), reordering,
// cascading behaviour on delete, pagination, and aggregation.
describe("LabelClass resolver test suite", () => {
  test("Query labelClass when database is empty", async () => {
    const queryResult = await client.query({
      query: gql`
        query {
          labelClasses {
            id
          }
        }
      `,
    });
    expect(queryResult.data.labelClasses.length).toEqual(0);
  });
  test("Query labelClass when id doesn't exists", async () => {
    await expect(
      client.query({
        query: gql`
          query getLabelClass($id: ID!) {
            labelClass(where: { id: $id }) {
              id
            }
          }
        `,
        variables: {
          id: "some-id",
        },
      })
    ).rejects.toThrow("No labelClass with such id");
  });
  it("should fail labelClass creation when there is a no dataset with the given id", async () => {
    expect.assertions(1);
    await expect(
      createLabelClass({
        name: "toto",
        color: "#ff0000",
        datasetId: testDatasetId,
      })
    ).rejects.toThrow("The dataset id test dataset id doesn't exist.");
  });
  it("should create labelClass when there is a dataset", async () => {
    await createDataset("Test dataset");
    const id = await createLabelClass({
      name: "toto",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    const queryResult = await client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            id
            name
            color
            dataset {
              id
            }
          }
        }
      `,
      variables: {
        id,
      },
      fetchPolicy: "no-cache",
    });
    expect(queryResult.data.labelClass).toEqual(
      expect.objectContaining({
        id,
        name: "toto",
        color: "#ff0000",
        dataset: { __typename: "Dataset", id: testDatasetId },
      })
    );
  });
  it("should increment labelClass index chronologically", async () => {
    await createDataset("Test dataset");
    const id0 = await createLabelClass({
      name: "toto",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    // Advance the mocked clock so the second class gets a later createdAt.
    incrementMockedDate(1);
    const id1 = await createLabelClass({
      name: "tata",
      color: "#00ff00",
      datasetId: testDatasetId,
    });
    const queryResult0 = await client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            id
            index
            name
            color
            dataset {
              id
            }
          }
        }
      `,
      variables: {
        id: id0,
      },
      fetchPolicy: "no-cache",
    });
    expect(queryResult0.data.labelClass).toEqual(
      expect.objectContaining({
        id: id0,
        index: 0,
        name: "toto",
        color: "#ff0000",
        dataset: { __typename: "Dataset", id: testDatasetId },
      })
    );
    const queryResult1 = await client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            id
            index
            name
            color
            dataset {
              id
            }
          }
        }
      `,
      variables: {
        id: id1,
      },
      fetchPolicy: "no-cache",
    });
    expect(queryResult1.data.labelClass).toEqual(
      expect.objectContaining({
        id: id1,
        index: 1,
        name: "tata",
        color: "#00ff00",
        dataset: { __typename: "Dataset", id: testDatasetId },
      })
    );
  });
  it("should create labelClass with an ID", async () => {
    await createDataset("Test dataset");
    const labelClassId = "a custom id";
    const id = await createLabelClass({
      id: labelClassId,
      name: "toto",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    const queryResult = await client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            id
          }
        }
      `,
      variables: {
        id,
      },
    });
    expect(queryResult.data.labelClass.id).toEqual(labelClassId);
  });
  it("should update a label class", async () => {
    await createDataset("Test dataset");
    const labelId = await createLabelClass({
      name: "toto",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    await client.mutate({
      mutation: gql`
        mutation updateLabelClass($id: ID!) {
          updateLabelClass(
            where: { id: $id }
            data: { name: "tata", color: "#0000ff" }
          ) {
            id
          }
        }
      `,
      variables: {
        id: labelId,
      },
    });
    const queryResult = await client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            id
            name
            color
          }
        }
      `,
      variables: {
        id: labelId,
      },
    });
    expect(queryResult.data.labelClass.name).toEqual("tata");
    expect(queryResult.data.labelClass.color).toEqual("#0000ff");
  });
  it("should throw when the label class to update doesn't exist", () => {
    return expect(
      client.mutate({
        mutation: gql`
          mutation updateLabelClass($id: ID!) {
            updateLabelClass(where: { id: $id }, data: { name: "tata" }) {
              id
            }
          }
        `,
        variables: {
          id: "id-of-a-label-that-doesnt-exist",
        },
      })
    ).rejects.toThrow("No labelClass with such id");
  });
  it("should reorder labelClasses indices", async () => {
    await createDataset("Test dataset");
    const id0 = await createLabelClass({
      name: "toto",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    incrementMockedDate(1);
    const id1 = await createLabelClass({
      name: "tata",
      color: "#00ff00",
      datasetId: testDatasetId,
    });
    incrementMockedDate(1);
    const id2 = await createLabelClass({
      name: "tutu",
      color: "#0000ff",
      datasetId: testDatasetId,
    });
    // Helper: fetch a labelClass (no-cache) and return only its index.
    const getLabelClassIndex = async (id: string) =>
      (
        await client.query({
          query: gql`
            query getLabelClass($id: ID!) {
              labelClass(where: { id: $id }) {
                id
                index
                name
                color
                datasetId
              }
            }
          `,
          variables: {
            id,
          },
          fetchPolicy: "no-cache",
        })
      ).data.labelClass.index;
    // Nominal/default configuration
    expect(await getLabelClassIndex(id0)).toEqual(0);
    expect(await getLabelClassIndex(id1)).toEqual(1);
    expect(await getLabelClassIndex(id2)).toEqual(2);
    await client.mutate({
      mutation: gql`
        mutation reorderLabelClasses($id: ID!, $index: Int!) {
          reorderLabelClass(where: { id: $id }, data: { index: $index }) {
            id
          }
        }
      `,
      variables: {
        id: id2,
        index: 0,
      },
    });
    // Now the third labelClass is at position 0, and so on
    expect(await getLabelClassIndex(id0)).toEqual(1);
    expect(await getLabelClassIndex(id1)).toEqual(2);
    expect(await getLabelClassIndex(id2)).toEqual(0);
  });
  it("should delete a label class and update index", async () => {
    await createDataset("Test dataset");
    const labelClassId = await createLabelClass({
      name: "toto",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    incrementMockedDate(1);
    // The following labelClass will have index 1
    const labelClassId1 = await createLabelClass({
      name: "tata",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    await createLabel(labelClassId, 2, testDatasetId);
    await client.mutate({
      mutation: gql`
        mutation deleteLabelClass($id: ID!) {
          deleteLabelClass(where: { id: $id }) {
            id
          }
        }
      `,
      variables: {
        id: labelClassId,
      },
    });
    const queryResultLabelClass = await client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            index
            datasetId
          }
        }
      `,
      variables: {
        id: labelClassId1,
      },
      fetchPolicy: "no-cache",
    });
    // Check that this labelClass now have index 0
    expect(queryResultLabelClass.data?.labelClass?.index).toEqual(0);
    const queryResult = client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            id
          }
        }
      `,
      variables: {
        id: labelClassId,
      },
    });
    await expect(queryResult).rejects.toThrow("No labelClass with such id");
  });
  it("should set all the labels linked to label class to labelClassId none when the class is deleted", async () => {
    await createDataset("Test dataset");
    const labelClassId = await createLabelClass({
      name: "toto",
      color: "#ff0000",
      datasetId: testDatasetId,
    });
    await createLabel(labelClassId, 2, testDatasetId);
    const labelQueryResultBeforeDelete = await client.query({
      query: gql`
        query getLabelData($id: ID!) {
          label(where: { id: $id }) {
            id
            labelClass {
              id
            }
          }
        }
      `,
      variables: {
        id: "myLabelId",
      },
    });
    expect(labelQueryResultBeforeDelete.data.label.labelClass.id).toBe(
      labelClassId
    );
    await client.mutate({
      mutation: gql`
        mutation deleteLabelClass($id: ID!) {
          deleteLabelClass(where: { id: $id }) {
            id
          }
        }
      `,
      variables: {
        id: labelClassId,
      },
    });
    const labelQueryResult = await client.query({
      query: gql`
        query getLabelData($id: ID!) {
          label(where: { id: $id }) {
            id
            labelClass {
              id
            }
          }
        }
      `,
      variables: {
        id: "myLabelId",
      },
      fetchPolicy: "no-cache",
    });
    expect(labelQueryResult.data.label.labelClass).toBeNull();
  });
  test("should throw when the label class to delete doesn't exist", () => {
    return expect(
      client.mutate({
        mutation: gql`
          mutation deleteLabelClass($id: ID!) {
            deleteLabelClass(where: { id: $id }) {
              id
            }
          }
        `,
        variables: {
          id: "id-of-a-label-that-doesnt-exist",
        },
      })
    ).rejects.toThrow("No labelClass with such id");
  });
  it("should query labelClasses ignoring linked datasets", async () => {
    await createDataset("Test dataset 1", "dataset 1");
    const id1 = await createLabelClass({
      name: "labelClass1",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    incrementMockedDate(1);
    const id0 = await createLabelClass({
      name: "labelClass0",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    incrementMockedDate(1);
    const id2 = await createLabelClass({
      name: "labelClass2",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    const queryResult = await client.query({
      query: gql`
        query {
          labelClasses(where: { datasetId: "dataset 1" }) {
            id
          }
        }
      `,
    });
    expect(queryResult.data.labelClasses.length).toEqual(3);
    // Results come back in creation order, not alphabetical order.
    expect(
      queryResult.data.labelClasses.map(
        (labelClasses: { id: string }) => labelClasses.id
      )
    ).toEqual([id1, id0, id2]);
  });
  it("should query paginated labelClasses ignoring linked datasets", async () => {
    await createDataset("Test dataset 1", "dataset 1");
    await createLabelClass({
      name: "labelClass0",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    incrementMockedDate(1);
    const id0 = await createLabelClass({
      name: "labelClass1",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    incrementMockedDate(1);
    const id1 = await createLabelClass({
      name: "labelClass2",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    incrementMockedDate(1);
    await createLabelClass({
      name: "labelClass3",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    // first: 2 / skip: 1 should select the 2nd and 3rd classes created.
    const queryResult = await client.query({
      query: gql`
        query {
          labelClasses(first: 2, skip: 1, where: { datasetId: "dataset 1" }) {
            id
            name
            index
          }
        }
      `,
    });
    expect(queryResult.data.labelClasses.length).toEqual(2);
    expect(
      queryResult.data.labelClasses.map(
        (labelClass: { id: string }) => labelClass.id
      )
    ).toEqual([id0, id1]);
  });
  it("should query a labelClass with labels", async () => {
    await createDataset("Test dataset", "a dataset id");
    const labelClassId = await createLabelClass({
      name: "some labelClass",
      color: "#ff0000",
      datasetId: "a dataset id",
    });
    await createLabel(labelClassId, 2, "a dataset id", "myLabelId1");
    incrementMockedDate(1);
    await createLabel(labelClassId, 1, "a dataset id", "myLabelId2");
    const queryResult = await client.query({
      query: gql`
        query getLabelClass($id: ID!) {
          labelClass(where: { id: $id }) {
            id
            labels {
              x
            }
          }
        }
      `,
      variables: {
        id: labelClassId,
      },
    });
    // labels should show in the right order
    expect(
      queryResult.data.labelClass.labels.map((l: { x: number }) => l.x)
    ).toEqual([2, 1]);
  });
  it("should query label classes linked to a dataset", async () => {
    await createDataset("Test dataset 1", "dataset 1");
    await createDataset("Test dataset 2", "dataset 2");
    const labelClassId2 = await createLabelClass({
      name: "second labelClass",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    incrementMockedDate(1);
    const labelClassId1 = await createLabelClass({
      name: "first labelClass",
      color: "#ff0000",
      datasetId: "dataset 1",
    });
    incrementMockedDate(1);
    // This class belongs to dataset 2 and must be filtered out below.
    await createLabelClass({
      name: "other first labelClass",
      color: "#ff0000",
      datasetId: "dataset 2",
    });
    const queryResult = await client.query({
      query: gql`
        query {
          labelClasses(where: { datasetId: "dataset 1" }) {
            id
          }
        }
      `,
    });
    expect(queryResult.data.labelClasses.length).toEqual(2);
    expect(
      queryResult.data.labelClasses.map(
        (labelClass: { id: string }) => labelClass.id
      )
    ).toEqual([labelClassId2, labelClassId1]);
  });
  it("should returns the correct count of labelClasses", async () => {
    await createDataset("Test dataset 1", "dataset 1");
    await createDataset("Test dataset 2", "dataset 2");
    await Promise.all([
      createLabelClass({
        name: "first labelClass",
        color: "#ff0000",
        datasetId: "dataset 1",
      }),
      createLabelClass({
        name: "second labelClass",
        color: "#ff0000",
        datasetId: "dataset 2",
      }),
      createLabelClass({
        name: "third labelClass",
        color: "#ff0000",
        datasetId: "dataset 2",
      }),
    ]);
    // The aggregate counts classes across every dataset.
    const queryResult = await client.query({
      query: gql`
        query getLabelClass {
          labelClassesAggregates {
            totalCount
          }
        }
      `,
    });
    expect(queryResult.data.labelClassesAggregates.totalCount).toEqual(3);
  });
});
import React, { Component, ComponentType, MutableRefObject, Ref } from 'react';
import { findNodeHandle, Platform, StyleSheet } from 'react-native';
import WorkletEventHandler from './reanimated2/WorkletEventHandler';
import setAndForwardRef from './setAndForwardRef';
import './reanimated2/layoutReanimation/LayoutAnimationRepository';
import invariant from 'invariant';
import { adaptViewConfig } from './ConfigHelper';
import { RNRenderer } from './reanimated2/platform-specific/RNRenderer';
import {
makeMutable,
runOnUI,
enableLayoutAnimations,
} from './reanimated2/core';
import {
DefaultEntering,
DefaultExiting,
DefaultLayout,
} from './reanimated2/layoutReanimation/defaultAnimations/Default';
import {
isJest,
isChromeDebugger,
shouldBeUseWeb,
} from './reanimated2/PlatformChecker';
import { initialUpdaterRun } from './reanimated2/animation';
import {
BaseAnimationBuilder,
EntryExitAnimationFunction,
ILayoutAnimationBuilder,
} from './reanimated2/layoutReanimation';
import {
SharedValue,
StyleProps,
ShadowNodeWrapper,
} from './reanimated2/commonTypes';
import {
ViewDescriptorsSet,
ViewRefSet,
} from './reanimated2/ViewDescriptorsSet';
import { getShadowNodeWrapperFromRef } from './reanimated2/fabricUtils';
// No-op placeholder assigned to listener props whose events are handled by a
// worklet (animated event) instead of a JS callback.
function dummyListener() {
  // empty listener we use to assign to listener properties for which animated
  // event is used.
}
type NestedArray<T> = T | NestedArray<T>[];

/**
 * Flattens an arbitrarily nested array into a flat list, preserving element
 * order. A non-array input is returned wrapped in a one-element array.
 */
function flattenArray<T>(array: NestedArray<T>): T[] {
  if (!Array.isArray(array)) {
    return [array];
  }
  const flattened: T[] = [];
  for (const item of array) {
    if (Array.isArray(item)) {
      flattened.push(...flattenArray(item));
    } else {
      flattened.push(item);
    }
  }
  return flattened;
}
// Keeps only the styles produced by useAnimatedStyle — those are recognised
// by the viewDescriptors field they carry.
function onlyAnimatedStyles(styles: StyleProps[]) {
  const isAnimatedStyle = (style: StyleProps) => style?.viewDescriptors;
  return styles.filter(isAnimatedStyle);
}
/**
 * Tells whether two useAnimatedStyle outputs come from the same hook
 * instance. Their objects cannot be compared by identity directly, but the
 * viewsRef set is stable per hook instance, so compare that.
 */
function isSameAnimatedStyle(
  style1?: StyleProps,
  style2?: StyleProps
): boolean {
  const viewsRef1 = style1?.viewsRef;
  const viewsRef2 = style2?.viewsRef;
  return viewsRef1 === viewsRef2;
}
// Animated props follow the same identity rule as animated styles.
const isSameAnimatedProps = isSameAnimatedStyle;
/**
 * Type guard: true when `x` is a function or a non-null object that has the
 * property `key` (own or inherited).
 */
const has = <K extends string>(
  key: K,
  x: unknown
): x is { [key in K]: unknown } => {
  if (typeof x === 'function') {
    return key in x;
  }
  // `typeof null` is 'object', so null must be excluded explicitly. The
  // original also checked `x === undefined` inside this branch, but that was
  // unreachable: `typeof undefined` is 'undefined', so undefined never
  // enters here and simply falls through to `false`.
  return typeof x === 'object' && x !== null && key in x;
};
/** Extra fields carried by a useAnimatedProps output object. */
interface AnimatedProps extends Record<string, unknown> {
  viewDescriptors?: ViewDescriptorsSet;
  viewsRef?: ViewRefSet<unknown>;
  initial?: SharedValue<StyleProps>;
}
/**
 * Props accepted by a component wrapped with createAnimatedComponent: the
 * wrapped component's own props plus animated style/props objects and
 * layout / entering / exiting animation builders.
 */
export type AnimatedComponentProps<P extends Record<string, unknown>> = P & {
  forwardedRef?: Ref<Component>;
  style?: NestedArray<StyleProps>;
  animatedProps?: Partial<AnimatedComponentProps<AnimatedProps>>;
  animatedStyle?: StyleProps;
  layout?:
    | BaseAnimationBuilder
    | ILayoutAnimationBuilder
    | typeof BaseAnimationBuilder;
  entering?:
    | BaseAnimationBuilder
    | typeof BaseAnimationBuilder
    | EntryExitAnimationFunction
    | Keyframe;
  exiting?:
    | BaseAnimationBuilder
    | typeof BaseAnimationBuilder
    | EntryExitAnimationFunction
    | Keyframe;
};
/** Integration hooks for createAnimatedComponent (custom setNativeProps). */
type Options<P> = {
  setNativeProps: (ref: ComponentRef, props: P) => void;
};
/** Minimal shape of the wrapped component instance this module touches. */
interface ComponentRef extends Component {
  setNativeProps?: (props: Record<string, unknown>) => void;
  getScrollableNode?: () => ComponentRef;
}
export interface InitialComponentProps extends Record<string, unknown> {
  ref?: Ref<Component>;
  collapsable?: boolean;
}
/**
 * Wraps `Component` in a class component that wires Reanimated features into
 * it: animated styles/props (useAnimatedStyle / useAnimatedProps), worklet
 * event handlers, and layout/entering/exiting animations. The wrapped
 * component's own interface is preserved and refs are forwarded to the inner
 * component instance.
 */
export default function createAnimatedComponent(
  Component: ComponentType<InitialComponentProps>,
  options?: Options<InitialComponentProps>
): ComponentType<AnimatedComponentProps<InitialComponentProps>> {
  invariant(
    typeof Component !== 'function' ||
      (Component.prototype && Component.prototype.isReactComponent),
    '`createAnimatedComponent` does not support stateless functional components; ' +
      'use a class component instead.'
  );
  class AnimatedComponent extends React.Component<
    AnimatedComponentProps<InitialComponentProps>
  > {
    // Animated styles currently attached to this view (null before mount).
    _styles: StyleProps[] | null = null;
    _animatedProps?: Partial<AnimatedComponentProps<AnimatedProps>>;
    // Native view tag; -1 until _attachAnimatedStyles resolves it.
    _viewTag = -1;
    _isFirstRender = true;
    animatedStyle: { value: StyleProps } = { value: {} };
    initialStyle = {};
    // Shared value handed to the layout-animation config; nulled on unmount.
    sv: SharedValue<null | Record<string, unknown>> | null;
    _component: ComponentRef | null = null;
    static displayName: string;
    constructor(props: AnimatedComponentProps<InitialComponentProps>) {
      super(props);
      if (isJest()) {
        this.animatedStyle = { value: {} };
      }
      this.sv = makeMutable({});
    }
    componentWillUnmount() {
      this._detachNativeEvents();
      this._detachStyles();
      this.sv = null;
    }
    componentDidMount() {
      this._attachNativeEvents();
      this._attachAnimatedStyles();
    }
    // Registers every WorkletEventHandler found in props for this view's tag.
    _attachNativeEvents() {
      const viewTag = findNodeHandle(this);
      for (const key in this.props) {
        const prop = this.props[key];
        if (
          has('current', prop) &&
          prop.current instanceof WorkletEventHandler
        ) {
          prop.current.registerForEvents(viewTag as number, key);
        }
      }
    }
    // Unregisters every WorkletEventHandler found in props.
    _detachNativeEvents() {
      for (const key in this.props) {
        const prop = this.props[key];
        if (
          has('current', prop) &&
          prop.current instanceof WorkletEventHandler
        ) {
          prop.current.unregisterFromEvents();
        }
      }
    }
    // Removes this view from every attached style/props descriptor set; on
    // Fabric, also drops the shadow node from the UI-thread registry.
    _detachStyles() {
      if (Platform.OS === 'web' && this._styles !== null) {
        for (const style of this._styles) {
          if (style?.viewsRef) {
            style.viewsRef.remove(this);
          }
        }
      } else if (this._viewTag !== -1 && this._styles !== null) {
        for (const style of this._styles) {
          style.viewDescriptors.remove(this._viewTag);
        }
        if (this.props.animatedProps?.viewDescriptors) {
          this.props.animatedProps.viewDescriptors.remove(this._viewTag);
        }
        if (global._IS_FABRIC) {
          const shadowNodeWrapper = getShadowNodeWrapperFromRef(this);
          runOnUI(() => {
            'worklet';
            _removeShadowNodeFromRegistry(shadowNodeWrapper);
          })();
        }
      }
    }
    // Re-registers worklet event handlers whose reattachNeeded flag is set
    // (e.g. after the underlying native view changed).
    _reattachNativeEvents(
      prevProps: AnimatedComponentProps<InitialComponentProps>
    ) {
      let viewTag: number | undefined;
      for (const key in this.props) {
        const prop = this.props[key];
        if (
          has('current', prop) &&
          prop.current instanceof WorkletEventHandler
        ) {
          if (viewTag === undefined) {
            viewTag = prop.current.viewTag;
          }
        }
      }
      for (const key in prevProps) {
        const prop = this.props[key];
        if (
          has('current', prop) &&
          prop.current instanceof WorkletEventHandler &&
          prop.current.reattachNeeded
        ) {
          prop.current.unregisterFromEvents();
        }
      }
      for (const key in this.props) {
        const prop = this.props[key];
        if (
          has('current', prop) &&
          prop.current instanceof WorkletEventHandler &&
          prop.current.reattachNeeded
        ) {
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          prop.current.registerForEvents(viewTag!, key);
          prop.current.reattachNeeded = false;
        }
      }
    }
    // Pushes prop updates coming from the UI thread onto the native view,
    // via the custom hook from `options` when one was provided.
    _updateFromNative(props: StyleProps) {
      if (options?.setNativeProps) {
        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
        options.setNativeProps(this._component!, props);
      } else {
        // eslint-disable-next-line no-unused-expressions
        this._component?.setNativeProps?.(props);
      }
    }
    // Resolves this view's tag/name/shadow node and (re)connects the current
    // animated styles and animatedProps descriptors to it, detaching
    // whatever was attached on a previous render and is no longer present.
    _attachAnimatedStyles() {
      const styles = this.props.style
        ? onlyAnimatedStyles(flattenArray<StyleProps>(this.props.style))
        : [];
      const prevStyles = this._styles;
      this._styles = styles;
      const prevAnimatedProps = this._animatedProps;
      this._animatedProps = this.props.animatedProps;
      let viewTag: number | null;
      let viewName: string | null;
      let shadowNodeWrapper: ShadowNodeWrapper | null = null;
      if (Platform.OS === 'web') {
        viewTag = findNodeHandle(this);
        viewName = null;
        shadowNodeWrapper = null;
      } else {
        // hostInstance can be null for a component that doesn't render anything (render function returns null). Example: svg Stop: https://github.com/react-native-svg/react-native-svg/blob/develop/src/elements/Stop.tsx
        const hostInstance = RNRenderer.findHostInstance_DEPRECATED(this);
        if (!hostInstance) {
          throw new Error(
            'Cannot find host instance for this component. Maybe it renders nothing?'
          );
        }
        // we can access view tag in the same way it's accessed here https://github.com/facebook/react/blob/e3f4eb7272d4ca0ee49f27577156b57eeb07cf73/packages/react-native-renderer/src/ReactFabric.js#L146
        viewTag = hostInstance?._nativeTag;
        /**
         * RN uses viewConfig for components for storing different properties of the component(example: https://github.com/facebook/react-native/blob/master/Libraries/Components/ScrollView/ScrollViewViewConfig.js#L16).
         * The name we're looking for is in the field named uiViewClassName.
         */
        viewName = hostInstance?.viewConfig?.uiViewClassName;
        // update UI props whitelist for this view
        const hasReanimated2Props =
          this.props.animatedProps?.viewDescriptors || styles.length;
        if (hasReanimated2Props && hostInstance?.viewConfig) {
          adaptViewConfig(hostInstance.viewConfig);
        }
        if (global._IS_FABRIC) {
          shadowNodeWrapper = getShadowNodeWrapperFromRef(this);
        }
      }
      this._viewTag = viewTag as number;
      // remove old styles
      if (prevStyles) {
        // in most of the cases, views have only a single animated style and it remains unchanged
        const hasOneSameStyle =
          styles.length === 1 &&
          prevStyles.length === 1 &&
          isSameAnimatedStyle(styles[0], prevStyles[0]);
        if (!hasOneSameStyle) {
          // otherwise, remove each style that is not present in new styles
          for (const prevStyle of prevStyles) {
            const isPresent = styles.some((style) =>
              isSameAnimatedStyle(style, prevStyle)
            );
            if (!isPresent) {
              prevStyle.viewDescriptors.remove(viewTag);
            }
          }
        }
      }
      styles.forEach((style) => {
        style.viewDescriptors.add({
          tag: viewTag,
          name: viewName,
          shadowNodeWrapper,
        });
        if (isJest()) {
          /**
           * We need to connect Jest's TestObject instance whose contains just props object
           * with the updateProps() function where we update the properties of the component.
           * We can't update props object directly because TestObject contains a copy of props - look at render function:
           * const props = this._filterNonAnimatedProps(this.props);
           */
          this.animatedStyle.value = {
            ...this.animatedStyle.value,
            ...style.initial.value,
          };
          style.animatedStyle.current = this.animatedStyle;
        }
      });
      // detach old animatedProps
      if (
        prevAnimatedProps &&
        !isSameAnimatedProps(prevAnimatedProps, this.props.animatedProps)
      ) {
        prevAnimatedProps.viewDescriptors!.remove(viewTag as number);
      }
      // attach animatedProps property
      if (this.props.animatedProps?.viewDescriptors) {
        this.props.animatedProps.viewDescriptors.add({
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          tag: viewTag!,
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          name: viewName!,
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          shadowNodeWrapper: shadowNodeWrapper!,
        });
      }
    }
    componentDidUpdate(
      prevProps: AnimatedComponentProps<InitialComponentProps>
    ) {
      this._reattachNativeEvents(prevProps);
      this._attachAnimatedStyles();
    }
    // Captures the inner component ref (also forwarding it to the caller)
    // and, when layout/entering/exiting props are set, registers the
    // layout-animation config for this view's tag on the UI thread.
    _setComponentRef = setAndForwardRef<Component>({
      getForwardedRef: () =>
        this.props.forwardedRef as MutableRefObject<
          Component<Record<string, unknown>, Record<string, unknown>, unknown>
        >,
      setLocalRef: (ref) => {
        // TODO update config
        const tag = findNodeHandle(ref);
        if (
          (this.props.layout || this.props.entering || this.props.exiting) &&
          tag != null
        ) {
          if (!shouldBeUseWeb()) {
            enableLayoutAnimations(true, false);
          }
          let layout = this.props.layout ? this.props.layout : DefaultLayout;
          let entering = this.props.entering
            ? this.props.entering
            : DefaultEntering;
          let exiting = this.props.exiting
            ? this.props.exiting
            : DefaultExiting;
          if (has('build', layout)) {
            layout = layout.build();
          }
          if (has('build', entering)) {
            entering = entering.build() as EntryExitAnimationFunction;
          }
          if (has('build', exiting)) {
            exiting = exiting.build() as EntryExitAnimationFunction;
          }
          const config = {
            layout,
            entering,
            exiting,
            sv: this.sv,
          };
          runOnUI(() => {
            'worklet';
            global.LayoutAnimationRepository.registerConfig(tag, config);
          })();
        }
        if (ref !== this._component) {
          this._component = ref;
        }
      },
    });
    // Builds the props object passed down to the wrapped component:
    // animated styles are replaced by their initial snapshot, animatedProps
    // are expanded to their initial values, and worklet-driven event props
    // are replaced by listeners (or no-op placeholders).
    _filterNonAnimatedProps(
      inputProps: AnimatedComponentProps<InitialComponentProps>
    ): Record<string, unknown> {
      const props: Record<string, unknown> = {};
      for (const key in inputProps) {
        const value = inputProps[key];
        if (key === 'style') {
          const styleProp = inputProps.style;
          const styles = flattenArray<StyleProps>(styleProp ?? []);
          const processedStyle: StyleProps = styles.map((style) => {
            if (style && style.viewDescriptors) {
              // this is how we recognize styles returned by useAnimatedStyle
              style.viewsRef.add(this);
              if (this._isFirstRender) {
                this.initialStyle = {
                  ...style.initial.value,
                  ...initialUpdaterRun<StyleProps>(style.initial.updater),
                };
              }
              return this.initialStyle;
            } else {
              return style;
            }
          });
          props[key] = StyleSheet.flatten(processedStyle);
        } else if (key === 'animatedProps') {
          const animatedProp = inputProps.animatedProps as Partial<
            AnimatedComponentProps<AnimatedProps>
          >;
          if (animatedProp.initial !== undefined) {
            Object.keys(animatedProp.initial.value).forEach((key) => {
              props[key] = animatedProp.initial?.value[key];
              animatedProp.viewsRef?.add(this);
            });
          }
        } else if (
          has('current', value) &&
          value.current instanceof WorkletEventHandler
        ) {
          if (value.current.eventNames.length > 0) {
            value.current.eventNames.forEach((eventName) => {
              props[eventName] = has('listeners', value.current)
                ? (value.current.listeners as Record<string, unknown>)[
                    eventName
                  ]
                : dummyListener;
            });
          } else {
            props[key] = dummyListener;
          }
        } else if (
          key !== 'onGestureHandlerStateChange' ||
          !isChromeDebugger()
        ) {
          props[key] = value;
        }
      }
      return props;
    }
    render() {
      const props = this._filterNonAnimatedProps(this.props);
      if (isJest()) {
        props.animatedStyle = this.animatedStyle;
      }
      if (this._isFirstRender) {
        this._isFirstRender = false;
      }
      // collapsable={false} keeps the native view alive so it can be
      // targeted by its tag (not needed on web).
      const platformProps = Platform.select({
        web: {},
        default: { collapsable: false },
      });
      return (
        <Component {...props} ref={this._setComponentRef} {...platformProps} />
      );
    }
  }
  AnimatedComponent.displayName = `AnimatedComponent(${
    Component.displayName || Component.name || 'Component'
  })`;
  return React.forwardRef<Component>((props, ref) => {
    return (
      <AnimatedComponent
        {...props}
        {...(ref === null ? null : { forwardedRef: ref })}
      />
    );
  });
}
export interface paths {
"/": {
get: operations["meta/root"];
};
"/app": {
get: operations["apps/get-authenticated"];
};
"/app-manifests/{code}/conversions": {
post: operations["apps/create-from-manifest"];
};
"/app/hook/config": {
get: operations["apps/get-webhook-config-for-app"];
patch: operations["apps/update-webhook-config-for-app"];
};
"/app/installations": {
get: operations["apps/list-installations"];
};
"/app/installations/{installation_id}": {
get: operations["apps/get-installation"];
delete: operations["apps/delete-installation"];
};
"/app/installations/{installation_id}/access_tokens": {
post: operations["apps/create-installation-access-token"];
};
"/app/installations/{installation_id}/suspended": {
put: operations["apps/suspend-installation"];
delete: operations["apps/unsuspend-installation"];
};
"/applications/grants": {
get: operations["oauth-authorizations/list-grants"];
};
"/applications/grants/{grant_id}": {
get: operations["oauth-authorizations/get-grant"];
delete: operations["oauth-authorizations/delete-grant"];
};
"/applications/{client_id}/grant": {
delete: operations["apps/delete-authorization"];
};
"/applications/{client_id}/grants/{access_token}": {
delete: operations["apps/revoke-grant-for-application"];
};
"/applications/{client_id}/token": {
post: operations["apps/check-token"];
patch: operations["apps/reset-token"];
delete: operations["apps/delete-token"];
};
"/applications/{client_id}/tokens/{access_token}": {
get: operations["apps/check-authorization"];
post: operations["apps/reset-authorization"];
delete: operations["apps/revoke-authorization-for-application"];
};
"/apps/{app_slug}": {
get: operations["apps/get-by-slug"];
};
"/authorizations": {
get: operations["oauth-authorizations/list-authorizations"];
post: operations["oauth-authorizations/create-authorization"];
};
"/authorizations/clients/{client_id}": {
put: operations["oauth-authorizations/get-or-create-authorization-for-app"];
};
"/authorizations/clients/{client_id}/{fingerprint}": {
put: operations["oauth-authorizations/get-or-create-authorization-for-app-and-fingerprint"];
};
"/authorizations/{authorization_id}": {
get: operations["oauth-authorizations/get-authorization"];
patch: operations["oauth-authorizations/update-authorization"];
delete: operations["oauth-authorizations/delete-authorization"];
};
"/codes_of_conduct": {
get: operations["codes-of-conduct/get-all-codes-of-conduct"];
};
"/codes_of_conduct/{key}": {
get: operations["codes-of-conduct/get-conduct-code"];
};
"/content_references/{content_reference_id}/attachments": {
post: operations["apps/create-content-attachment"];
};
"/emojis": {
get: operations["emojis/get"];
};
"/enterprises/{enterprise}/actions/permissions": {
get: operations["enterprise-admin/get-github-actions-permissions-enterprise"];
put: operations["enterprise-admin/set-github-actions-permissions-enterprise"];
};
"/enterprises/{enterprise}/actions/permissions/organizations": {
get: operations["enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise"];
put: operations["enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise"];
};
"/enterprises/{enterprise}/actions/permissions/organizations/{org_id}": {
put: operations["enterprise-admin/enable-selected-organization-github-actions-enterprise"];
delete: operations["enterprise-admin/disable-selected-organization-github-actions-enterprise"];
};
"/enterprises/{enterprise}/actions/permissions/selected-actions": {
get: operations["enterprise-admin/get-allowed-actions-enterprise"];
put: operations["enterprise-admin/set-allowed-actions-enterprise"];
};
"/enterprises/{enterprise}/actions/runner-groups": {
get: operations["enterprise-admin/list-self-hosted-runner-groups-for-enterprise"];
post: operations["enterprise-admin/create-self-hosted-runner-group-for-enterprise"];
};
"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": {
get: operations["enterprise-admin/get-self-hosted-runner-group-for-enterprise"];
patch: operations["enterprise-admin/update-self-hosted-runner-group-for-enterprise"];
delete: operations["enterprise-admin/delete-self-hosted-runner-group-from-enterprise"];
};
"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": {
get: operations["enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise"];
put: operations["enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise"];
};
"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": {
put: operations["enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise"];
delete: operations["enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise"];
};
"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": {
get: operations["enterprise-admin/list-self-hosted-runners-in-group-for-enterprise"];
put: operations["enterprise-admin/set-self-hosted-runners-in-group-for-enterprise"];
};
"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": {
put: operations["enterprise-admin/add-self-hosted-runner-to-group-for-enterprise"];
delete: operations["enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise"];
};
"/enterprises/{enterprise}/actions/runners": {
get: operations["enterprise-admin/list-self-hosted-runners-for-enterprise"];
};
"/enterprises/{enterprise}/actions/runners/downloads": {
get: operations["enterprise-admin/list-runner-applications-for-enterprise"];
};
"/enterprises/{enterprise}/actions/runners/registration-token": {
post: operations["enterprise-admin/create-registration-token-for-enterprise"];
};
"/enterprises/{enterprise}/actions/runners/remove-token": {
post: operations["enterprise-admin/create-remove-token-for-enterprise"];
};
"/enterprises/{enterprise}/actions/runners/{runner_id}": {
get: operations["enterprise-admin/get-self-hosted-runner-for-enterprise"];
delete: operations["enterprise-admin/delete-self-hosted-runner-from-enterprise"];
};
"/enterprises/{enterprise}/settings/billing/actions": {
get: operations["billing/get-github-actions-billing-ghe"];
};
"/enterprises/{enterprise}/settings/billing/packages": {
get: operations["billing/get-github-packages-billing-ghe"];
};
"/enterprises/{enterprise}/settings/billing/shared-storage": {
get: operations["billing/get-shared-storage-billing-ghe"];
};
"/events": {
get: operations["activity/list-public-events"];
};
"/feeds": {
get: operations["activity/get-feeds"];
};
"/gists": {
get: operations["gists/list"];
post: operations["gists/create"];
};
"/gists/public": {
get: operations["gists/list-public"];
};
"/gists/starred": {
get: operations["gists/list-starred"];
};
"/gists/{gist_id}": {
get: operations["gists/get"];
patch: operations["gists/update"];
delete: operations["gists/delete"];
};
"/gists/{gist_id}/comments": {
get: operations["gists/list-comments"];
post: operations["gists/create-comment"];
};
"/gists/{gist_id}/comments/{comment_id}": {
get: operations["gists/get-comment"];
patch: operations["gists/update-comment"];
delete: operations["gists/delete-comment"];
};
"/gists/{gist_id}/commits": {
get: operations["gists/list-commits"];
};
"/gists/{gist_id}/forks": {
get: operations["gists/list-forks"];
post: operations["gists/fork"];
};
"/gists/{gist_id}/star": {
get: operations["gists/check-is-starred"];
put: operations["gists/star"];
delete: operations["gists/unstar"];
};
"/gists/{gist_id}/{sha}": {
get: operations["gists/get-revision"];
};
"/gitignore/templates": {
get: operations["gitignore/get-all-templates"];
};
"/gitignore/templates/{name}": {
get: operations["gitignore/get-template"];
};
"/installation/repositories": {
get: operations["apps/list-repos-accessible-to-installation"];
};
"/installation/token": {
delete: operations["apps/revoke-installation-access-token"];
};
"/issues": {
get: operations["issues/list"];
};
"/licenses": {
get: operations["licenses/get-all-commonly-used"];
};
"/licenses/{license}": {
get: operations["licenses/get"];
};
"/markdown": {
post: operations["markdown/render"];
};
"/markdown/raw": {
post: operations["markdown/render-raw"];
};
"/marketplace_listing/accounts/{account_id}": {
get: operations["apps/get-subscription-plan-for-account"];
};
"/marketplace_listing/plans": {
get: operations["apps/list-plans"];
};
"/marketplace_listing/plans/{plan_id}/accounts": {
get: operations["apps/list-accounts-for-plan"];
};
"/marketplace_listing/stubbed/accounts/{account_id}": {
get: operations["apps/get-subscription-plan-for-account-stubbed"];
};
"/marketplace_listing/stubbed/plans": {
get: operations["apps/list-plans-stubbed"];
};
"/marketplace_listing/stubbed/plans/{plan_id}/accounts": {
get: operations["apps/list-accounts-for-plan-stubbed"];
};
"/meta": {
get: operations["meta/get"];
};
"/networks/{owner}/{repo}/events": {
get: operations["activity/list-public-events-for-repo-network"];
};
"/notifications": {
get: operations["activity/list-notifications-for-authenticated-user"];
put: operations["activity/mark-notifications-as-read"];
};
"/notifications/threads/{thread_id}": {
get: operations["activity/get-thread"];
patch: operations["activity/mark-thread-as-read"];
};
"/notifications/threads/{thread_id}/subscription": {
get: operations["activity/get-thread-subscription-for-authenticated-user"];
put: operations["activity/set-thread-subscription"];
delete: operations["activity/delete-thread-subscription"];
};
"/octocat": {
get: operations["meta/get-octocat"];
};
"/organizations": {
get: operations["orgs/list"];
};
"/orgs/{org}": {
get: operations["orgs/get"];
patch: operations["orgs/update"];
};
"/orgs/{org}/actions/permissions": {
get: operations["actions/get-github-actions-permissions-organization"];
put: operations["actions/set-github-actions-permissions-organization"];
};
"/orgs/{org}/actions/permissions/repositories": {
get: operations["actions/list-selected-repositories-enabled-github-actions-organization"];
put: operations["actions/set-selected-repositories-enabled-github-actions-organization"];
};
"/orgs/{org}/actions/permissions/repositories/{repository_id}": {
put: operations["actions/enable-selected-repository-github-actions-organization"];
delete: operations["actions/disable-selected-repository-github-actions-organization"];
};
"/orgs/{org}/actions/permissions/selected-actions": {
get: operations["actions/get-allowed-actions-organization"];
put: operations["actions/set-allowed-actions-organization"];
};
"/orgs/{org}/actions/runner-groups": {
get: operations["actions/list-self-hosted-runner-groups-for-org"];
post: operations["actions/create-self-hosted-runner-group-for-org"];
};
"/orgs/{org}/actions/runner-groups/{runner_group_id}": {
get: operations["actions/get-self-hosted-runner-group-for-org"];
patch: operations["actions/update-self-hosted-runner-group-for-org"];
delete: operations["actions/delete-self-hosted-runner-group-from-org"];
};
"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": {
get: operations["actions/list-repo-access-to-self-hosted-runner-group-in-org"];
put: operations["actions/set-repo-access-to-self-hosted-runner-group-in-org"];
};
"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}": {
put: operations["actions/add-repo-access-to-self-hosted-runner-group-in-org"];
delete: operations["actions/remove-repo-access-to-self-hosted-runner-group-in-org"];
};
"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners": {
get: operations["actions/list-self-hosted-runners-in-group-for-org"];
put: operations["actions/set-self-hosted-runners-in-group-for-org"];
};
"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": {
put: operations["actions/add-self-hosted-runner-to-group-for-org"];
delete: operations["actions/remove-self-hosted-runner-from-group-for-org"];
};
"/orgs/{org}/actions/runners": {
get: operations["actions/list-self-hosted-runners-for-org"];
};
"/orgs/{org}/actions/runners/downloads": {
get: operations["actions/list-runner-applications-for-org"];
};
"/orgs/{org}/actions/runners/registration-token": {
post: operations["actions/create-registration-token-for-org"];
};
"/orgs/{org}/actions/runners/remove-token": {
post: operations["actions/create-remove-token-for-org"];
};
"/orgs/{org}/actions/runners/{runner_id}": {
get: operations["actions/get-self-hosted-runner-for-org"];
delete: operations["actions/delete-self-hosted-runner-from-org"];
};
"/orgs/{org}/actions/secrets": {
get: operations["actions/list-org-secrets"];
};
"/orgs/{org}/actions/secrets/public-key": {
get: operations["actions/get-org-public-key"];
};
"/orgs/{org}/actions/secrets/{secret_name}": {
get: operations["actions/get-org-secret"];
put: operations["actions/create-or-update-org-secret"];
delete: operations["actions/delete-org-secret"];
};
"/orgs/{org}/actions/secrets/{secret_name}/repositories": {
get: operations["actions/list-selected-repos-for-org-secret"];
put: operations["actions/set-selected-repos-for-org-secret"];
};
"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": {
put: operations["actions/add-selected-repo-to-org-secret"];
delete: operations["actions/remove-selected-repo-from-org-secret"];
};
"/orgs/{org}/blocks": {
get: operations["orgs/list-blocked-users"];
};
"/orgs/{org}/blocks/{username}": {
get: operations["orgs/check-blocked-user"];
put: operations["orgs/block-user"];
delete: operations["orgs/unblock-user"];
};
"/orgs/{org}/credential-authorizations": {
get: operations["orgs/list-saml-sso-authorizations"];
};
"/orgs/{org}/credential-authorizations/{credential_id}": {
delete: operations["orgs/remove-saml-sso-authorization"];
};
"/orgs/{org}/events": {
get: operations["activity/list-public-org-events"];
};
"/orgs/{org}/hooks": {
get: operations["orgs/list-webhooks"];
post: operations["orgs/create-webhook"];
};
"/orgs/{org}/hooks/{hook_id}": {
get: operations["orgs/get-webhook"];
patch: operations["orgs/update-webhook"];
delete: operations["orgs/delete-webhook"];
};
"/orgs/{org}/hooks/{hook_id}/config": {
get: operations["orgs/get-webhook-config-for-org"];
patch: operations["orgs/update-webhook-config-for-org"];
};
"/orgs/{org}/hooks/{hook_id}/pings": {
post: operations["orgs/ping-webhook"];
};
"/orgs/{org}/installation": {
get: operations["apps/get-org-installation"];
};
"/orgs/{org}/installations": {
get: operations["orgs/list-app-installations"];
};
"/orgs/{org}/interaction-limits": {
get: operations["interactions/get-restrictions-for-org"];
put: operations["interactions/set-restrictions-for-org"];
delete: operations["interactions/remove-restrictions-for-org"];
};
"/orgs/{org}/invitations": {
get: operations["orgs/list-pending-invitations"];
post: operations["orgs/create-invitation"];
};
"/orgs/{org}/invitations/{invitation_id}/teams": {
get: operations["orgs/list-invitation-teams"];
};
"/orgs/{org}/issues": {
get: operations["issues/list-for-org"];
};
"/orgs/{org}/members": {
get: operations["orgs/list-members"];
};
"/orgs/{org}/members/{username}": {
get: operations["orgs/check-membership-for-user"];
delete: operations["orgs/remove-member"];
};
"/orgs/{org}/memberships/{username}": {
get: operations["orgs/get-membership-for-user"];
put: operations["orgs/set-membership-for-user"];
delete: operations["orgs/remove-membership-for-user"];
};
"/orgs/{org}/migrations": {
get: operations["migrations/list-for-org"];
post: operations["migrations/start-for-org"];
};
"/orgs/{org}/migrations/{migration_id}": {
get: operations["migrations/get-status-for-org"];
};
"/orgs/{org}/migrations/{migration_id}/archive": {
get: operations["migrations/download-archive-for-org"];
delete: operations["migrations/delete-archive-for-org"];
};
"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": {
delete: operations["migrations/unlock-repo-for-org"];
};
"/orgs/{org}/migrations/{migration_id}/repositories": {
get: operations["migrations/list-repos-for-org"];
};
"/orgs/{org}/outside_collaborators": {
get: operations["orgs/list-outside-collaborators"];
};
"/orgs/{org}/outside_collaborators/{username}": {
put: operations["orgs/convert-member-to-outside-collaborator"];
delete: operations["orgs/remove-outside-collaborator"];
};
"/orgs/{org}/projects": {
get: operations["projects/list-for-org"];
post: operations["projects/create-for-org"];
};
"/orgs/{org}/public_members": {
get: operations["orgs/list-public-members"];
};
"/orgs/{org}/public_members/{username}": {
get: operations["orgs/check-public-membership-for-user"];
put: operations["orgs/set-public-membership-for-authenticated-user"];
delete: operations["orgs/remove-public-membership-for-authenticated-user"];
};
"/orgs/{org}/repos": {
get: operations["repos/list-for-org"];
post: operations["repos/create-in-org"];
};
"/orgs/{org}/settings/billing/actions": {
get: operations["billing/get-github-actions-billing-org"];
};
"/orgs/{org}/settings/billing/packages": {
get: operations["billing/get-github-packages-billing-org"];
};
"/orgs/{org}/settings/billing/shared-storage": {
get: operations["billing/get-shared-storage-billing-org"];
};
"/orgs/{org}/team-sync/groups": {
get: operations["teams/list-idp-groups-for-org"];
};
"/orgs/{org}/teams": {
get: operations["teams/list"];
post: operations["teams/create"];
};
"/orgs/{org}/teams/{team_slug}": {
get: operations["teams/get-by-name"];
patch: operations["teams/update-in-org"];
delete: operations["teams/delete-in-org"];
};
"/orgs/{org}/teams/{team_slug}/discussions": {
get: operations["teams/list-discussions-in-org"];
post: operations["teams/create-discussion-in-org"];
};
"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": {
get: operations["teams/get-discussion-in-org"];
patch: operations["teams/update-discussion-in-org"];
delete: operations["teams/delete-discussion-in-org"];
};
"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": {
get: operations["teams/list-discussion-comments-in-org"];
post: operations["teams/create-discussion-comment-in-org"];
};
"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": {
get: operations["teams/get-discussion-comment-in-org"];
patch: operations["teams/update-discussion-comment-in-org"];
delete: operations["teams/delete-discussion-comment-in-org"];
};
"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": {
get: operations["reactions/list-for-team-discussion-comment-in-org"];
post: operations["reactions/create-for-team-discussion-comment-in-org"];
};
"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": {
delete: operations["reactions/delete-for-team-discussion-comment"];
};
"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": {
get: operations["reactions/list-for-team-discussion-in-org"];
post: operations["reactions/create-for-team-discussion-in-org"];
};
"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": {
delete: operations["reactions/delete-for-team-discussion"];
};
"/orgs/{org}/teams/{team_slug}/invitations": {
get: operations["teams/list-pending-invitations-in-org"];
};
"/orgs/{org}/teams/{team_slug}/members": {
get: operations["teams/list-members-in-org"];
};
"/orgs/{org}/teams/{team_slug}/memberships/{username}": {
get: operations["teams/get-membership-for-user-in-org"];
put: operations["teams/add-or-update-membership-for-user-in-org"];
delete: operations["teams/remove-membership-for-user-in-org"];
};
"/orgs/{org}/teams/{team_slug}/projects": {
get: operations["teams/list-projects-in-org"];
};
"/orgs/{org}/teams/{team_slug}/projects/{project_id}": {
get: operations["teams/check-permissions-for-project-in-org"];
put: operations["teams/add-or-update-project-permissions-in-org"];
delete: operations["teams/remove-project-in-org"];
};
"/orgs/{org}/teams/{team_slug}/repos": {
get: operations["teams/list-repos-in-org"];
};
"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": {
get: operations["teams/check-permissions-for-repo-in-org"];
put: operations["teams/add-or-update-repo-permissions-in-org"];
delete: operations["teams/remove-repo-in-org"];
};
"/orgs/{org}/teams/{team_slug}/team-sync/group-mappings": {
get: operations["teams/list-idp-groups-in-org"];
patch: operations["teams/create-or-update-idp-group-connections-in-org"];
};
"/orgs/{org}/teams/{team_slug}/teams": {
get: operations["teams/list-child-in-org"];
};
"/projects/columns/cards/{card_id}": {
get: operations["projects/get-card"];
patch: operations["projects/update-card"];
delete: operations["projects/delete-card"];
};
"/projects/columns/cards/{card_id}/moves": {
post: operations["projects/move-card"];
};
"/projects/columns/{column_id}": {
get: operations["projects/get-column"];
patch: operations["projects/update-column"];
delete: operations["projects/delete-column"];
};
"/projects/columns/{column_id}/cards": {
get: operations["projects/list-cards"];
post: operations["projects/create-card"];
};
"/projects/columns/{column_id}/moves": {
post: operations["projects/move-column"];
};
"/projects/{project_id}": {
get: operations["projects/get"];
patch: operations["projects/update"];
delete: operations["projects/delete"];
};
"/projects/{project_id}/collaborators": {
get: operations["projects/list-collaborators"];
};
"/projects/{project_id}/collaborators/{username}": {
put: operations["projects/add-collaborator"];
delete: operations["projects/remove-collaborator"];
};
"/projects/{project_id}/collaborators/{username}/permission": {
get: operations["projects/get-permission-for-user"];
};
"/projects/{project_id}/columns": {
get: operations["projects/list-columns"];
post: operations["projects/create-column"];
};
"/rate_limit": {
get: operations["rate-limit/get"];
};
"/reactions/{reaction_id}": {
delete: operations["reactions/delete-legacy"];
};
"/repos/{owner}/{repo}": {
get: operations["repos/get"];
patch: operations["repos/update"];
delete: operations["repos/delete"];
};
"/repos/{owner}/{repo}/actions/artifacts": {
get: operations["actions/list-artifacts-for-repo"];
};
"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}": {
get: operations["actions/get-artifact"];
delete: operations["actions/delete-artifact"];
};
"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": {
get: operations["actions/download-artifact"];
};
"/repos/{owner}/{repo}/actions/jobs/{job_id}": {
get: operations["actions/get-job-for-workflow-run"];
};
"/repos/{owner}/{repo}/actions/jobs/{job_id}/logs": {
get: operations["actions/download-job-logs-for-workflow-run"];
};
"/repos/{owner}/{repo}/actions/permissions": {
get: operations["actions/get-github-actions-permissions-repository"];
put: operations["actions/set-github-actions-permissions-repository"];
};
"/repos/{owner}/{repo}/actions/permissions/selected-actions": {
get: operations["actions/get-allowed-actions-repository"];
put: operations["actions/set-allowed-actions-repository"];
};
"/repos/{owner}/{repo}/actions/runners": {
get: operations["actions/list-self-hosted-runners-for-repo"];
};
"/repos/{owner}/{repo}/actions/runners/downloads": {
get: operations["actions/list-runner-applications-for-repo"];
};
"/repos/{owner}/{repo}/actions/runners/registration-token": {
post: operations["actions/create-registration-token-for-repo"];
};
"/repos/{owner}/{repo}/actions/runners/remove-token": {
post: operations["actions/create-remove-token-for-repo"];
};
"/repos/{owner}/{repo}/actions/runners/{runner_id}": {
get: operations["actions/get-self-hosted-runner-for-repo"];
delete: operations["actions/delete-self-hosted-runner-from-repo"];
};
"/repos/{owner}/{repo}/actions/runs": {
get: operations["actions/list-workflow-runs-for-repo"];
};
"/repos/{owner}/{repo}/actions/runs/{run_id}": {
get: operations["actions/get-workflow-run"];
delete: operations["actions/delete-workflow-run"];
};
"/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": {
get: operations["actions/list-workflow-run-artifacts"];
};
"/repos/{owner}/{repo}/actions/runs/{run_id}/cancel": {
post: operations["actions/cancel-workflow-run"];
};
"/repos/{owner}/{repo}/actions/runs/{run_id}/jobs": {
get: operations["actions/list-jobs-for-workflow-run"];
};
"/repos/{owner}/{repo}/actions/runs/{run_id}/logs": {
get: operations["actions/download-workflow-run-logs"];
delete: operations["actions/delete-workflow-run-logs"];
};
"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun": {
post: operations["actions/re-run-workflow"];
};
"/repos/{owner}/{repo}/actions/runs/{run_id}/timing": {
get: operations["actions/get-workflow-run-usage"];
};
"/repos/{owner}/{repo}/actions/secrets": {
get: operations["actions/list-repo-secrets"];
};
"/repos/{owner}/{repo}/actions/secrets/public-key": {
get: operations["actions/get-repo-public-key"];
};
"/repos/{owner}/{repo}/actions/secrets/{secret_name}": {
get: operations["actions/get-repo-secret"];
put: operations["actions/create-or-update-repo-secret"];
delete: operations["actions/delete-repo-secret"];
};
"/repos/{owner}/{repo}/actions/workflows": {
get: operations["actions/list-repo-workflows"];
};
"/repos/{owner}/{repo}/actions/workflows/{workflow_id}": {
get: operations["actions/get-workflow"];
};
"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": {
put: operations["actions/disable-workflow"];
};
"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": {
post: operations["actions/create-workflow-dispatch"];
};
"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": {
put: operations["actions/enable-workflow"];
};
"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": {
get: operations["actions/list-workflow-runs"];
};
"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": {
get: operations["actions/get-workflow-usage"];
};
"/repos/{owner}/{repo}/assignees": {
get: operations["issues/list-assignees"];
};
"/repos/{owner}/{repo}/assignees/{assignee}": {
get: operations["issues/check-user-can-be-assigned"];
};
"/repos/{owner}/{repo}/automated-security-fixes": {
put: operations["repos/enable-automated-security-fixes"];
delete: operations["repos/disable-automated-security-fixes"];
};
"/repos/{owner}/{repo}/branches": {
get: operations["repos/list-branches"];
};
"/repos/{owner}/{repo}/branches/{branch}": {
get: operations["repos/get-branch"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection": {
get: operations["repos/get-branch-protection"];
put: operations["repos/update-branch-protection"];
delete: operations["repos/delete-branch-protection"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": {
get: operations["repos/get-admin-branch-protection"];
post: operations["repos/set-admin-branch-protection"];
delete: operations["repos/delete-admin-branch-protection"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": {
get: operations["repos/get-pull-request-review-protection"];
patch: operations["repos/update-pull-request-review-protection"];
delete: operations["repos/delete-pull-request-review-protection"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": {
get: operations["repos/get-commit-signature-protection"];
post: operations["repos/create-commit-signature-protection"];
delete: operations["repos/delete-commit-signature-protection"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": {
get: operations["repos/get-status-checks-protection"];
patch: operations["repos/update-status-check-protection"];
delete: operations["repos/remove-status-check-protection"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": {
get: operations["repos/get-all-status-check-contexts"];
post: operations["repos/add-status-check-contexts"];
put: operations["repos/set-status-check-contexts"];
delete: operations["repos/remove-status-check-contexts"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions": {
get: operations["repos/get-access-restrictions"];
delete: operations["repos/delete-access-restrictions"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": {
get: operations["repos/get-apps-with-access-to-protected-branch"];
post: operations["repos/add-app-access-restrictions"];
put: operations["repos/set-app-access-restrictions"];
delete: operations["repos/remove-app-access-restrictions"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": {
get: operations["repos/get-teams-with-access-to-protected-branch"];
post: operations["repos/add-team-access-restrictions"];
put: operations["repos/set-team-access-restrictions"];
delete: operations["repos/remove-team-access-restrictions"];
};
"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": {
get: operations["repos/get-users-with-access-to-protected-branch"];
post: operations["repos/add-user-access-restrictions"];
put: operations["repos/set-user-access-restrictions"];
delete: operations["repos/remove-user-access-restrictions"];
};
"/repos/{owner}/{repo}/check-runs": {
post: operations["checks/create"];
};
"/repos/{owner}/{repo}/check-runs/{check_run_id}": {
get: operations["checks/get"];
patch: operations["checks/update"];
};
"/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": {
get: operations["checks/list-annotations"];
};
"/repos/{owner}/{repo}/check-suites": {
post: operations["checks/create-suite"];
};
"/repos/{owner}/{repo}/check-suites/preferences": {
patch: operations["checks/set-suites-preferences"];
};
"/repos/{owner}/{repo}/check-suites/{check_suite_id}": {
get: operations["checks/get-suite"];
};
"/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": {
get: operations["checks/list-for-suite"];
};
"/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": {
post: operations["checks/rerequest-suite"];
};
"/repos/{owner}/{repo}/code-scanning/alerts": {
get: operations["code-scanning/list-alerts-for-repo"];
};
"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": {
get: operations["code-scanning/get-alert"];
patch: operations["code-scanning/update-alert"];
};
"/repos/{owner}/{repo}/code-scanning/analyses": {
get: operations["code-scanning/list-recent-analyses"];
};
"/repos/{owner}/{repo}/code-scanning/sarifs": {
post: operations["code-scanning/upload-sarif"];
};
"/repos/{owner}/{repo}/collaborators": {
get: operations["repos/list-collaborators"];
};
"/repos/{owner}/{repo}/collaborators/{username}": {
get: operations["repos/check-collaborator"];
put: operations["repos/add-collaborator"];
delete: operations["repos/remove-collaborator"];
};
"/repos/{owner}/{repo}/collaborators/{username}/permission": {
get: operations["repos/get-collaborator-permission-level"];
};
"/repos/{owner}/{repo}/comments": {
get: operations["repos/list-commit-comments-for-repo"];
};
"/repos/{owner}/{repo}/comments/{comment_id}": {
get: operations["repos/get-commit-comment"];
patch: operations["repos/update-commit-comment"];
delete: operations["repos/delete-commit-comment"];
};
"/repos/{owner}/{repo}/comments/{comment_id}/reactions": {
get: operations["reactions/list-for-commit-comment"];
post: operations["reactions/create-for-commit-comment"];
};
"/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": {
delete: operations["reactions/delete-for-commit-comment"];
};
"/repos/{owner}/{repo}/commits": {
get: operations["repos/list-commits"];
};
"/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": {
get: operations["repos/list-branches-for-head-commit"];
};
"/repos/{owner}/{repo}/commits/{commit_sha}/comments": {
get: operations["repos/list-comments-for-commit"];
post: operations["repos/create-commit-comment"];
};
"/repos/{owner}/{repo}/commits/{commit_sha}/pulls": {
get: operations["repos/list-pull-requests-associated-with-commit"];
};
"/repos/{owner}/{repo}/commits/{ref}": {
get: operations["repos/get-commit"];
};
"/repos/{owner}/{repo}/commits/{ref}/check-runs": {
get: operations["checks/list-for-ref"];
};
"/repos/{owner}/{repo}/commits/{ref}/check-suites": {
get: operations["checks/list-suites-for-ref"];
};
"/repos/{owner}/{repo}/commits/{ref}/status": {
get: operations["repos/get-combined-status-for-ref"];
};
"/repos/{owner}/{repo}/commits/{ref}/statuses": {
get: operations["repos/list-commit-statuses-for-ref"];
};
"/repos/{owner}/{repo}/community/code_of_conduct": {
get: operations["codes-of-conduct/get-for-repo"];
};
"/repos/{owner}/{repo}/community/profile": {
get: operations["repos/get-community-profile-metrics"];
};
"/repos/{owner}/{repo}/compare/{base}...{head}": {
get: operations["repos/compare-commits"];
};
"/repos/{owner}/{repo}/contents/{path}": {
get: operations["repos/get-content"];
put: operations["repos/create-or-update-file-contents"];
delete: operations["repos/delete-file"];
};
"/repos/{owner}/{repo}/contributors": {
get: operations["repos/list-contributors"];
};
"/repos/{owner}/{repo}/deployments": {
get: operations["repos/list-deployments"];
post: operations["repos/create-deployment"];
};
"/repos/{owner}/{repo}/deployments/{deployment_id}": {
get: operations["repos/get-deployment"];
delete: operations["repos/delete-deployment"];
};
"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses": {
get: operations["repos/list-deployment-statuses"];
post: operations["repos/create-deployment-status"];
};
"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": {
get: operations["repos/get-deployment-status"];
};
"/repos/{owner}/{repo}/dispatches": {
post: operations["repos/create-dispatch-event"];
};
"/repos/{owner}/{repo}/events": {
get: operations["activity/list-repo-events"];
};
"/repos/{owner}/{repo}/forks": {
get: operations["repos/list-forks"];
post: operations["repos/create-fork"];
};
"/repos/{owner}/{repo}/git/blobs": {
post: operations["git/create-blob"];
};
"/repos/{owner}/{repo}/git/blobs/{file_sha}": {
get: operations["git/get-blob"];
};
"/repos/{owner}/{repo}/git/commits": {
post: operations["git/create-commit"];
};
"/repos/{owner}/{repo}/git/commits/{commit_sha}": {
get: operations["git/get-commit"];
};
"/repos/{owner}/{repo}/git/matching-refs/{ref}": {
get: operations["git/list-matching-refs"];
};
"/repos/{owner}/{repo}/git/ref/{ref}": {
get: operations["git/get-ref"];
};
"/repos/{owner}/{repo}/git/refs": {
post: operations["git/create-ref"];
};
"/repos/{owner}/{repo}/git/refs/{ref}": {
patch: operations["git/update-ref"];
delete: operations["git/delete-ref"];
};
"/repos/{owner}/{repo}/git/tags": {
post: operations["git/create-tag"];
};
"/repos/{owner}/{repo}/git/tags/{tag_sha}": {
get: operations["git/get-tag"];
};
"/repos/{owner}/{repo}/git/trees": {
post: operations["git/create-tree"];
};
"/repos/{owner}/{repo}/git/trees/{tree_sha}": {
get: operations["git/get-tree"];
};
"/repos/{owner}/{repo}/hooks": {
get: operations["repos/list-webhooks"];
post: operations["repos/create-webhook"];
};
"/repos/{owner}/{repo}/hooks/{hook_id}": {
get: operations["repos/get-webhook"];
patch: operations["repos/update-webhook"];
delete: operations["repos/delete-webhook"];
};
"/repos/{owner}/{repo}/hooks/{hook_id}/config": {
get: operations["repos/get-webhook-config-for-repo"];
patch: operations["repos/update-webhook-config-for-repo"];
};
"/repos/{owner}/{repo}/hooks/{hook_id}/pings": {
post: operations["repos/ping-webhook"];
};
"/repos/{owner}/{repo}/hooks/{hook_id}/tests": {
post: operations["repos/test-push-webhook"];
};
"/repos/{owner}/{repo}/import": {
get: operations["migrations/get-import-status"];
put: operations["migrations/start-import"];
patch: operations["migrations/update-import"];
delete: operations["migrations/cancel-import"];
};
"/repos/{owner}/{repo}/import/authors": {
get: operations["migrations/get-commit-authors"];
};
"/repos/{owner}/{repo}/import/authors/{author_id}": {
patch: operations["migrations/map-commit-author"];
};
"/repos/{owner}/{repo}/import/large_files": {
get: operations["migrations/get-large-files"];
};
"/repos/{owner}/{repo}/import/lfs": {
patch: operations["migrations/set-lfs-preference"];
};
"/repos/{owner}/{repo}/installation": {
get: operations["apps/get-repo-installation"];
};
"/repos/{owner}/{repo}/interaction-limits": {
get: operations["interactions/get-restrictions-for-repo"];
put: operations["interactions/set-restrictions-for-repo"];
delete: operations["interactions/remove-restrictions-for-repo"];
};
"/repos/{owner}/{repo}/invitations": {
get: operations["repos/list-invitations"];
};
"/repos/{owner}/{repo}/invitations/{invitation_id}": {
patch: operations["repos/update-invitation"];
delete: operations["repos/delete-invitation"];
};
"/repos/{owner}/{repo}/issues": {
get: operations["issues/list-for-repo"];
post: operations["issues/create"];
};
"/repos/{owner}/{repo}/issues/comments": {
get: operations["issues/list-comments-for-repo"];
};
"/repos/{owner}/{repo}/issues/comments/{comment_id}": {
get: operations["issues/get-comment"];
patch: operations["issues/update-comment"];
delete: operations["issues/delete-comment"];
};
"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": {
get: operations["reactions/list-for-issue-comment"];
post: operations["reactions/create-for-issue-comment"];
};
"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": {
delete: operations["reactions/delete-for-issue-comment"];
};
"/repos/{owner}/{repo}/issues/events": {
get: operations["issues/list-events-for-repo"];
};
"/repos/{owner}/{repo}/issues/events/{event_id}": {
get: operations["issues/get-event"];
};
"/repos/{owner}/{repo}/issues/{issue_number}": {
get: operations["issues/get"];
patch: operations["issues/update"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/assignees": {
post: operations["issues/add-assignees"];
delete: operations["issues/remove-assignees"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/comments": {
get: operations["issues/list-comments"];
post: operations["issues/create-comment"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/events": {
get: operations["issues/list-events"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/labels": {
get: operations["issues/list-labels-on-issue"];
post: operations["issues/add-labels"];
put: operations["issues/set-labels"];
delete: operations["issues/remove-all-labels"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": {
delete: operations["issues/remove-label"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/lock": {
put: operations["issues/lock"];
delete: operations["issues/unlock"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/reactions": {
get: operations["reactions/list-for-issue"];
post: operations["reactions/create-for-issue"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": {
delete: operations["reactions/delete-for-issue"];
};
"/repos/{owner}/{repo}/issues/{issue_number}/timeline": {
get: operations["issues/list-events-for-timeline"];
};
"/repos/{owner}/{repo}/keys": {
get: operations["repos/list-deploy-keys"];
post: operations["repos/create-deploy-key"];
};
"/repos/{owner}/{repo}/keys/{key_id}": {
get: operations["repos/get-deploy-key"];
delete: operations["repos/delete-deploy-key"];
};
"/repos/{owner}/{repo}/labels": {
get: operations["issues/list-labels-for-repo"];
post: operations["issues/create-label"];
};
"/repos/{owner}/{repo}/labels/{name}": {
get: operations["issues/get-label"];
patch: operations["issues/update-label"];
delete: operations["issues/delete-label"];
};
"/repos/{owner}/{repo}/languages": {
get: operations["repos/list-languages"];
};
"/repos/{owner}/{repo}/license": {
get: operations["licenses/get-for-repo"];
};
"/repos/{owner}/{repo}/merges": {
post: operations["repos/merge"];
};
"/repos/{owner}/{repo}/milestones": {
get: operations["issues/list-milestones"];
post: operations["issues/create-milestone"];
};
"/repos/{owner}/{repo}/milestones/{milestone_number}": {
get: operations["issues/get-milestone"];
patch: operations["issues/update-milestone"];
delete: operations["issues/delete-milestone"];
};
"/repos/{owner}/{repo}/milestones/{milestone_number}/labels": {
get: operations["issues/list-labels-for-milestone"];
};
"/repos/{owner}/{repo}/notifications": {
get: operations["activity/list-repo-notifications-for-authenticated-user"];
put: operations["activity/mark-repo-notifications-as-read"];
};
"/repos/{owner}/{repo}/pages": {
get: operations["repos/get-pages"];
post: operations["repos/create-pages-site"];
put: operations["repos/update-information-about-pages-site"];
delete: operations["repos/delete-pages-site"];
};
"/repos/{owner}/{repo}/pages/builds": {
get: operations["repos/list-pages-builds"];
post: operations["repos/request-pages-build"];
};
"/repos/{owner}/{repo}/pages/builds/latest": {
get: operations["repos/get-latest-pages-build"];
};
"/repos/{owner}/{repo}/pages/builds/{build_id}": {
get: operations["repos/get-pages-build"];
};
"/repos/{owner}/{repo}/projects": {
get: operations["projects/list-for-repo"];
post: operations["projects/create-for-repo"];
};
"/repos/{owner}/{repo}/pulls": {
get: operations["pulls/list"];
post: operations["pulls/create"];
};
"/repos/{owner}/{repo}/pulls/comments": {
get: operations["pulls/list-review-comments-for-repo"];
};
"/repos/{owner}/{repo}/pulls/comments/{comment_id}": {
get: operations["pulls/get-review-comment"];
patch: operations["pulls/update-review-comment"];
delete: operations["pulls/delete-review-comment"];
};
"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": {
get: operations["reactions/list-for-pull-request-review-comment"];
post: operations["reactions/create-for-pull-request-review-comment"];
};
"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": {
delete: operations["reactions/delete-for-pull-request-comment"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}": {
get: operations["pulls/get"];
patch: operations["pulls/update"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/comments": {
get: operations["pulls/list-review-comments"];
post: operations["pulls/create-review-comment"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": {
post: operations["pulls/create-reply-for-review-comment"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/commits": {
get: operations["pulls/list-commits"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/files": {
get: operations["pulls/list-files"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/merge": {
get: operations["pulls/check-if-merged"];
put: operations["pulls/merge"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": {
get: operations["pulls/list-requested-reviewers"];
post: operations["pulls/request-reviewers"];
delete: operations["pulls/remove-requested-reviewers"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/reviews": {
get: operations["pulls/list-reviews"];
post: operations["pulls/create-review"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": {
get: operations["pulls/get-review"];
put: operations["pulls/update-review"];
delete: operations["pulls/delete-pending-review"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": {
get: operations["pulls/list-comments-for-review"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": {
put: operations["pulls/dismiss-review"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": {
post: operations["pulls/submit-review"];
};
"/repos/{owner}/{repo}/pulls/{pull_number}/update-branch": {
put: operations["pulls/update-branch"];
};
"/repos/{owner}/{repo}/readme": {
get: operations["repos/get-readme"];
};
"/repos/{owner}/{repo}/releases": {
get: operations["repos/list-releases"];
post: operations["repos/create-release"];
};
"/repos/{owner}/{repo}/releases/assets/{asset_id}": {
get: operations["repos/get-release-asset"];
patch: operations["repos/update-release-asset"];
delete: operations["repos/delete-release-asset"];
};
"/repos/{owner}/{repo}/releases/latest": {
get: operations["repos/get-latest-release"];
};
"/repos/{owner}/{repo}/releases/tags/{tag}": {
get: operations["repos/get-release-by-tag"];
};
"/repos/{owner}/{repo}/releases/{release_id}": {
get: operations["repos/get-release"];
patch: operations["repos/update-release"];
delete: operations["repos/delete-release"];
};
"/repos/{owner}/{repo}/releases/{release_id}/assets": {
get: operations["repos/list-release-assets"];
post: operations["repos/upload-release-asset"];
};
"/repos/{owner}/{repo}/secret-scanning/alerts": {
get: operations["secret-scanning/list-alerts-for-repo"];
};
"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": {
get: operations["secret-scanning/get-alert"];
patch: operations["secret-scanning/update-alert"];
};
"/repos/{owner}/{repo}/stargazers": {
get: operations["activity/list-stargazers-for-repo"];
};
"/repos/{owner}/{repo}/stats/code_frequency": {
get: operations["repos/get-code-frequency-stats"];
};
"/repos/{owner}/{repo}/stats/commit_activity": {
get: operations["repos/get-commit-activity-stats"];
};
"/repos/{owner}/{repo}/stats/contributors": {
get: operations["repos/get-contributors-stats"];
};
"/repos/{owner}/{repo}/stats/participation": {
get: operations["repos/get-participation-stats"];
};
"/repos/{owner}/{repo}/stats/punch_card": {
get: operations["repos/get-punch-card-stats"];
};
"/repos/{owner}/{repo}/statuses/{sha}": {
post: operations["repos/create-commit-status"];
};
"/repos/{owner}/{repo}/subscribers": {
get: operations["activity/list-watchers-for-repo"];
};
"/repos/{owner}/{repo}/subscription": {
get: operations["activity/get-repo-subscription"];
put: operations["activity/set-repo-subscription"];
delete: operations["activity/delete-repo-subscription"];
};
"/repos/{owner}/{repo}/tags": {
get: operations["repos/list-tags"];
};
"/repos/{owner}/{repo}/tarball/{ref}": {
get: operations["repos/download-tarball-archive"];
};
"/repos/{owner}/{repo}/teams": {
get: operations["repos/list-teams"];
};
"/repos/{owner}/{repo}/topics": {
get: operations["repos/get-all-topics"];
put: operations["repos/replace-all-topics"];
};
"/repos/{owner}/{repo}/traffic/clones": {
get: operations["repos/get-clones"];
};
"/repos/{owner}/{repo}/traffic/popular/paths": {
get: operations["repos/get-top-paths"];
};
"/repos/{owner}/{repo}/traffic/popular/referrers": {
get: operations["repos/get-top-referrers"];
};
"/repos/{owner}/{repo}/traffic/views": {
get: operations["repos/get-views"];
};
"/repos/{owner}/{repo}/transfer": {
post: operations["repos/transfer"];
};
"/repos/{owner}/{repo}/vulnerability-alerts": {
get: operations["repos/check-vulnerability-alerts"];
put: operations["repos/enable-vulnerability-alerts"];
delete: operations["repos/disable-vulnerability-alerts"];
};
"/repos/{owner}/{repo}/zipball/{ref}": {
get: operations["repos/download-zipball-archive"];
};
"/repos/{template_owner}/{template_repo}/generate": {
post: operations["repos/create-using-template"];
};
"/repositories": {
get: operations["repos/list-public"];
};
"/scim/v2/enterprises/{enterprise}/Groups": {
get: operations["enterprise-admin/list-provisioned-groups-enterprise"];
post: operations["enterprise-admin/provision-and-invite-enterprise-group"];
};
"/scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}": {
get: operations["enterprise-admin/get-provisioning-information-for-enterprise-group"];
put: operations["enterprise-admin/set-information-for-provisioned-enterprise-group"];
patch: operations["enterprise-admin/update-attribute-for-enterprise-group"];
delete: operations["enterprise-admin/delete-scim-group-from-enterprise"];
};
"/scim/v2/enterprises/{enterprise}/Users": {
get: operations["enterprise-admin/list-provisioned-identities-enterprise"];
post: operations["enterprise-admin/provision-and-invite-enterprise-user"];
};
"/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}": {
get: operations["enterprise-admin/get-provisioning-information-for-enterprise-user"];
put: operations["enterprise-admin/set-information-for-provisioned-enterprise-user"];
patch: operations["enterprise-admin/update-attribute-for-enterprise-user"];
delete: operations["enterprise-admin/delete-user-from-enterprise"];
};
"/scim/v2/organizations/{org}/Users": {
get: operations["scim/list-provisioned-identities"];
post: operations["scim/provision-and-invite-user"];
};
"/scim/v2/organizations/{org}/Users/{scim_user_id}": {
get: operations["scim/get-provisioning-information-for-user"];
put: operations["scim/set-information-for-provisioned-user"];
patch: operations["scim/update-attribute-for-user"];
delete: operations["scim/delete-user-from-org"];
};
"/search/code": {
get: operations["search/code"];
};
"/search/commits": {
get: operations["search/commits"];
};
"/search/issues": {
get: operations["search/issues-and-pull-requests"];
};
"/search/labels": {
get: operations["search/labels"];
};
"/search/repositories": {
get: operations["search/repos"];
};
"/search/topics": {
get: operations["search/topics"];
};
"/search/users": {
get: operations["search/users"];
};
"/teams/{team_id}": {
get: operations["teams/get-legacy"];
patch: operations["teams/update-legacy"];
delete: operations["teams/delete-legacy"];
};
"/teams/{team_id}/discussions": {
get: operations["teams/list-discussions-legacy"];
post: operations["teams/create-discussion-legacy"];
};
"/teams/{team_id}/discussions/{discussion_number}": {
get: operations["teams/get-discussion-legacy"];
patch: operations["teams/update-discussion-legacy"];
delete: operations["teams/delete-discussion-legacy"];
};
"/teams/{team_id}/discussions/{discussion_number}/comments": {
get: operations["teams/list-discussion-comments-legacy"];
post: operations["teams/create-discussion-comment-legacy"];
};
"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": {
get: operations["teams/get-discussion-comment-legacy"];
patch: operations["teams/update-discussion-comment-legacy"];
delete: operations["teams/delete-discussion-comment-legacy"];
};
"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": {
get: operations["reactions/list-for-team-discussion-comment-legacy"];
post: operations["reactions/create-for-team-discussion-comment-legacy"];
};
"/teams/{team_id}/discussions/{discussion_number}/reactions": {
get: operations["reactions/list-for-team-discussion-legacy"];
post: operations["reactions/create-for-team-discussion-legacy"];
};
"/teams/{team_id}/invitations": {
get: operations["teams/list-pending-invitations-legacy"];
};
"/teams/{team_id}/members": {
get: operations["teams/list-members-legacy"];
};
"/teams/{team_id}/members/{username}": {
get: operations["teams/get-member-legacy"];
put: operations["teams/add-member-legacy"];
delete: operations["teams/remove-member-legacy"];
};
"/teams/{team_id}/memberships/{username}": {
get: operations["teams/get-membership-for-user-legacy"];
put: operations["teams/add-or-update-membership-for-user-legacy"];
delete: operations["teams/remove-membership-for-user-legacy"];
};
"/teams/{team_id}/projects": {
get: operations["teams/list-projects-legacy"];
};
"/teams/{team_id}/projects/{project_id}": {
get: operations["teams/check-permissions-for-project-legacy"];
put: operations["teams/add-or-update-project-permissions-legacy"];
delete: operations["teams/remove-project-legacy"];
};
"/teams/{team_id}/repos": {
get: operations["teams/list-repos-legacy"];
};
"/teams/{team_id}/repos/{owner}/{repo}": {
get: operations["teams/check-permissions-for-repo-legacy"];
put: operations["teams/add-or-update-repo-permissions-legacy"];
delete: operations["teams/remove-repo-legacy"];
};
"/teams/{team_id}/team-sync/group-mappings": {
get: operations["teams/list-idp-groups-for-legacy"];
patch: operations["teams/create-or-update-idp-group-connections-legacy"];
};
"/teams/{team_id}/teams": {
get: operations["teams/list-child-legacy"];
};
"/user": {
get: operations["users/get-authenticated"];
patch: operations["users/update-authenticated"];
};
"/user/blocks": {
get: operations["users/list-blocked-by-authenticated"];
};
"/user/blocks/{username}": {
get: operations["users/check-blocked"];
put: operations["users/block"];
delete: operations["users/unblock"];
};
"/user/email/visibility": {
patch: operations["users/set-primary-email-visibility-for-authenticated"];
};
"/user/emails": {
get: operations["users/list-emails-for-authenticated"];
post: operations["users/add-email-for-authenticated"];
delete: operations["users/delete-email-for-authenticated"];
};
"/user/followers": {
get: operations["users/list-followers-for-authenticated-user"];
};
"/user/following": {
get: operations["users/list-followed-by-authenticated"];
};
"/user/following/{username}": {
get: operations["users/check-person-is-followed-by-authenticated"];
put: operations["users/follow"];
delete: operations["users/unfollow"];
};
"/user/gpg_keys": {
get: operations["users/list-gpg-keys-for-authenticated"];
post: operations["users/create-gpg-key-for-authenticated"];
};
"/user/gpg_keys/{gpg_key_id}": {
get: operations["users/get-gpg-key-for-authenticated"];
delete: operations["users/delete-gpg-key-for-authenticated"];
};
"/user/installations": {
get: operations["apps/list-installations-for-authenticated-user"];
};
"/user/installations/{installation_id}/repositories": {
get: operations["apps/list-installation-repos-for-authenticated-user"];
};
"/user/installations/{installation_id}/repositories/{repository_id}": {
put: operations["apps/add-repo-to-installation"];
delete: operations["apps/remove-repo-from-installation"];
};
"/user/interaction-limits": {
get: operations["interactions/get-restrictions-for-your-public-repos"];
put: operations["interactions/set-restrictions-for-your-public-repos"];
delete: operations["interactions/remove-restrictions-for-your-public-repos"];
};
"/user/issues": {
get: operations["issues/list-for-authenticated-user"];
};
"/user/keys": {
get: operations["users/list-public-ssh-keys-for-authenticated"];
post: operations["users/create-public-ssh-key-for-authenticated"];
};
"/user/keys/{key_id}": {
get: operations["users/get-public-ssh-key-for-authenticated"];
delete: operations["users/delete-public-ssh-key-for-authenticated"];
};
"/user/marketplace_purchases": {
get: operations["apps/list-subscriptions-for-authenticated-user"];
};
"/user/marketplace_purchases/stubbed": {
get: operations["apps/list-subscriptions-for-authenticated-user-stubbed"];
};
"/user/memberships/orgs": {
get: operations["orgs/list-memberships-for-authenticated-user"];
};
"/user/memberships/orgs/{org}": {
get: operations["orgs/get-membership-for-authenticated-user"];
patch: operations["orgs/update-membership-for-authenticated-user"];
};
"/user/migrations": {
get: operations["migrations/list-for-authenticated-user"];
post: operations["migrations/start-for-authenticated-user"];
};
"/user/migrations/{migration_id}": {
get: operations["migrations/get-status-for-authenticated-user"];
};
"/user/migrations/{migration_id}/archive": {
get: operations["migrations/get-archive-for-authenticated-user"];
delete: operations["migrations/delete-archive-for-authenticated-user"];
};
"/user/migrations/{migration_id}/repos/{repo_name}/lock": {
delete: operations["migrations/unlock-repo-for-authenticated-user"];
};
"/user/migrations/{migration_id}/repositories": {
get: operations["migrations/list-repos-for-user"];
};
"/user/orgs": {
get: operations["orgs/list-for-authenticated-user"];
};
"/user/projects": {
post: operations["projects/create-for-authenticated-user"];
};
"/user/public_emails": {
get: operations["users/list-public-emails-for-authenticated"];
};
"/user/repos": {
get: operations["repos/list-for-authenticated-user"];
post: operations["repos/create-for-authenticated-user"];
};
"/user/repository_invitations": {
get: operations["repos/list-invitations-for-authenticated-user"];
};
"/user/repository_invitations/{invitation_id}": {
patch: operations["repos/accept-invitation"];
delete: operations["repos/decline-invitation"];
};
"/user/starred": {
get: operations["activity/list-repos-starred-by-authenticated-user"];
};
"/user/starred/{owner}/{repo}": {
get: operations["activity/check-repo-is-starred-by-authenticated-user"];
put: operations["activity/star-repo-for-authenticated-user"];
delete: operations["activity/unstar-repo-for-authenticated-user"];
};
"/user/subscriptions": {
get: operations["activity/list-watched-repos-for-authenticated-user"];
};
"/user/teams": {
get: operations["teams/list-for-authenticated-user"];
};
"/users": {
get: operations["users/list"];
};
"/users/{username}": {
get: operations["users/get-by-username"];
};
"/users/{username}/events": {
get: operations["activity/list-events-for-authenticated-user"];
};
"/users/{username}/events/orgs/{org}": {
get: operations["activity/list-org-events-for-authenticated-user"];
};
"/users/{username}/events/public": {
get: operations["activity/list-public-events-for-user"];
};
"/users/{username}/followers": {
get: operations["users/list-followers-for-user"];
};
"/users/{username}/following": {
get: operations["users/list-following-for-user"];
};
"/users/{username}/following/{target_user}": {
get: operations["users/check-following-for-user"];
};
"/users/{username}/gists": {
get: operations["gists/list-for-user"];
};
"/users/{username}/gpg_keys": {
get: operations["users/list-gpg-keys-for-user"];
};
"/users/{username}/hovercard": {
get: operations["users/get-context-for-user"];
};
"/users/{username}/installation": {
get: operations["apps/get-user-installation"];
};
"/users/{username}/keys": {
get: operations["users/list-public-keys-for-user"];
};
"/users/{username}/orgs": {
get: operations["orgs/list-for-user"];
};
"/users/{username}/projects": {
get: operations["projects/list-for-user"];
};
"/users/{username}/received_events": {
get: operations["activity/list-received-events-for-user"];
};
"/users/{username}/received_events/public": {
get: operations["activity/list-received-public-events-for-user"];
};
"/users/{username}/repos": {
get: operations["repos/list-for-user"];
};
"/users/{username}/settings/billing/actions": {
get: operations["billing/get-github-actions-billing-user"];
};
"/users/{username}/settings/billing/packages": {
get: operations["billing/get-github-packages-billing-user"];
};
"/users/{username}/settings/billing/shared-storage": {
get: operations["billing/get-shared-storage-billing-user"];
};
"/users/{username}/starred": {
get: operations["activity/list-repos-starred-by-user"];
};
"/users/{username}/subscriptions": {
get: operations["activity/list-repos-watched-by-user"];
};
"/zen": {
get: operations["meta/get-zen"];
};
}
export interface operations {
  // Each key below is an operation ID ("namespace/operation-name") referenced
  // from the `paths` interface above. Each value describes the operation's
  // parameters, request body, and per-status-code response payload shapes.
  /**
   * Get Hypermedia links to resources accessible in GitHub's REST API
   */
  "meta/root": {
    responses: {
      /**
       * response
       */
      "200": {
        // Map of rel-name -> URL (or URL template) for the API root document;
        // only `topic_search_url` is optional.
        "application/json": {
          current_user_url: string;
          current_user_authorizations_html_url: string;
          authorizations_url: string;
          code_search_url: string;
          commit_search_url: string;
          emails_url: string;
          emojis_url: string;
          events_url: string;
          feeds_url: string;
          followers_url: string;
          following_url: string;
          gists_url: string;
          hub_url: string;
          issue_search_url: string;
          issues_url: string;
          keys_url: string;
          label_search_url: string;
          notifications_url: string;
          organization_url: string;
          organization_repositories_url: string;
          organization_teams_url: string;
          public_gists_url: string;
          rate_limit_url: string;
          repository_url: string;
          repository_search_url: string;
          current_user_repositories_url: string;
          starred_url: string;
          starred_gists_url: string;
          topic_search_url?: string;
          user_url: string;
          user_organizations_url: string;
          user_repositories_url: string;
          user_search_url: string;
        };
      };
    };
  };
  /**
   * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint.
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/get-authenticated": {
    parameters: {};
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["integration"];
      };
    };
  };
  /**
   * Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`.
   */
  "apps/create-from-manifest": {
    parameters: {
      path: {
        code: string;
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        // Created app: the shared "integration" schema intersected with the
        // one-time credentials (client secret, webhook secret, PEM private
        // key) that are only returned by this endpoint.
        "application/json": components["schemas"]["integration"] &
          ({
            client_id: string;
            client_secret: string;
            webhook_secret: string;
            pem: string;
          } & { [key: string]: any });
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)."
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/get-webhook-config-for-app": {
    responses: {
      /**
       * Default response
       */
      "200": {
        "application/json": components["schemas"]["webhook-config"];
      };
    };
  };
  /**
   * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)."
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/update-webhook-config-for-app": {
    requestBody: {
      // All fields optional: a partial update of the webhook configuration.
      "application/json": {
        url?: components["schemas"]["webhook-config-url"];
        content_type?: components["schemas"]["webhook-config-content-type"];
        secret?: components["schemas"]["webhook-config-secret"];
        insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
      };
    };
    responses: {
      /**
       * Default response
       */
      "200": {
        "application/json": components["schemas"]["webhook-config"];
      };
    };
  };
  /**
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   *
   * The permissions the installation has are included under the `permissions` key.
   */
  "apps/list-installations": {
    parameters: {
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
        since?: components["parameters"]["since"];
        // NOTE(review): `outdated` is not referenced via components["parameters"]
        // like the others and is not in GitHub's documented query parameters —
        // verify against the upstream OpenAPI description.
        outdated?: string;
      };
    };
    responses: {
      /**
       * The permissions the installation has are included under the `permissions` key.
       */
      "200": {
        "application/json": components["schemas"]["installation"][];
      };
    };
  };
  /**
   * Enables an authenticated GitHub App to find an installation's information using the installation id. The installation's account type (`target_type`) will be either an organization or a user account, depending which account the repository belongs to.
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/get-installation": {
    parameters: {
      path: {
        installation_id: components["parameters"]["installation_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["installation"];
      };
      "404": unknown;
      "415": unknown;
    };
  };
  /**
   * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/v3/apps/#suspend-an-app-installation)" endpoint.
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/delete-installation": {
    parameters: {
      path: {
        installation_id: components["parameters"]["installation_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      // `never` encodes a success with no body (HTTP 204).
      "204": never;
      "404": unknown;
    };
  };
  /**
   * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key.
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/create-installation-access-token": {
    parameters: {
      path: {
        installation_id: components["parameters"]["installation_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * List of repository names that the token should have access to
         */
        repositories?: string[];
        /**
         * List of repository IDs that the token should have access to
         */
        repository_ids?: number[];
        // Optional scoping of the token's permissions (values are access
        // levels such as "read"/"write" — presumably; verify against the docs).
        permissions?: {
          contents?: string;
          issues?: string;
          deployments?: string;
          single_file?: string;
          // NOTE(review): "def_not_a_repo" is not a documented GitHub App
          // permission; it looks like an artifact of GitHub's OpenAPI
          // description — confirm upstream before relying on it.
          def_not_a_repo?: string;
        };
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["installation-token"];
      };
      "401": unknown;
      "403": unknown;
      "404": unknown;
      "415": unknown;
      "422": unknown;
    };
  };
  /**
   * **Note:** Suspending a GitHub App installation is currently in beta and subject to change. Before you can suspend a GitHub App, the app owner must enable suspending installations for the app by opting-in to the beta. For more information, see "[Suspending a GitHub App installation](https://docs.github.com/apps/managing-github-apps/suspending-a-github-app-installation/)."
   *
   * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account.
   *
   * To suspend a GitHub App, you must be an account owner or have admin permissions in the repository or organization where the app is installed.
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/suspend-installation": {
    parameters: {
      path: {
        installation_id: components["parameters"]["installation_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "404": unknown;
    };
  };
  /**
   * **Note:** Suspending a GitHub App installation is currently in beta and subject to change. Before you can suspend a GitHub App, the app owner must enable suspending installations for the app by opting-in to the beta. For more information, see "[Suspending a GitHub App installation](https://docs.github.com/apps/managing-github-apps/suspending-a-github-app-installation/)."
   *
   * Removes a GitHub App installation suspension.
   *
   * To unsuspend a GitHub App, you must be an account owner or have admin permissions in the repository or organization where the app is installed and suspended.
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/unsuspend-installation": {
    parameters: {
      path: {
        installation_id: components["parameters"]["installation_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "404": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
   *
   * You can use this API to list the set of OAuth applications that have been granted access to your account. Unlike the [list your authorizations](https://docs.github.com/rest/reference/oauth-authorizations#list-your-authorizations) API, this API does not manage individual tokens. This API will return one entry for each OAuth application that has been granted access to your account, regardless of the number of tokens an application has generated for your user. The list of OAuth applications returned matches what is shown on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). The `scopes` returned are the union of scopes authorized for the application. For example, if an application has one token with `repo` scope and another token with `user` scope, the grant will return `["repo", "user"]`.
   */
  "oauth-authorizations/list-grants": {
    parameters: {
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["application-grant"][];
      };
      // 304 Not Modified carries no body; error statuses have unspecified bodies.
      "304": never;
      "401": unknown;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
   */
  "oauth-authorizations/get-grant": {
    parameters: {
      path: {
        grant_id: components["parameters"]["grant_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["application-grant"];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
   *
   * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for your user. Once deleted, the application has no access to your account and is no longer listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized).
   */
  "oauth-authorizations/delete-grant": {
    parameters: {
      path: {
        grant_id: components["parameters"]["grant_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted.
   * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized).
   */
  "apps/delete-authorization": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
      };
    };
    // The token to revoke travels in the request body, not the URL.
    requestBody: {
      "application/json": {
        /**
         * The OAuth access token used to authenticate to the GitHub API.
         */
        access_token?: string;
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "422": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will replace and discontinue OAuth endpoints containing `access_token` in the path parameter. We are introducing new endpoints that allow you to securely manage tokens for OAuth Apps by using `access_token` as an input parameter. The OAuth Application API will be removed on May 5, 2021. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/).
   *
   * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid token as `:access_token` and the grant for the token's owner will be deleted.
   *
   * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the Applications settings page under "Authorized OAuth Apps" on GitHub](https://github.com/settings/applications#authorized).
   */
  "apps/revoke-grant-for-application": {
    parameters: {
      // Deprecated form: the token itself is a path parameter (see notice above).
      path: {
        client_id: components["parameters"]["client-id"];
        access_token: components["parameters"]["access-token"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`.
   */
  "apps/check-token": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The access_token of the OAuth application.
         */
        access_token: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["authorization"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`.
   */
  "apps/reset-token": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The access_token of the OAuth application.
         */
        access_token: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["authorization"];
      };
      "422": unknown;
    };
  };
  /**
   * OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password.
   */
  "apps/delete-token": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The OAuth access token used to authenticate to the GitHub API.
         */
        access_token?: string;
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "422": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will replace and discontinue OAuth endpoints containing `access_token` in the path parameter. We are introducing new endpoints that allow you to securely manage tokens for OAuth Apps by using `access_token` as an input parameter. The OAuth Application API will be removed on May 5, 2021. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/).
   *
   * OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`.
   */
  // Deprecated path-parameter variant of "apps/check-token" (see notice above).
  "apps/check-authorization": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
        access_token: components["parameters"]["access-token"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["authorization"];
      };
      "404": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will replace and discontinue OAuth endpoints containing `access_token` in the path parameter. We are introducing new endpoints that allow you to securely manage tokens for OAuth Apps by using `access_token` as an input parameter. The OAuth Application API will be removed on May 5, 2021. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/).
   *
   * OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`.
   */
  // Deprecated path-parameter variant of "apps/reset-token" (see notice above).
  "apps/reset-authorization": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
        access_token: components["parameters"]["access-token"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["authorization"];
      };
    };
  };
  /**
   * **Deprecation Notice:** GitHub will replace and discontinue OAuth endpoints containing `access_token` in the path parameter. We are introducing new endpoints that allow you to securely manage tokens for OAuth Apps by using `access_token` as an input parameter. The OAuth Application API will be removed on May 5, 2021. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/).
   *
   * OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password.
   */
  // Deprecated path-parameter variant of "apps/delete-token" (see notice above).
  "apps/revoke-authorization-for-application": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
        access_token: components["parameters"]["access-token"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`).
   *
   * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint.
   */
  "apps/get-by-slug": {
    parameters: {
      path: {
        app_slug: components["parameters"]["app_slug"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["integration"];
      };
      "403": unknown;
      "404": unknown;
      "415": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
   */
  "oauth-authorizations/list-authorizations": {
    parameters: {
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["authorization"][];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
   *
   * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api).
   *
   * Creates OAuth tokens using [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication). If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)."
   *
   * To create tokens for a particular OAuth application using this endpoint, you must authenticate as the user you want to create an authorization for and provide the app's client ID and secret, found on your OAuth application's settings page. If your OAuth application intends to create multiple tokens for one user, use `fingerprint` to differentiate between them.
   *
   * You can also create tokens on GitHub from the [personal access tokens settings](https://github.com/settings/tokens) page. Read more about these tokens in [the GitHub Help documentation](https://help.github.com/articles/creating-an-access-token-for-command-line-use).
   *
   * Organizations that enforce SAML SSO require personal access tokens to be allowed. Read more about allowing tokens in [the GitHub Help documentation](https://help.github.com/articles/about-identity-and-access-management-with-saml-single-sign-on).
   */
  "oauth-authorizations/create-authorization": {
    parameters: {};
    requestBody: {
      "application/json": {
        /**
         * A list of scopes that this authorization is in.
         */
        scopes?: string[] | null;
        /**
         * A note to remind you what the OAuth token is for.
         */
        note?: string;
        /**
         * A URL to remind you what app the OAuth token is for.
         */
        note_url?: string;
        /**
         * The OAuth app client key for which to create the token.
         */
        client_id?: string;
        /**
         * The OAuth app client secret for which to create the token.
         */
        client_secret?: string;
        /**
         * A unique string to distinguish an authorization from others created for the same client ID and user.
         */
        fingerprint?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["authorization"];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
      "410": unknown;
      "422": unknown;
    };
  };
  /**
   * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
   *
   * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api).
   *
   * Creates a new authorization for the specified OAuth application, only if an authorization for that application doesn't already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one.
   *
   * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)."
   *
   * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
   */
  "oauth-authorizations/get-or-create-authorization-for-app": {
    parameters: {
      path: {
        client_id: components["parameters"]["client-id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The OAuth app client secret for which to create the token.
         */
        client_secret: string;
        /**
         * A list of scopes that this authorization is in.
         */
        scopes?: string[] | null;
        /**
         * A note to remind you what the OAuth token is for.
         */
        note?: string;
        /**
         * A URL to remind you what app the OAuth token is for.
         */
        note_url?: string;
        /**
         * A unique string to distinguish an authorization from others created for the same client ID and user.
         */
        fingerprint?: string;
      };
    };
    responses: {
      /**
       * Response if returning an existing token
       */
      // 200 = an existing authorization was returned; 201 = a new one was created.
      "200": {
        "application/json": components["schemas"]["authorization"];
      };
      /**
       * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
       */
      "201": {
        "application/json": components["schemas"]["authorization"];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
      "422": unknown;
    };
  };
/**
* **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
*
* **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api).
*
* This method will create a new authorization for the specified OAuth application, only if an authorization for that application and fingerprint do not already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. `fingerprint` is a unique string to distinguish an authorization from others created for the same client ID and user. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one.
*
* If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)."
*/
"oauth-authorizations/get-or-create-authorization-for-app-and-fingerprint": {
parameters: {
path: {
client_id: components["parameters"]["client-id"];
fingerprint: string;
};
};
requestBody: {
"application/json": {
/**
* The OAuth app client secret for which to create the token.
*/
client_secret: string;
/**
* A list of scopes that this authorization is in.
*/
scopes?: string[] | null;
/**
* A note to remind you what the OAuth token is for.
*/
note?: string;
/**
* A URL to remind you what app the OAuth token is for.
*/
note_url?: string;
};
};
responses: {
/**
* Response if returning an existing token
*/
"200": {
"application/json": components["schemas"]["authorization"];
};
/**
* Response if returning a new token
*/
"201": {
"application/json": components["schemas"]["authorization"];
};
"422": unknown;
};
};
/**
* **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
*/
"oauth-authorizations/get-authorization": {
parameters: {
path: {
authorization_id: components["parameters"]["authorization_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["authorization"];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
/**
* **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
*
* If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)."
*
* You can only send one of these scope keys at a time.
*/
"oauth-authorizations/update-authorization": {
parameters: {
path: {
authorization_id: components["parameters"]["authorization_id"];
};
};
requestBody: {
"application/json": {
/**
* A list of scopes that this authorization is in.
*/
scopes?: string[] | null;
/**
* A list of scopes to add to this authorization.
*/
add_scopes?: string[];
/**
* A list of scopes to remove from this authorization.
*/
remove_scopes?: string[];
/**
* A note to remind you what the OAuth token is for.
*/
note?: string;
/**
* A URL to remind you what app the OAuth token is for.
*/
note_url?: string;
/**
* A unique string to distinguish an authorization from others created for the same client ID and user.
*/
fingerprint?: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["authorization"];
};
"422": unknown;
};
};
/**
* **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/).
*/
"oauth-authorizations/delete-authorization": {
parameters: {
path: {
authorization_id: components["parameters"]["authorization_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
};
};
  /**
   * Lists all codes of conduct.
   */
  "codes-of-conduct/get-all-codes-of-conduct": {
    parameters: {};
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["code-of-conduct"][];
      };
      "304": never;
      "415": unknown;
    };
  };
  /**
   * Gets a single code of conduct by its `key`.
   */
  "codes-of-conduct/get-conduct-code": {
    parameters: {
      path: {
        key: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["code-of-conduct"];
      };
      "304": never;
      "404": unknown;
      "415": unknown;
    };
  };
  /**
   * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment.
   *
   * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments.
   *
   * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint.
   */
  "apps/create-content-attachment": {
    parameters: {
      path: {
        content_reference_id: number;
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The title of the attachment
         */
        title: string;
        /**
         * The body of the attachment
         */
        body: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["content-reference-attachment"];
      };
      "304": never;
      "403": unknown;
      "404": unknown;
      "410": unknown;
      "415": unknown;
      "422": unknown;
    };
  };
  /**
   * Lists all the emojis available to use on GitHub.
   */
  "emojis/get": {
    parameters: {};
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": { [key: string]: string };
      };
      "304": never;
    };
  };
  /**
   * Gets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/get-github-actions-permissions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["actions-enterprise-permissions"];
      };
    };
  };
  /**
   * Sets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/set-github-actions-permissions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The policy for which organizations have GitHub Actions enabled (see the `enabled-organizations` schema).
         */
        enabled_organizations: components["schemas"]["enabled-organizations"];
        /**
         * The policy for which actions are allowed (see the `allowed-actions` schema).
         */
        allowed_actions?: components["schemas"]["allowed-actions"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": {
          total_count?: number;
          organizations?: components["schemas"]["organization-simple"][];
        };
      };
    };
  };
  /**
   * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * List of organization IDs to enable for GitHub Actions.
         */
        selected_organization_ids: number[];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/enable-selected-organization-github-actions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        org_id: components["parameters"]["org_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/disable-selected-organization-github-actions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        org_id: components["parameters"]["org_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Gets the selected actions that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/get-allowed-actions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["selected-actions"];
      };
    };
  };
  /**
   * Sets the actions that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/set-allowed-actions-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    requestBody: {
      "application/json": components["schemas"]["selected-actions"];
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Lists all self-hosted runner groups for an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/list-self-hosted-runner-groups-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": {
          total_count?: number;
          runner_groups?: components["schemas"]["runner-groups-enterprise"][];
        };
      };
    };
  };
  /**
   * Creates a new self-hosted runner group for an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/create-self-hosted-runner-group-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Name of the runner group.
         */
        name: string;
        /**
         * Visibility of a runner group. You can select all organizations or select individual organizations. Can be one of: `all` or `selected`
         */
        visibility?: "selected" | "all";
        /**
         * List of organization IDs that can access the runner group.
         */
        selected_organization_ids?: number[];
        /**
         * List of runner IDs to add to the runner group.
         */
        runners?: number[];
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["runner-groups-enterprise"];
      };
    };
  };
  /**
   * Gets a specific self-hosted runner group for an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/get-self-hosted-runner-group-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["runner-groups-enterprise"];
      };
    };
  };
  /**
   * Updates the `name` and `visibility` of a self-hosted runner group in an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/update-self-hosted-runner-group-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Name of the runner group.
         */
        name?: string;
        /**
         * Visibility of a runner group. You can select all organizations or select individual organizations. Can be one of: `all` or `selected`
         */
        visibility?: "selected" | "all";
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["runner-groups-enterprise"];
      };
    };
  };
  /**
   * Deletes a self-hosted runner group for an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/delete-self-hosted-runner-group-from-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Lists the organizations with access to a self-hosted runner group.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": {
          total_count?: number;
          organizations?: components["schemas"]["organization-simple"][];
        };
      };
    };
  };
  /**
   * Replaces the list of organizations that have access to a self-hosted runner configured in an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * List of organization IDs that can access the runner group.
         */
        selected_organization_ids: number[];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Adds an organization to the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
        org_id: components["parameters"]["org_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Removes an organization from the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)."
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
        org_id: components["parameters"]["org_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Lists the self-hosted runners that are in a specific enterprise group.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/list-self-hosted-runners-in-group-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": {
          total_count?: number;
          runners?: components["schemas"]["runner"][];
        };
      };
    };
  };
  /**
   * Replaces the list of self-hosted runners that are part of an enterprise runner group.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/set-self-hosted-runners-in-group-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * List of runner IDs to add to the runner group.
         */
        runners: number[];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Adds a self-hosted runner to a runner group configured in an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise`
   * scope to use this endpoint.
   */
  "enterprise-admin/add-self-hosted-runner-to-group-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
        runner_id: components["parameters"]["runner_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Removes a self-hosted runner from a group configured in an enterprise. The runner is then returned to the default group.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_group_id: components["parameters"]["runner_group_id"];
        runner_id: components["parameters"]["runner_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Lists all self-hosted runners configured for an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/list-self-hosted-runners-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": {
          total_count?: number;
          runners?: components["schemas"]["runner"][];
        };
      };
    };
  };
  /**
   * Lists binaries for the runner application that you can download and run.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/list-runner-applications-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["runner-application"][];
      };
    };
  };
  /**
   * Returns a token that you can pass to the `config` script. The token expires after one hour.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   *
   * #### Example using registration token
   *
   * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint.
   *
   * ```
   * ./config.sh --url https://github.com/enterprises/octo-enterprise --token TOKEN
   * ```
   */
  "enterprise-admin/create-registration-token-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["authentication-token"];
      };
    };
  };
  /**
   * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an enterprise. The token expires after one hour.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   *
   * #### Example using remove token
   *
   * To remove your self-hosted runner from an enterprise, replace `TOKEN` with the remove token provided by this
   * endpoint.
   *
   * ```
   * ./config.sh remove --token TOKEN
   * ```
   */
  "enterprise-admin/create-remove-token-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["authentication-token"];
      };
    };
  };
  /**
   * Gets a specific self-hosted runner configured in an enterprise.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/get-self-hosted-runner-for-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_id: components["parameters"]["runner_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["runner"];
      };
    };
  };
  /**
   * Forces the removal of a self-hosted runner from an enterprise. You can use this endpoint to completely remove the runner when the machine you were using no longer exists.
   *
   * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint.
   */
  "enterprise-admin/delete-self-hosted-runner-from-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        runner_id: components["parameters"]["runner_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Gets the summary of the free and paid GitHub Actions minutes used.
   *
   * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)".
   *
   * The authenticated user must be an enterprise admin.
   */
  "billing/get-github-actions-billing-ghe": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["actions-billing-usage"];
      };
    };
  };
  /**
   * Gets the free and paid storage used for GitHub Packages in gigabytes.
   *
   * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)."
   *
   * The authenticated user must be an enterprise admin.
   */
  "billing/get-github-packages-billing-ghe": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["packages-billing-usage"];
      };
    };
  };
  /**
   * Gets the estimated paid and estimated total storage used for GitHub Actions and Github Packages.
   *
   * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)."
   *
   * The authenticated user must be an enterprise admin.
   */
  "billing/get-shared-storage-billing-ghe": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["combined-billing-usage"];
      };
    };
  };
  /**
   * We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago.
   */
  "activity/list-public-events": {
    parameters: {
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["event"][];
      };
      "304": never;
      "403": unknown;
      "503": unknown;
    };
  };
  /**
   * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user:
   *
   * * **Timeline**: The GitHub global public timeline
   * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia)
   * * **Current user public**: The public timeline for the authenticated user
   * * **Current user**: The private timeline for the authenticated user
   * * **Current user actor**: The private timeline for activity created by the authenticated user
   * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of.
   * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub.
   *
   * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens.
   */
  "activity/get-feeds": {
    parameters: {};
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["feed"];
      };
    };
  };
  /**
   * Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists:
   */
  "gists/list": {
    parameters: {
      query: {
        since?: components["parameters"]["since"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["base-gist"][];
      };
      "304": never;
      "403": unknown;
    };
  };
  /**
   * Allows you to add a new gist with one or more files.
   *
   * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally.
   */
  "gists/create": {
    parameters: {};
    requestBody: {
      "application/json": {
        /**
         * Description of the gist
         */
        description?: string;
        /**
         * Names and content for the files that make up the gist
         */
        files: {
          [key: string]: {
            /**
             * Content of the file
             */
            content: string;
          };
        };
        public?: boolean | ("true" | "false");
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["gist-full"];
      };
      "304": never;
      "403": unknown;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * List public gists sorted by most recently updated to least recently updated.
   *
   * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page.
   */
  "gists/list-public": {
    parameters: {
      query: {
        since?: components["parameters"]["since"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["base-gist"][];
      };
      "304": never;
      "403": unknown;
      "422": unknown;
    };
  };
  /**
   * List the authenticated user's starred gists:
   */
  "gists/list-starred": {
    parameters: {
      query: {
        since?: components["parameters"]["since"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["base-gist"][];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * Gets a single gist.
   */
  "gists/get": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["gist-full"];
      };
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged.
   */
  "gists/update": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
      };
    };
    requestBody: {
      "application/json": Partial<{ [key: string]: any }> &
        Partial<{ [key: string]: any }>;
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["gist-full"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Deletes a gist.
   */
  "gists/delete": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * Lists comments on a gist.
   */
  "gists/list-comments": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["gist-comment"][];
      };
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * Creates a comment on a gist.
   */
  "gists/create-comment": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The comment text.
         */
        body: string;
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["gist-comment"];
      };
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * Gets a single comment on a gist.
   */
  "gists/get-comment": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["gist-comment"];
      };
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * Updates a comment on a gist.
   */
  "gists/update-comment": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The comment text.
         */
        body: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["gist-comment"];
      };
      "404": unknown;
    };
  };
  /**
   * Deletes a comment on a gist.
   */
  "gists/delete-comment": {
    parameters: {
      path: {
        gist_id: components["parameters"]["gist_id"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
"gists/list-commits": {
parameters: {
path: {
gist_id: components["parameters"]["gist_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["gist-commit"][];
};
"304": never;
"403": unknown;
"404": unknown;
};
};
"gists/list-forks": {
parameters: {
path: {
gist_id: components["parameters"]["gist_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["gist-full"][];
};
"304": never;
"403": unknown;
"404": unknown;
};
};
/**
* **Note**: This was previously `/gists/:gist_id/fork`.
*/
"gists/fork": {
parameters: {
path: {
gist_id: components["parameters"]["gist_id"];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["base-gist"];
};
"304": never;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
"gists/check-is-starred": {
parameters: {
path: {
gist_id: components["parameters"]["gist_id"];
};
};
responses: {
/**
* Response if gist is starred
*/
"204": never;
"304": never;
"403": unknown;
/**
* Response if gist is not starred
*/
"404": {
"application/json": { [key: string]: any };
};
};
};
/**
* Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*/
"gists/star": {
parameters: {
path: {
gist_id: components["parameters"]["gist_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"403": unknown;
"404": unknown;
};
};
"gists/unstar": {
parameters: {
path: {
gist_id: components["parameters"]["gist_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"403": unknown;
"404": unknown;
};
};
"gists/get-revision": {
parameters: {
path: {
gist_id: components["parameters"]["gist_id"];
sha: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["gist-full"];
};
"403": unknown;
"404": unknown;
"422": unknown;
};
};
  /**
   * List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user).
   */
  "gitignore/get-all-templates": {
    parameters: {};
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": string[];
      };
      "304": never;
    };
  };
  /**
   * The API also allows fetching the source of a single template.
   * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents.
   */
  "gitignore/get-template": {
    parameters: {
      path: {
        name: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["gitignore-template"];
      };
      "304": never;
    };
  };
  /**
   * List repositories that an app installation can access.
   *
   * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint.
   */
  "apps/list-repos-accessible-to-installation": {
    parameters: {
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": {
          total_count?: number;
          repositories?: components["schemas"]["repository"][];
          repository_selection?: string;
        };
      };
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * Revokes the installation token you're using to authenticate as an installation and access this endpoint.
   *
   * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint.
   *
   * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint.
   */
  "apps/revoke-installation-access-token": {
    parameters: {};
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * List issues assigned to the authenticated user across all visible repositories including owned repositories, member
   * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not
   * necessarily assigned to you.
   *
   *
   * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this
   * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by
   * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull
   * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
   */
  "issues/list": {
    parameters: {
      query: {
        /**
         * Indicates which sorts of issues to return. Can be one of:
         * \* `assigned`: Issues assigned to you
         * \* `created`: Issues created by you
         * \* `mentioned`: Issues mentioning you
         * \* `subscribed`: Issues you're subscribed to updates for
         * \* `all`: All issues the authenticated user can see, regardless of participation or creation
         */
        filter?: "assigned" | "created" | "mentioned" | "subscribed" | "all";
        /**
         * Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`.
         */
        state?: "open" | "closed" | "all";
        labels?: components["parameters"]["labels"];
        /**
         * What to sort results by. Can be either `created`, `updated`, `comments`.
         */
        sort?: "created" | "updated" | "comments";
        direction?: components["parameters"]["direction"];
        since?: components["parameters"]["since"];
        collab?: boolean;
        orgs?: boolean;
        owned?: boolean;
        pulls?: boolean;
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue"][];
      };
      "304": never;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * List commonly used open-source licenses.
   */
  "licenses/get-all-commonly-used": {
    parameters: {
      query: {
        featured?: boolean;
        per_page?: components["parameters"]["per_page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["license-simple"][];
      };
      "304": never;
    };
  };
  /**
   * Get a license by its key (e.g. `mit`).
   */
  "licenses/get": {
    parameters: {
      path: {
        license: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["license"];
      };
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * Render a Markdown document as HTML.
   */
  "markdown/render": {
    parameters: {};
    requestBody: {
      "application/json": {
        /**
         * The Markdown text to render in HTML.
         */
        text: string;
        /**
         * The rendering mode.
         */
        mode?: "markdown" | "gfm";
        /**
         * The repository context to use when creating references in `gfm` mode.
         */
        context?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": unknown;
      "304": never;
    };
  };
  /**
   * You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less.
   */
  "markdown/render-raw": {
    parameters: {};
    requestBody: {
      "text/plain": string;
      "text/x-markdown": string;
    };
    responses: {
      /**
       * response
       */
      "200": {
        "text/html": string;
      };
      "304": never;
    };
  };
  /**
   * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change.
   *
   * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint.
   */
  "apps/get-subscription-plan-for-account": {
    parameters: {
      path: {
        account_id: components["parameters"]["account_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["marketplace-purchase"];
      };
      "401": unknown;
      /**
       * Response when the account has not purchased the listing
       */
      "404": {
        "application/json": components["schemas"]["basic-error"];
      };
    };
  };
  /**
   * Lists all plans that are part of your GitHub Marketplace listing.
   *
   * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint.
   */
  "apps/list-plans": {
    parameters: {
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["marketplace-listing-plan"][];
      };
      "401": unknown;
      "404": unknown;
    };
  };
  /**
   * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change.
   *
   * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint.
   */
  "apps/list-accounts-for-plan": {
    parameters: {
      path: {
        plan_id: components["parameters"]["plan_id"];
      };
      query: {
        sort?: components["parameters"]["sort"];
        /**
         * To return the oldest accounts first, set to `asc`. Can be one of `asc` or `desc`. Ignored without the `sort` parameter.
         */
        direction?: "asc" | "desc";
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["marketplace-purchase"][];
      };
      "401": unknown;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change.
   *
   * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint.
   */
  "apps/get-subscription-plan-for-account-stubbed": {
    parameters: {
      path: {
        account_id: components["parameters"]["account_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["marketplace-purchase"];
      };
      "401": unknown;
      /**
       * Response when the account has not purchased the listing
       */
      "404": unknown;
    };
  };
  /**
   * Lists all plans that are part of your GitHub Marketplace listing.
   *
   * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint.
   */
  "apps/list-plans-stubbed": {
    parameters: {
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["marketplace-listing-plan"][];
      };
      "401": unknown;
    };
  };
  /**
   * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change.
   *
   * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint.
   */
  "apps/list-accounts-for-plan-stubbed": {
    parameters: {
      path: {
        plan_id: components["parameters"]["plan_id"];
      };
      query: {
        sort?: components["parameters"]["sort"];
        /**
         * To return the oldest accounts first, set to `asc`. Can be one of `asc` or `desc`. Ignored without the `sort` parameter.
         */
        direction?: "asc" | "desc";
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["marketplace-purchase"][];
      };
      "401": unknown;
    };
  };
  /**
   * This endpoint provides a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://help.github.com/articles/about-github-s-ip-addresses/)."
   */
  "meta/get": {
    parameters: {};
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["api-overview"];
      };
      "304": never;
    };
  };
  /**
   * List public events for a network of repositories.
   */
  "activity/list-public-events-for-repo-network": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["event"][];
      };
      "301": never;
      "304": never;
      "403": unknown;
      "404": unknown;
    };
  };
  /**
   * List all notifications for the current user, sorted by most recently updated.
   */
  "activity/list-notifications-for-authenticated-user": {
    parameters: {
      query: {
        all?: components["parameters"]["all"];
        participating?: components["parameters"]["participating"];
        since?: components["parameters"]["since"];
        before?: components["parameters"]["before"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["thread"][];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
      "422": unknown;
    };
  };
  /**
   * Marks all notifications as "read", removing them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`.
   */
  "activity/mark-notifications-as-read": {
    parameters: {};
    requestBody: {
      "application/json": {
        /**
         * Describes the last point that notifications were checked.
         */
        last_read_at?: string;
        /**
         * Whether the notification has been read.
         */
        read?: boolean;
      };
    };
    responses: {
      /**
       * response
       */
      "202": {
        "application/json": { message?: string };
      };
      /**
       * response
       */
      "205": unknown;
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * Get a notification thread.
   */
  "activity/get-thread": {
    parameters: {
      path: {
        thread_id: components["parameters"]["thread_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["thread"];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * Mark a notification thread as read.
   */
  "activity/mark-thread-as-read": {
    parameters: {
      path: {
        thread_id: components["parameters"]["thread_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "205": unknown;
      "304": never;
      "403": unknown;
    };
  };
  /**
   * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription).
   *
   * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread.
   */
  "activity/get-thread-subscription-for-authenticated-user": {
    parameters: {
      path: {
        thread_id: components["parameters"]["thread_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["thread-subscription"];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**.
   *
   * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribe to threads that you have previously ignored.
   *
   * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint.
   */
  "activity/set-thread-subscription": {
    parameters: {
      path: {
        thread_id: components["parameters"]["thread_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Whether to block all notifications from a thread.
         */
        ignored?: boolean;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["thread-subscription"];
      };
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`.
   */
  "activity/delete-thread-subscription": {
    parameters: {
      path: {
        thread_id: components["parameters"]["thread_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "304": never;
      "401": unknown;
      "403": unknown;
    };
  };
  /**
   * Get the octocat as ASCII art
   */
  "meta/get-octocat": {
    parameters: {
      query: {
        /**
         * The words to show in Octocat's speech bubble
         */
        s?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/octocat-stream": string;
      };
    };
  };
  /**
   * Lists all organizations, in the order that they were created on GitHub.
   *
   * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations.
   */
  "orgs/list": {
    parameters: {
      query: {
        since?: components["parameters"]["since-org"];
        per_page?: components["parameters"]["per_page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["organization-simple"][];
      };
      "304": never;
    };
  };
  /**
   * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/).
   *
   * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below.
   */
  "orgs/get": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["organization-full"];
      };
      "404": unknown;
    };
  };
  /**
   * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes).
   *
   * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges.
   */
  "orgs/update": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Billing email address. This address is not publicized.
         */
        billing_email?: string;
        /**
         * The company name.
         */
        company?: string;
        /**
         * The publicly visible email address.
         */
        email?: string;
        /**
         * The Twitter username of the company.
         */
        twitter_username?: string;
        /**
         * The location.
         */
        location?: string;
        /**
         * The shorthand name of the company.
         */
        name?: string;
        /**
         * The description of the company.
         */
        description?: string;
        /**
         * Toggles whether an organization can use organization projects.
         */
        has_organization_projects?: boolean;
        /**
         * Toggles whether repositories that belong to the organization can use repository projects.
         */
        has_repository_projects?: boolean;
        /**
         * Default permission level members have for organization repositories:
         * \* `read` - can pull, but not push to or administer this repository.
         * \* `write` - can pull and push, but not administer this repository.
         * \* `admin` - can pull, push, and administer this repository.
         * \* `none` - no permissions granted by default.
         */
        default_repository_permission?: "read" | "write" | "admin" | "none";
        /**
         * Toggles the ability of non-admin organization members to create repositories. Can be one of:
         * \* `true` - all organization members can create repositories.
         * \* `false` - only organization owners can create repositories.
         * Default: `true`
         * **Note:** A parameter can override this parameter. See `members_allowed_repository_creation_type` in this table for details.
         */
        members_can_create_repositories?: boolean;
        /**
         * Toggles whether organization members can create internal repositories, which are visible to all enterprise members. You can only allow members to create internal repositories if your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. Can be one of:
         * \* `true` - all organization members can create internal repositories.
         * \* `false` - only organization owners can create internal repositories.
         * Default: `true`. For more information, see "[Restricting repository creation in your organization](https://help.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation.
         */
        members_can_create_internal_repositories?: boolean;
        /**
         * Toggles whether organization members can create private repositories, which are visible to organization members with permission. Can be one of:
         * \* `true` - all organization members can create private repositories.
         * \* `false` - only organization owners can create private repositories.
         * Default: `true`. For more information, see "[Restricting repository creation in your organization](https://help.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation.
         */
        members_can_create_private_repositories?: boolean;
        /**
         * Toggles whether organization members can create public repositories, which are visible to anyone. Can be one of:
         * \* `true` - all organization members can create public repositories.
         * \* `false` - only organization owners can create public repositories.
         * Default: `true`. For more information, see "[Restricting repository creation in your organization](https://help.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation.
         */
        members_can_create_public_repositories?: boolean;
        /**
         * Specifies which types of repositories non-admin organization members can create. Can be one of:
         * \* `all` - all organization members can create public and private repositories.
         * \* `private` - members can create private repositories. This option is only available to repositories that are part of an organization on GitHub Enterprise Cloud.
         * \* `none` - only admin members can create repositories.
         * **Note:** This parameter is deprecated and will be removed in the future. Its return value ignores internal repositories. Using this parameter overrides values set in `members_can_create_repositories`. See the parameter deprecation notice in the operation description for details.
         */
        members_allowed_repository_creation_type?: "all" | "private" | "none";
        /**
         * Toggles whether organization members can create GitHub Pages sites. Can be one of:
         * \* `true` - all organization members can create GitHub Pages sites.
         * \* `false` - no organization members can create GitHub Pages sites. Existing published sites will not be impacted.
         * Default: `true`.
         */
        members_can_create_pages?: boolean;
        blog?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["organization-full"];
      };
      "409": unknown;
      "415": unknown;
      /**
       * Validation Failed
       */
      "422": {
        "application/json":
          | components["schemas"]["validation-error"]
          | components["schemas"]["validation-error-simple"];
      };
    };
  };
/**
* Gets the GitHub Actions permissions policy for repositories and allowed actions in an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/get-github-actions-permissions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["actions-organization-permissions"];
};
};
};
/**
* Sets the GitHub Actions permissions policy for repositories and allowed actions in an organization.
*
* If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions, then you cannot override them for the organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/set-github-actions-permissions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
enabled_repositories: components["schemas"]["enabled-repositories"];
allowed_actions?: components["schemas"]["allowed-actions"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/list-selected-repositories-enabled-github-actions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
repositories?: components["schemas"]["repository"][];
};
};
};
};
/**
* Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/set-selected-repositories-enabled-github-actions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* List of repository IDs to enable for GitHub Actions.
*/
selected_repository_ids: number[];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/enable-selected-repository-github-actions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
repository_id: components["parameters"]["repository_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/disable-selected-repository-github-actions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
repository_id: components["parameters"]["repository_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
 * Gets the selected actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/get-allowed-actions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["selected-actions"];
};
};
};
/**
* Sets the actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
*
* If the organization belongs to an enterprise that has `selected` actions set at the enterprise level, then you cannot override any of the enterprise's allowed actions settings.
*
* To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
*/
"actions/set-allowed-actions-organization": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": components["schemas"]["selected-actions"];
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Lists all self-hosted runner groups configured in an organization and inherited from an enterprise.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/list-self-hosted-runner-groups-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
runner_groups?: components["schemas"]["runner-groups-org"][];
};
};
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Creates a new self-hosted runner group for an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/create-self-hosted-runner-group-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* Name of the runner group.
*/
name: string;
/**
* Visibility of a runner group. You can select all repositories, select individual repositories, or limit access to private repositories. Can be one of: `all`, `selected`, or `private`.
*/
visibility?: "selected" | "all" | "private";
/**
* List of repository IDs that can access the runner group.
*/
selected_repository_ids?: number[];
/**
* List of runner IDs to add to the runner group.
*/
runners?: number[];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["runner-groups-org"];
};
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Gets a specific self-hosted runner group for an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/get-self-hosted-runner-group-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["runner-groups-org"];
};
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Updates the `name` and `visibility` of a self-hosted runner group in an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/update-self-hosted-runner-group-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
};
};
requestBody: {
"application/json": {
/**
* Name of the runner group.
*/
name?: string;
/**
* Visibility of a runner group. You can select all repositories, select individual repositories, or all private repositories. Can be one of: `all`, `selected`, or `private`.
*/
visibility?: "selected" | "all" | "private";
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["runner-groups-org"];
};
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Deletes a self-hosted runner group for an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/delete-self-hosted-runner-group-from-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Lists the repositories with access to a self-hosted runner group configured in an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/list-repo-access-to-self-hosted-runner-group-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
repositories?: components["schemas"]["repository"][];
};
};
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Replaces the list of repositories that have access to a self-hosted runner group configured in an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/set-repo-access-to-self-hosted-runner-group-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
};
};
requestBody: {
"application/json": {
/**
* List of repository IDs that can access the runner group.
*/
selected_repository_ids: number[];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
*
* Adds a repository to the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)."
*
* You must authenticate using an access token with the `admin:org`
* scope to use this endpoint.
*/
"actions/add-repo-access-to-self-hosted-runner-group-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
repository_id: components["parameters"]["repository_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
*
* Removes a repository from the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)."
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/remove-repo-access-to-self-hosted-runner-group-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
repository_id: components["parameters"]["repository_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Lists self-hosted runners that are in a specific organization group.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/list-self-hosted-runners-in-group-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
runners?: components["schemas"]["runner"][];
};
};
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
* Replaces the list of self-hosted runners that are part of an organization runner group.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/set-self-hosted-runners-in-group-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
};
};
requestBody: {
"application/json": {
/**
* List of runner IDs to add to the runner group.
*/
runners: number[];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
*
* Adds a self-hosted runner to a runner group configured in an organization.
*
* You must authenticate using an access token with the `admin:org`
* scope to use this endpoint.
*/
"actions/add-self-hosted-runner-to-group-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
runner_id: components["parameters"]["runner_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)."
*
*
* Removes a self-hosted runner from a group configured in an organization. The runner is then returned to the default group.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/remove-self-hosted-runner-from-group-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_group_id: components["parameters"]["runner_group_id"];
runner_id: components["parameters"]["runner_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Lists all self-hosted runners configured in an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/list-self-hosted-runners-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
runners?: components["schemas"]["runner"][];
};
};
};
};
/**
* Lists binaries for the runner application that you can download and run.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/list-runner-applications-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["runner-application"][];
};
};
};
/**
* Returns a token that you can pass to the `config` script. The token expires after one hour.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*
* #### Example using registration token
*
* Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint.
*
* ```
* ./config.sh --url https://github.com/octo-org --token TOKEN
* ```
*/
"actions/create-registration-token-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["authentication-token"];
};
};
};
/**
* Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*
* #### Example using remove token
*
* To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this
* endpoint.
*
* ```
* ./config.sh remove --token TOKEN
* ```
*/
"actions/create-remove-token-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["authentication-token"];
};
};
};
/**
* Gets a specific self-hosted runner configured in an organization.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/get-self-hosted-runner-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_id: components["parameters"]["runner_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["runner"];
};
};
};
/**
* Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists.
*
* You must authenticate using an access token with the `admin:org` scope to use this endpoint.
*/
"actions/delete-self-hosted-runner-from-org": {
parameters: {
path: {
org: components["parameters"]["org"];
runner_id: components["parameters"]["runner_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/list-org-secrets": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
secrets?: components["schemas"]["organization-actions-secret"][];
};
};
};
};
/**
* Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/get-org-public-key": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["actions-public-key"];
};
};
};
/**
* Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/get-org-secret": {
parameters: {
path: {
org: components["parameters"]["org"];
secret_name: components["parameters"]["secret_name"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["organization-actions-secret"];
};
};
};
/**
* Creates or updates an organization secret with an encrypted value. Encrypt your secret using
* [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access
* token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to
* use this endpoint.
*
* #### Example encrypting a secret using Node.js
*
* Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library.
*
* ```
* const sodium = require('tweetsodium');
*
* const key = "base64-encoded-public-key";
* const value = "plain-text-secret";
*
* // Convert the message and key to Uint8Array's (Buffer implements that interface)
* const messageBytes = Buffer.from(value);
* const keyBytes = Buffer.from(key, 'base64');
*
* // Encrypt using LibSodium.
* const encryptedBytes = sodium.seal(messageBytes, keyBytes);
*
* // Base64 the encrypted secret
* const encrypted = Buffer.from(encryptedBytes).toString('base64');
*
* console.log(encrypted);
* ```
*
*
* #### Example encrypting a secret using Python
*
* Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3.
*
* ```
* from base64 import b64encode
* from nacl import encoding, public
*
* def encrypt(public_key: str, secret_value: str) -> str:
* """Encrypt a Unicode string using the public key."""
* public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder())
* sealed_box = public.SealedBox(public_key)
* encrypted = sealed_box.encrypt(secret_value.encode("utf-8"))
* return b64encode(encrypted).decode("utf-8")
* ```
*
* #### Example encrypting a secret using C#
*
* Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package.
*
* ```
* var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret");
* var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU=");
*
* var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey);
*
* Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox));
* ```
*
* #### Example encrypting a secret using Ruby
*
* Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem.
*
* ```ruby
* require "rbnacl"
* require "base64"
*
* key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=")
* public_key = RbNaCl::PublicKey.new(key)
*
* box = RbNaCl::Boxes::Sealed.from_public_key(public_key)
* encrypted_secret = box.encrypt("my_secret")
*
* # Print the base64 encoded secret
* puts Base64.strict_encode64(encrypted_secret)
* ```
*/
"actions/create-or-update-org-secret": {
parameters: {
path: {
org: components["parameters"]["org"];
secret_name: components["parameters"]["secret_name"];
};
};
requestBody: {
"application/json": {
/**
* Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/actions#get-an-organization-public-key) endpoint.
*/
encrypted_value?: string;
/**
* ID of the key you used to encrypt the secret.
*/
key_id?: string;
/**
* Configures the access that repositories have to the organization secret. Can be one of:
* \- `all` - All repositories in an organization can access the secret.
* \- `private` - Private repositories in an organization can access the secret.
* \- `selected` - Only specific repositories can access the secret.
*/
visibility?: "all" | "private" | "selected";
/**
 * An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints.
 *
 * NOTE(review): declared as `string[]` here, but the analogous `selected_repository_ids` in
 * `actions/set-selected-repos-for-org-secret` is `number[]` — confirm the element type against
 * the upstream OpenAPI description before regenerating or hand-editing.
 */
selected_repository_ids?: string[];
};
};
responses: {
/**
* Response when creating a secret
*/
"201": unknown;
/**
* Response when updating a secret
*/
"204": never;
};
};
/**
* Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/delete-org-secret": {
parameters: {
path: {
org: components["parameters"]["org"];
secret_name: components["parameters"]["secret_name"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/list-selected-repos-for-org-secret": {
parameters: {
path: {
org: components["parameters"]["org"];
secret_name: components["parameters"]["secret_name"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
repositories?: components["schemas"]["minimal-repository"][];
};
};
};
};
/**
* Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/set-selected-repos-for-org-secret": {
parameters: {
path: {
org: components["parameters"]["org"];
secret_name: components["parameters"]["secret_name"];
};
};
requestBody: {
"application/json": {
/**
* An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints.
*/
selected_repository_ids?: number[];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/add-selected-repo-to-org-secret": {
parameters: {
path: {
org: components["parameters"]["org"];
secret_name: components["parameters"]["secret_name"];
repository_id: number;
};
};
responses: {
/**
* Response when repository was added to the selected list
*/
"204": never;
/**
* Response when visibility type is not set to selected
*/
"409": unknown;
};
};
/**
* Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint.
*/
"actions/remove-selected-repo-from-org-secret": {
parameters: {
path: {
org: components["parameters"]["org"];
secret_name: components["parameters"]["secret_name"];
repository_id: number;
};
};
responses: {
/**
* Response when repository was removed from the selected list
*/
"204": never;
/**
* Response when visibility type not set to selected
*/
"409": unknown;
};
};
/**
* List the users blocked by an organization.
*/
"orgs/list-blocked-users": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
"415": unknown;
};
};
"orgs/check-blocked-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* If the user is blocked:
*/
"204": never;
/**
* If the user is not blocked:
*/
"404": {
"application/json": components["schemas"]["basic-error"];
};
};
};
"orgs/block-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"422": unknown;
};
};
"orgs/unblock-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products).
*
* An authenticated organization owner with the `read:org` scope can list all credential authorizations for an organization that uses SAML single sign-on (SSO). The credentials are either personal access tokens or SSH keys that organization members have authorized for the organization. For more information, see [About authentication with SAML single sign-on](https://help.github.com/en/articles/about-authentication-with-saml-single-sign-on).
*/
"orgs/list-saml-sso-authorizations": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["credential-authorization"][];
};
};
};
/**
* Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products).
*
* An authenticated organization owner with the `admin:org` scope can remove a credential authorization for an organization that uses SAML SSO. Once you remove someone's credential authorization, they will need to create a new personal access token or SSH key and authorize it for the organization they want to access.
*/
"orgs/remove-saml-sso-authorization": {
parameters: {
path: {
org: components["parameters"]["org"];
credential_id: number;
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
"activity/list-public-org-events": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["event"][];
};
};
};
"orgs/list-webhooks": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-hook"][];
};
"404": unknown;
};
};
/**
* Here's how you can create a hook that posts payloads in JSON format:
*/
"orgs/create-webhook": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* Must be passed as "web".
*/
name: string;
/**
* Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#create-hook-config-params).
*/
config: {
url: components["schemas"]["webhook-config-url"];
content_type?: components["schemas"]["webhook-config-content-type"];
secret?: components["schemas"]["webhook-config-secret"];
insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
username?: string;
password?: string;
};
/**
* Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for.
*/
events?: string[];
/**
* Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications.
*/
active?: boolean;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["org-hook"];
};
"404": unknown;
"422": unknown;
};
};
/**
* Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)."
*/
"orgs/get-webhook": {
parameters: {
path: {
org: components["parameters"]["org"];
hook_id: components["parameters"]["hook-id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-hook"];
};
"404": unknown;
};
};
/**
* Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)."
*/
"orgs/update-webhook": {
parameters: {
path: {
org: components["parameters"]["org"];
hook_id: components["parameters"]["hook-id"];
};
};
requestBody: {
"application/json": {
/**
* Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#update-hook-config-params).
*/
config?: {
url: components["schemas"]["webhook-config-url"];
content_type?: components["schemas"]["webhook-config-content-type"];
secret?: components["schemas"]["webhook-config-secret"];
insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
};
/**
* Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for.
*/
events?: string[];
/**
* Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications.
*/
active?: boolean;
name?: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-hook"];
};
"404": unknown;
"422": unknown;
};
};
"orgs/delete-webhook": {
parameters: {
path: {
org: components["parameters"]["org"];
hook_id: components["parameters"]["hook-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
/**
* Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)."
*
* Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission.
*/
"orgs/get-webhook-config-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
hook_id: components["parameters"]["hook-id"];
};
};
responses: {
/**
* Default response
*/
"200": {
"application/json": components["schemas"]["webhook-config"];
};
};
};
/**
* Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)."
*
* Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission.
*/
"orgs/update-webhook-config-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
hook_id: components["parameters"]["hook-id"];
};
};
requestBody: {
"application/json": {
url?: components["schemas"]["webhook-config-url"];
content_type?: components["schemas"]["webhook-config-content-type"];
secret?: components["schemas"]["webhook-config-secret"];
insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
};
};
responses: {
/**
* Default response
*/
"200": {
"application/json": components["schemas"]["webhook-config"];
};
};
};
/**
* This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook.
*/
"orgs/ping-webhook": {
parameters: {
path: {
org: components["parameters"]["org"];
hook_id: components["parameters"]["hook-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
/**
* Enables an authenticated GitHub App to find the organization's installation information.
*
* You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
*/
"apps/get-org-installation": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["installation"];
};
};
};
/**
* Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint.
*/
"orgs/list-app-installations": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
installations?: components["schemas"]["installation"][];
};
};
};
};
/**
* Shows which type of GitHub user can interact with this organization and when the restriction expires. If there are no restrictions, you will see an empty response.
*/
"interactions/get-restrictions-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["interaction-limit-response"];
};
};
};
/**
* Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization.
*/
"interactions/set-restrictions-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": components["schemas"]["interaction-limit"];
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["interaction-limit-response"];
};
"422": unknown;
};
};
/**
* Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions.
*/
"interactions/remove-restrictions-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`.
*/
"orgs/list-pending-invitations": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["organization-invitation"][];
};
"404": unknown;
};
};
/**
* Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner.
*
* This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
*/
"orgs/create-invitation": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* **Required unless you provide `email`**. GitHub user ID for the person you are inviting.
*/
invitee_id?: number;
/**
* **Required unless you provide `invitee_id`**. Email address of the person you are inviting, which can be an existing GitHub user.
*/
email?: string;
/**
* Specify role for new member. Can be one of:
* \* `admin` - Organization owners with full administrative rights to the organization and complete access to all repositories and teams.
* \* `direct_member` - Non-owner organization members with ability to see other members and join teams by invitation.
* \* `billing_manager` - Non-owner organization members with ability to manage the billing settings of your organization.
*/
role?: "admin" | "direct_member" | "billing_manager";
/**
* Specify IDs for the teams you want to invite new members to.
*/
team_ids?: number[];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["organization-invitation"];
};
"404": unknown;
"422": unknown;
};
};
/**
* List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner.
*/
"orgs/list-invitation-teams": {
parameters: {
path: {
org: components["parameters"]["org"];
invitation_id: components["parameters"]["invitation_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team"][];
};
"404": unknown;
};
};
/**
* List issues in an organization assigned to the authenticated user.
*
* **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this
* reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by
* the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull
* request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
*/
"issues/list-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
/**
* Indicates which sorts of issues to return. Can be one of:
* \* `assigned`: Issues assigned to you
* \* `created`: Issues created by you
* \* `mentioned`: Issues mentioning you
* \* `subscribed`: Issues you're subscribed to updates for
* \* `all`: All issues the authenticated user can see, regardless of participation or creation
*/
filter?: "assigned" | "created" | "mentioned" | "subscribed" | "all";
/**
* Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`.
*/
state?: "open" | "closed" | "all";
labels?: components["parameters"]["labels"];
/**
* What to sort results by. Can be either `created`, `updated`, `comments`.
*/
sort?: "created" | "updated" | "comments";
direction?: components["parameters"]["direction"];
since?: components["parameters"]["since"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["issue"][];
};
"404": unknown;
};
};
/**
* List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned.
*/
"orgs/list-members": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
/**
* Filter members returned in the list. Can be one of:
* \* `2fa_disabled` - Members without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled. Available for organization owners.
* \* `all` - All members the authenticated user can see.
*/
filter?: "2fa_disabled" | "all";
/**
* Filter members returned by their role. Can be one of:
* \* `all` - All members of the organization, regardless of role.
* \* `admin` - Organization owners.
* \* `member` - Non-owner organization members.
*/
role?: "all" | "admin" | "member";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
/**
* Response if requester is not an organization member
*/
"302": never;
"422": unknown;
};
};
/**
* Check if a user is, publicly or privately, a member of the organization.
*/
"orgs/check-membership-for-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Response if requester is an organization member and user is a member
*/
"204": never;
/**
* Response if requester is not an organization member
*/
"302": never;
/**
* Response if requester is an organization member and user is not a member
*/
"404": unknown;
};
};
/**
* Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories.
*/
"orgs/remove-member": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
};
};
/**
* In order to get a user's membership with an organization, the authenticated user must be an organization member.
*/
"orgs/get-membership-for-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-membership"];
};
"403": unknown;
"404": unknown;
};
};
/**
* Only authenticated organization owners can add a member to the organization or update the member's role.
*
* * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation.
*
* * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent.
*
* **Rate limits**
*
* To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period.
*/
"orgs/set-membership-for-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
requestBody: {
"application/json": {
/**
* The role to give the user in the organization. Can be one of:
* \* `admin` - The user will become an owner of the organization.
* \* `member` - The user will become a non-owner member of the organization.
*/
role?: "admin" | "member";
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-membership"];
};
"403": unknown;
"422": unknown;
};
};
/**
* In order to remove a user's membership with an organization, the authenticated user must be an organization owner.
*
* If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases.
*/
"orgs/remove-membership-for-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
"404": unknown;
};
};
/**
* Lists the most recent migrations.
*/
"migrations/list-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["migration"][];
};
};
};
/**
* Initiates the generation of a migration archive.
*/
"migrations/start-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* A list of repository names indicating which repositories should be migrated.
*/
repositories: string[];
/**
* Indicates whether repositories should be locked (to prevent manipulation) while migrating data.
*/
lock_repositories?: boolean;
/**
* Indicates whether attachments should be excluded from the migration (to reduce migration archive file size).
*/
exclude_attachments?: boolean;
exclude?: string[];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["migration"];
};
"404": unknown;
"422": unknown;
};
};
/**
* Fetches the status of a migration.
*
* The `state` of a migration can be one of the following values:
*
* * `pending`, which means the migration hasn't started yet.
* * `exporting`, which means the migration is in progress.
* * `exported`, which means the migration finished successfully.
* * `failed`, which means the migration failed.
*/
"migrations/get-status-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
migration_id: components["parameters"]["migration_id"];
};
};
responses: {
/**
* * `pending`, which means the migration hasn't started yet.
* * `exporting`, which means the migration is in progress.
* * `exported`, which means the migration finished successfully.
* * `failed`, which means the migration failed.
*/
"200": {
"application/json": components["schemas"]["migration"];
};
"404": unknown;
};
};
/**
* Fetches the URL to a migration archive.
*/
"migrations/download-archive-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
migration_id: components["parameters"]["migration_id"];
};
};
responses: {
/**
* response
*/
"302": never;
"404": unknown;
};
};
/**
* Deletes a previous migration archive. Migration archives are automatically deleted after seven days.
*/
"migrations/delete-archive-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
migration_id: components["parameters"]["migration_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
/**
* Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data.
*/
"migrations/unlock-repo-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
migration_id: components["parameters"]["migration_id"];
repo_name: components["parameters"]["repo_name"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
/**
* List all the repositories for this organization migration.
*/
"migrations/list-repos-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
migration_id: components["parameters"]["migration_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
"404": unknown;
};
};
/**
* List all users who are outside collaborators of an organization.
*/
"orgs/list-outside-collaborators": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
/**
* Filter the list of outside collaborators. Can be one of:
* \* `2fa_disabled`: Outside collaborators without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled.
* \* `all`: All outside collaborators.
*/
filter?: "2fa_disabled" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
};
};
/**
* When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://help.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)".
*/
"orgs/convert-member-to-outside-collaborator": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* User is getting converted asynchronously
*/
"202": unknown;
/**
* User was converted
*/
"204": never;
/**
* response
*/
"403": {
"application/json": { message?: string; documentation_url?: string };
};
"404": unknown;
};
};
/**
* Removing a user from this list will remove them from all the organization's repositories.
*/
"orgs/remove-outside-collaborator": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
/**
* Response if user is a member of the organization
*/
"422": {
"application/json": { message?: string; documentation_url?: string };
};
};
};
/**
* Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned.
*/
"projects/list-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
/**
* Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`.
*/
state?: "open" | "closed" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["project"][];
};
"422": unknown;
};
};
/**
* Creates an organization project board. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned.
*/
"projects/create-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* The name of the project.
*/
name: string;
/**
* The description of the project.
*/
body?: string;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["project"];
};
"401": unknown;
"403": unknown;
"404": unknown;
"410": unknown;
"422": unknown;
};
};
/**
* Members of an organization can choose to have their membership publicized or not.
*/
"orgs/list-public-members": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
};
};
"orgs/check-public-membership-for-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Response if user is a public member
*/
"204": never;
/**
* Response if user is not a public member
*/
"404": unknown;
};
};
/**
* The user can publicize their own membership. (A user cannot publicize the membership for another user.)
*
* Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*/
"orgs/set-public-membership-for-authenticated-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
};
};
"orgs/remove-public-membership-for-authenticated-user": {
parameters: {
path: {
org: components["parameters"]["org"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Lists repositories for the specified organization.
*/
"repos/list-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
/**
* Specifies the types of repositories you want returned. Can be one of `all`, `public`, `private`, `forks`, `sources`, `member`, `internal`. Default: `all`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `type` can also be `internal`.
*/
type?:
| "all"
| "public"
| "private"
| "forks"
| "sources"
| "member"
| "internal";
/**
* Can be one of `created`, `updated`, `pushed`, `full_name`.
*/
sort?: "created" | "updated" | "pushed" | "full_name";
/**
* Can be one of `asc` or `desc`. Default: when using `full_name`: `asc`, otherwise `desc`
*/
direction?: "asc" | "desc";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
};
};
/**
* Creates a new repository in the specified organization. The authenticated user must be a member of the organization.
*
* **OAuth scope requirements**
*
* When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include:
*
* * `public_repo` scope or `repo` scope to create a public repository
* * `repo` scope to create a private repository
*/
"repos/create-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* The name of the repository.
*/
name: string;
/**
* A short description of the repository.
*/
description?: string;
/**
* A URL with more information about the repository.
*/
homepage?: string;
/**
* Either `true` to create a private repository or `false` to create a public one.
*/
private?: boolean;
/**
* Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`. For more information, see "[Creating an internal repository](https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-repository-visibility#about-internal-repositories)" in the GitHub Help documentation.
* The `visibility` parameter overrides the `private` parameter when you use both parameters with the `nebula-preview` preview header.
*/
visibility?: "public" | "private" | "visibility" | "internal";
/**
* Either `true` to enable issues for this repository or `false` to disable them.
*/
has_issues?: boolean;
/**
* Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error.
*/
has_projects?: boolean;
/**
* Either `true` to enable the wiki for this repository or `false` to disable it.
*/
has_wiki?: boolean;
/**
* Either `true` to make this repo available as a template repository or `false` to prevent it.
*/
is_template?: boolean;
/**
* The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization.
*/
team_id?: number;
/**
* Pass `true` to create an initial commit with empty README.
*/
auto_init?: boolean;
/**
* Desired language or platform [.gitignore template](https://github.com/github/gitignore) to apply. Use the name of the template without the extension. For example, "Haskell".
*/
gitignore_template?: string;
/**
* Choose an [open source license template](https://choosealicense.com/) that best suits your needs, and then use the [license keyword](https://help.github.com/articles/licensing-a-repository/#searching-github-by-license-type) as the `license_template` string. For example, "mit" or "mpl-2.0".
*/
license_template?: string;
/**
* Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging.
*/
allow_squash_merge?: boolean;
/**
* Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits.
*/
allow_merge_commit?: boolean;
/**
* Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging.
*/
allow_rebase_merge?: boolean;
/**
* Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion.
*/
delete_branch_on_merge?: boolean;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["repository"];
};
"403": unknown;
"422": unknown;
};
};
/**
* Gets the summary of the free and paid GitHub Actions minutes used.
*
* Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)".
*
* Access tokens must have the `read:org` scope.
*/
"billing/get-github-actions-billing-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["actions-billing-usage"];
};
};
};
/**
* Gets the free and paid storage used for GitHub Packages in gigabytes.
*
* Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)."
*
* Access tokens must have the `read:org` scope.
*/
"billing/get-github-packages-billing-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["packages-billing-usage"];
};
};
};
/**
* Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages.
*
* Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)."
*
* Access tokens must have the `read:org` scope.
*/
"billing/get-shared-storage-billing-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["combined-billing-usage"];
};
};
};
/**
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* List IdP groups available in an organization. You can limit your page results using the `per_page` parameter. GitHub generates a url-encoded `page` token using a cursor value for where the next page begins. For more information on cursor pagination, see "[Offset and Cursor Pagination explained](https://dev.to/jackmarchant/offset-and-cursor-pagination-explained-b89)."
*
* The `per_page` parameter provides pagination for a list of IdP groups the authenticated user can access in an organization. For example, if the user `octocat` wants to see two groups per page in `octo-org` via cURL, it would look like this:
*/
"teams/list-idp-groups-for-org": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["group-mapping"];
};
};
};
/**
* Lists all teams in an organization that are visible to the authenticated user.
*/
"teams/list": {
parameters: {
path: {
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team"][];
};
"403": unknown;
};
};
/**
* To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://help.github.com/en/articles/setting-team-creation-permissions-in-your-organization)."
*
* When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)".
*/
  "teams/create": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The name of the team.
         */
        name: string;
        /**
         * The description of the team.
         */
        description?: string;
        /**
         * List GitHub IDs for organization members who will become team maintainers.
         */
        maintainers?: string[];
        /**
         * The full name (e.g., "organization-name/repository-name") of repositories to add the team to.
         */
        repo_names?: string[];
        /**
         * The level of privacy this team should have. The options are:
         * **For a non-nested team:**
         * \* `secret` - only visible to organization owners and members of this team.
         * \* `closed` - visible to all members of this organization.
         * Default: `secret`
         * **For a parent or child team:**
         * \* `closed` - visible to all members of this organization.
         * Default for child team: `closed`
         */
        privacy?: "secret" | "closed";
        /**
         * **Deprecated**. The permission that new repositories will be added to the team with when none is specified. Can be one of:
         * \* `pull` - team members can pull, but not push to or administer newly-added repositories.
         * \* `push` - team members can pull and push, but not administer newly-added repositories.
         * \* `admin` - team members can pull, push and administer newly-added repositories.
         */
        permission?: "pull" | "push" | "admin";
        /**
         * The ID of a team to set as the parent team.
         */
        parent_team_id?: number;
      };
    };
    responses: {
      /** 201: the newly created team (`team-full` schema). */
      "201": {
        "application/json": components["schemas"]["team-full"];
      };
      /** 403: response body untyped in this specification. */
      "403": unknown;
      /** 422: response body untyped in this specification. */
      "422": unknown;
    };
  };
/**
* Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`.
*/
  "teams/get-by-name": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
    };
    responses: {
      /** 200: the requested team (`team-full` schema). */
      "200": {
        "application/json": components["schemas"]["team-full"];
      };
      /** 404: team not found; response body untyped in this specification. */
      "404": unknown;
    };
  };
/**
* To edit a team, the authenticated user must either be an organization owner or a team maintainer.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`.
*/
  "teams/update-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The name of the team.
         */
        name: string;
        /**
         * The description of the team.
         */
        description?: string;
        /**
         * The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. When a team is nested, the `privacy` for parent teams cannot be `secret`. The options are:
         * **For a non-nested team:**
         * \* `secret` - only visible to organization owners and members of this team.
         * \* `closed` - visible to all members of this organization.
         * **For a parent or child team:**
         * \* `closed` - visible to all members of this organization.
         */
        privacy?: "secret" | "closed";
        /**
         * **Deprecated**. The permission that new repositories will be added to the team with when none is specified. Can be one of:
         * \* `pull` - team members can pull, but not push to or administer newly-added repositories.
         * \* `push` - team members can pull and push, but not administer newly-added repositories.
         * \* `admin` - team members can pull, push and administer newly-added repositories.
         */
        permission?: "pull" | "push" | "admin";
        /**
         * The ID of a team to set as the parent team.
         */
        parent_team_id?: number;
      };
    };
    responses: {
      /**
       * 201: the updated team (`team-full` schema).
       * NOTE(review): `201` for an update operation is unusual (a 200 would be
       * expected) — verify against the upstream OpenAPI description before relying on it.
       */
      "201": {
        "application/json": components["schemas"]["team-full"];
      };
    };
  };
/**
* To delete a team, the authenticated user must be an organization owner or team maintainer.
*
* If you are an organization owner, deleting a parent team will delete all of its child teams as well.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`.
*/
  "teams/delete-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
    };
    responses: {
      /** 204: team deleted; no response body. */
      "204": never;
    };
  };
/**
* List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`.
*/
  "teams/list-discussions-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
      query: {
        direction?: components["parameters"]["direction"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `team-discussion` schemas. */
      "200": {
        "application/json": components["schemas"]["team-discussion"][];
      };
    };
  };
/**
* Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`.
*/
  "teams/create-discussion-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The discussion post's title.
         */
        title: string;
        /**
         * The discussion post's body text.
         */
        body: string;
        /**
         * Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post.
         */
        private?: boolean;
      };
    };
    responses: {
      /** 201: the newly created discussion (`team-discussion` schema). */
      "201": {
        "application/json": components["schemas"]["team-discussion"];
      };
    };
  };
/**
* Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`.
*/
  "teams/get-discussion-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
      };
    };
    responses: {
      /** 200: the requested discussion (`team-discussion` schema). */
      "200": {
        "application/json": components["schemas"]["team-discussion"];
      };
    };
  };
/**
* Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`.
*/
  "teams/update-discussion-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
      };
    };
    // Both fields are optional: only the parameters provided are updated.
    requestBody: {
      "application/json": {
        /**
         * The discussion post's title.
         */
        title?: string;
        /**
         * The discussion post's body text.
         */
        body?: string;
      };
    };
    responses: {
      /** 200: the updated discussion (`team-discussion` schema). */
      "200": {
        "application/json": components["schemas"]["team-discussion"];
      };
    };
  };
/**
* Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`.
*/
  "teams/delete-discussion-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
      };
    };
    responses: {
      /** 204: discussion deleted; no response body. */
      "204": never;
    };
  };
/**
* List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`.
*/
  "teams/list-discussion-comments-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
      };
      query: {
        direction?: components["parameters"]["direction"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `team-discussion-comment` schemas. */
      "200": {
        "application/json": components["schemas"]["team-discussion-comment"][];
      };
    };
  };
/**
* Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`.
*/
  "teams/create-discussion-comment-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The discussion comment's body text.
         */
        body: string;
      };
    };
    responses: {
      /** 201: the newly created comment (`team-discussion-comment` schema). */
      "201": {
        "application/json": components["schemas"]["team-discussion-comment"];
      };
    };
  };
/**
* Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`.
*/
  "teams/get-discussion-comment-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
        comment_number: components["parameters"]["comment-number"];
      };
    };
    responses: {
      /** 200: the requested comment (`team-discussion-comment` schema). */
      "200": {
        "application/json": components["schemas"]["team-discussion-comment"];
      };
    };
  };
/**
* Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`.
*/
  "teams/update-discussion-comment-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
        comment_number: components["parameters"]["comment-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The discussion comment's body text.
         */
        body: string;
      };
    };
    responses: {
      /** 200: the updated comment (`team-discussion-comment` schema). */
      "200": {
        "application/json": components["schemas"]["team-discussion-comment"];
      };
    };
  };
/**
* Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`.
*/
  "teams/delete-discussion-comment-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
        comment_number: components["parameters"]["comment-number"];
      };
    };
    responses: {
      /** 204: comment deleted; no response body. */
      "204": never;
    };
  };
/**
 * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`.
*/
  "reactions/list-for-team-discussion-comment-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
        comment_number: components["parameters"]["comment-number"];
      };
      query: {
        /**
         * Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment.
         */
        content?:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `reaction` schemas. */
      "200": {
        "application/json": components["schemas"]["reaction"][];
      };
    };
  };
/**
* Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`.
*/
  "reactions/create-for-team-discussion-comment-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
        comment_number: components["parameters"]["comment-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment.
         */
        content:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
      };
    };
    responses: {
      /**
       * 201: the newly created reaction (`reaction` schema).
       * NOTE(review): the operation description says a `200 OK` is returned when the
       * reaction already exists, but only `201` is typed here — confirm upstream.
       */
      "201": {
        "application/json": components["schemas"]["reaction"];
      };
    };
  };
/**
* **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`.
*
* Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
  "reactions/delete-for-team-discussion-comment": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
        comment_number: components["parameters"]["comment-number"];
        reaction_id: components["parameters"]["reaction-id"];
      };
    };
    responses: {
      /** 204: reaction deleted; no response body. */
      "204": never;
    };
  };
/**
* List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`.
*/
  "reactions/list-for-team-discussion-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
      };
      query: {
        /**
         * Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion.
         */
        content?:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `reaction` schemas. */
      "200": {
        "application/json": components["schemas"]["reaction"][];
      };
    };
  };
/**
* Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`.
*/
  "reactions/create-for-team-discussion-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion.
         */
        content:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
      };
    };
    responses: {
      /**
       * 201: the newly created reaction (`reaction` schema).
       * NOTE(review): the operation description says a `200 OK` is returned when the
       * reaction already exists, but only `201` is typed here — confirm upstream.
       */
      "201": {
        "application/json": components["schemas"]["reaction"];
      };
    };
  };
/**
* **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`.
*
* Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
  "reactions/delete-for-team-discussion": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        discussion_number: components["parameters"]["discussion-number"];
        reaction_id: components["parameters"]["reaction-id"];
      };
    };
    responses: {
      /** 204: reaction deleted; no response body. */
      "204": never;
    };
  };
/**
* The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`.
*/
  "teams/list-pending-invitations-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `organization-invitation` schemas. */
      "200": {
        "application/json": components["schemas"]["organization-invitation"][];
      };
    };
  };
/**
* Team members will include the members of child teams.
*
* To list members in a team, the team must be visible to the authenticated user.
*/
  "teams/list-members-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
      query: {
        /**
         * Filters members returned by their role in the team. Can be one of:
         * \* `member` - normal members of the team.
         * \* `maintainer` - team maintainers.
         * \* `all` - all members of the team.
         */
        role?: "member" | "maintainer" | "all";
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `simple-user` schemas. */
      "200": {
        "application/json": components["schemas"]["simple-user"][];
      };
    };
  };
/**
* Team members will include the members of child teams.
*
* To get a user's membership with a team, the team must be visible to the authenticated user.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`.
*
* **Note:** The `role` for organization owners returns as `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team).
*/
  "teams/get-membership-for-user-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        username: components["parameters"]["username"];
      };
    };
    responses: {
      /** 200: the user's membership (`team-membership` schema). */
      "200": {
        "application/json": components["schemas"]["team-membership"];
      };
      /**
       * 404: response if user has no team membership (body untyped in this spec).
       */
      "404": unknown;
    };
  };
/**
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team.
*
* **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
*
* An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team.
*
 * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`.
*/
  "teams/add-or-update-membership-for-user-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        username: components["parameters"]["username"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The role that this user should have in the team. Can be one of:
         * \* `member` - a normal member of the team.
         * \* `maintainer` - a team maintainer. Able to add/remove other team members, promote other team members to team maintainer, and edit the team's name and description.
         */
        role?: "member" | "maintainer";
      };
    };
    responses: {
      /** 200: the resulting membership (`team-membership` schema). */
      "200": {
        "application/json": components["schemas"]["team-membership"];
      };
      /**
       * Response if team synchronization is set up
       */
      "403": unknown;
      /**
       * Response if you attempt to add an organization to a team
       */
      "422": {
        "application/json": {
          // Inline error shape: human-readable message plus per-field error records.
          message?: string;
          errors?: { code?: string; field?: string; resource?: string }[];
        };
      };
    };
  };
/**
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team.
*
* **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`.
*/
  "teams/remove-membership-for-user-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        username: components["parameters"]["username"];
      };
    };
    responses: {
      /** 204: membership removed; no response body. */
      "204": never;
      /**
       * Response if team synchronization is set up
       */
      "403": unknown;
    };
  };
/**
* Lists the organization projects for a team.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`.
*/
  "teams/list-projects-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `team-project` schemas. */
      "200": {
        "application/json": components["schemas"]["team-project"][];
      };
    };
  };
/**
* Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`.
*/
  "teams/check-permissions-for-project-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        project_id: components["parameters"]["project-id"];
      };
    };
    responses: {
      /** 200: the project with the team's permissions (`team-project` schema). */
      "200": {
        "application/json": components["schemas"]["team-project"];
      };
      /**
       * Response if project is not managed by this team
       */
      "404": unknown;
    };
  };
/**
* Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`.
*/
  "teams/add-or-update-project-permissions-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        project_id: components["parameters"]["project-id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The permission to grant to the team for this project. Can be one of:
         * \* `read` - team members can read, but not write to or administer this project.
         * \* `write` - team members can read and write, but not administer this project.
         * \* `admin` - team members can read, write and administer this project.
         * Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
         */
        permission?: "read" | "write" | "admin";
      };
    };
    responses: {
      /** 204: permission set; no response body. */
      "204": never;
      /**
       * Response if the project is not owned by the organization
       */
      "403": {
        // Inline error shape with a message and a documentation link.
        "application/json": { message?: string; documentation_url?: string };
      };
    };
  };
/**
* Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`.
*/
  "teams/remove-project-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        project_id: components["parameters"]["project-id"];
      };
    };
    responses: {
      /** 204: project removed from the team; no response body. */
      "204": never;
    };
  };
/**
* Lists a team's repositories visible to the authenticated user.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`.
*/
  "teams/list-repos-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /** 200: JSON array of `minimal-repository` schemas. */
      "200": {
        "application/json": components["schemas"]["minimal-repository"][];
      };
    };
  };
/**
* Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked.
*
* You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header.
*
* If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`.
*/
  "teams/check-permissions-for-repo-in-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        team_slug: components["parameters"]["team_slug"];
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Alternative response with repository permissions
       * (returned when the `application/vnd.github.v3.repository+json` accept
       * header is sent — see the operation description).
       */
      "200": {
        "application/vnd.github.v3.repository+json": components["schemas"]["team-repository"];
      };
      /**
       * Response if team has permission for the repository (no body).
       */
      "204": never;
      /**
       * Response if team does not have permission for the repository
       */
      "404": unknown;
    };
  };
/**
* To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`.
*
* For more information about the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)".
*/
"teams/add-or-update-repo-permissions-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
team_slug: components["parameters"]["team_slug"];
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
* The permission to grant the team on this repository. Can be one of:
* \* `pull` - team members can pull, but not push to or administer this repository.
* \* `push` - team members can pull and push, but not administer this repository.
* \* `admin` - team members can pull, push and administer this repository.
* \* `maintain` - team members can manage the repository without access to sensitive or destructive actions. Recommended for project managers. Only applies to repositories owned by organizations.
* \* `triage` - team members can proactively manage issues and pull requests without write access. Recommended for contributors who triage a repository. Only applies to repositories owned by organizations.
*
* If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository.
*/
permission?: "pull" | "push" | "admin" | "maintain" | "triage";
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`.
*/
"teams/remove-repo-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
team_slug: components["parameters"]["team_slug"];
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* List IdP groups connected to a team on GitHub.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`.
*/
"teams/list-idp-groups-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
team_slug: components["parameters"]["team_slug"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["group-mapping"];
};
};
};
/**
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`.
*/
"teams/create-or-update-idp-group-connections-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
team_slug: components["parameters"]["team_slug"];
};
};
requestBody: {
"application/json": {
/**
* The IdP groups you want to connect to a GitHub team. When updating, the new `groups` object will replace the original one. You must include any existing groups that you don't want to remove.
*/
groups: {
/**
* ID of the IdP group.
*/
group_id: string;
/**
* Name of the IdP group.
*/
group_name: string;
/**
* Description of the IdP group.
*/
group_description: string;
}[];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["group-mapping"];
};
};
};
/**
* Lists the child teams of the team specified by `{team_slug}`.
*
* **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`.
*/
"teams/list-child-in-org": {
parameters: {
path: {
org: components["parameters"]["org"];
team_slug: components["parameters"]["team_slug"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* Response if child teams exist
*/
"200": {
"application/json": components["schemas"]["team"][];
};
};
};
/**
 * Gets a single project card by its `card_id`.
 */
"projects/get-card": {
  parameters: {
    path: {
      card_id: components["parameters"]["card_id"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project-card"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    "404": unknown;
  };
};
/**
 * Updates an existing project card's note and/or archived state.
 */
"projects/update-card": {
  parameters: {
    path: {
      card_id: components["parameters"]["card_id"];
    };
  };
  requestBody: {
    "application/json": {
      /**
       * The project card's note
       */
      note?: string | null;
      /**
       * Whether or not the card is archived
       */
      archived?: boolean;
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project-card"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    "404": unknown;
    "422": unknown;
  };
};
/**
 * Deletes a project card.
 */
"projects/delete-card": {
  parameters: {
    path: {
      card_id: components["parameters"]["card_id"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
    "304": never;
    "401": unknown;
    /**
     * Forbidden
     */
    // Unlike most 403s in this file, this one has a modeled JSON error body.
    "403": {
      "application/json": {
        message?: string;
        documentation_url?: string;
        errors?: string[];
      };
    };
    "404": unknown;
  };
};
/**
 * Moves a project card to another position and, optionally, another column.
 */
"projects/move-card": {
  parameters: {
    path: {
      card_id: components["parameters"]["card_id"];
    };
  };
  requestBody: {
    "application/json": {
      /**
       * The position of the card in a column
       */
      // NOTE(review): format not modeled here — presumably "top", "bottom", or
      // "after:<card_id>"; confirm against the GitHub Projects (classic) API docs.
      position: string;
      /**
       * The unique identifier of the column the card should be moved to
       */
      column_id?: number;
    };
  };
  responses: {
    /**
     * response
     */
    // 201 body is an unmodeled object in the spec.
    "201": {
      "application/json": { [key: string]: any };
    };
    "304": never;
    "401": unknown;
    /**
     * Forbidden
     */
    "403": {
      "application/json": {
        message?: string;
        documentation_url?: string;
        errors?: {
          code?: string;
          message?: string;
          resource?: string;
          field?: string;
        }[];
      };
    };
    "422": unknown;
    /**
     * Service Unavailable
     */
    "503": {
      "application/json": {
        code?: string;
        message?: string;
        documentation_url?: string;
        errors?: { code?: string; message?: string }[];
      };
    };
  };
};
/**
 * Gets a single project column by its `column_id`.
 */
"projects/get-column": {
  parameters: {
    path: {
      column_id: components["parameters"]["column_id"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project-column"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    "404": unknown;
  };
};
/**
 * Renames a project column.
 */
"projects/update-column": {
  parameters: {
    path: {
      column_id: components["parameters"]["column_id"];
    };
  };
  requestBody: {
    "application/json": {
      /**
       * Name of the project column
       */
      name: string;
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project-column"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
  };
};
/**
 * Deletes a project column.
 */
"projects/delete-column": {
  parameters: {
    path: {
      column_id: components["parameters"]["column_id"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
    "304": never;
    "401": unknown;
    "403": unknown;
  };
};
/**
 * Lists the project cards in a column, optionally filtered by archived state.
 */
"projects/list-cards": {
  parameters: {
    path: {
      column_id: components["parameters"]["column_id"];
    };
    query: {
      /**
       * Filters the project cards that are returned by the card's state. Can be one of `all`,`archived`, or `not_archived`.
       */
      archived_state?: "all" | "archived" | "not_archived";
      per_page?: components["parameters"]["per_page"];
      page?: components["parameters"]["page"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project-card"][];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
  };
};
/**
 * Creates a project card in the specified column, from either a free-form note or an existing piece of content (see the request-body union below).
 *
 * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by the `pull_request` key.
 *
 * Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
 */
"projects/create-card": {
  parameters: {
    path: {
      column_id: components["parameters"]["column_id"];
    };
  };
  requestBody: {
    // Two mutually exclusive body shapes: a note card, or a content-backed card.
    "application/json":
      | {
          /**
           * The project card's note
           */
          note: string | null;
        }
      | {
          /**
           * The unique identifier of the content associated with the card
           */
          content_id: number;
          /**
           * The piece of content associated with the card
           */
          content_type: string;
        };
  };
  responses: {
    /**
     * response
     */
    "201": {
      "application/json": components["schemas"]["project-card"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    /**
     * Validation Failed
     */
    "422": {
      "application/json":
        | components["schemas"]["validation-error"]
        | components["schemas"]["validation-error-simple"];
    };
    /**
     * Service Unavailable
     */
    "503": {
      "application/json": {
        code?: string;
        message?: string;
        documentation_url?: string;
        errors?: { code?: string; message?: string }[];
      };
    };
  };
};
/**
 * Moves a project column to a new position within its project.
 */
"projects/move-column": {
  parameters: {
    path: {
      column_id: components["parameters"]["column_id"];
    };
  };
  requestBody: {
    "application/json": {
      /**
       * The position of the column in a project
       */
      // NOTE(review): format not modeled here — presumably "first", "last", or
      // "after:<column_id>"; confirm against the GitHub Projects (classic) API docs.
      position: string;
    };
  };
  responses: {
    /**
     * response
     */
    "201": {
      "application/json": { [key: string]: any };
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    "422": unknown;
  };
};
/**
 * Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned.
 */
"projects/get": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
  };
};
/**
 * Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned.
 */
"projects/update": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
    };
  };
  requestBody: {
    // All fields optional — this is a partial (PATCH-style) update.
    "application/json": {
      /**
       * Name of the project
       */
      name?: string;
      /**
       * Body of the project
       */
      body?: string | null;
      /**
       * State of the project; either 'open' or 'closed'
       */
      state?: string;
      /**
       * The baseline permission that all organization members have on this project
       */
      organization_permission?: "read" | "write" | "admin" | "none";
      /**
       * Whether or not this project can be seen by everyone.
       */
      private?: boolean;
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project"];
    };
    "304": never;
    "401": unknown;
    /**
     * Forbidden
     */
    "403": {
      "application/json": {
        message?: string;
        documentation_url?: string;
        errors?: string[];
      };
    };
    /**
     * Response if the authenticated user does not have access to the project
     */
    "404": unknown;
    "410": unknown;
    "422": unknown;
  };
};
/**
 * Deletes a project board. Returns a `404 Not Found` status if projects are disabled.
 */
"projects/delete": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
    };
  };
  responses: {
    /**
     * Delete Success
     */
    "204": never;
    "304": never;
    "401": unknown;
    /**
     * Forbidden
     */
    "403": {
      "application/json": {
        message?: string;
        documentation_url?: string;
        errors?: string[];
      };
    };
    "404": unknown;
    "410": unknown;
  };
};
/**
 * Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators.
 */
"projects/list-collaborators": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
    };
    query: {
      /**
       * Filters the collaborators by their affiliation. Can be one of:
       * \* `outside`: Outside collaborators of a project that are not a member of the project's organization.
       * \* `direct`: Collaborators with permissions to a project, regardless of organization membership status.
       * \* `all`: All collaborators the authenticated user can see.
       */
      affiliation?: "outside" | "direct" | "all";
      per_page?: components["parameters"]["per_page"];
      page?: components["parameters"]["page"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["simple-user"][];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    "404": unknown;
    "415": unknown;
    "422": unknown;
  };
};
/**
 * Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator.
 */
"projects/add-collaborator": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
      username: components["parameters"]["username"];
    };
  };
  requestBody: {
    "application/json": {
      /**
       * The permission to grant the collaborator.
       */
      permission?: "read" | "write" | "admin";
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
    "304": never;
    "401": unknown;
    "403": unknown;
    "404": unknown;
    "415": unknown;
    "422": unknown;
  };
};
/**
 * Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator.
 */
"projects/remove-collaborator": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
      username: components["parameters"]["username"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
    "304": never;
    "401": unknown;
    "403": unknown;
    "404": unknown;
    "415": unknown;
    "422": unknown;
  };
};
/**
 * Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level.
 */
"projects/get-permission-for-user": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
      username: components["parameters"]["username"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["repository-collaborator-permission"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    "404": unknown;
    "415": unknown;
    "422": unknown;
  };
};
/**
 * Lists the columns of a project board.
 */
"projects/list-columns": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
    };
    query: {
      per_page?: components["parameters"]["per_page"];
      page?: components["parameters"]["page"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["project-column"][];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
  };
};
/**
 * Creates a new column in a project board.
 */
"projects/create-column": {
  parameters: {
    path: {
      project_id: components["parameters"]["project-id"];
    };
  };
  requestBody: {
    "application/json": {
      /**
       * Name of the project column
       */
      name: string;
    };
  };
  responses: {
    /**
     * response
     */
    "201": {
      "application/json": components["schemas"]["project-column"];
    };
    "304": never;
    "401": unknown;
    "403": unknown;
    "422": unknown;
  };
};
/**
 * **Note:** Accessing this endpoint does not count against your REST API rate limit.
 *
 * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object.
 */
"rate-limit/get": {
  // No path or query parameters.
  parameters: {};
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["rate-limit-overview"];
    };
    "304": never;
    "404": unknown;
  };
};
/**
 * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/).
 *
 * OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments).
 *
 * @deprecated Use the per-resource delete-reaction endpoints instead (see the notice above).
 */
"reactions/delete-legacy": {
  parameters: {
    path: {
      reaction_id: components["parameters"]["reaction-id"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
    "304": never;
    "401": unknown;
    "403": unknown;
    "410": unknown;
    "415": unknown;
  };
};
/**
 * When you pass the `scarlet-witch-preview` media type, requests to get a repository will also return the repository's code of conduct if it can be detected from the repository's code of conduct file.
 *
 * The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network.
 */
"repos/get": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["full-repository"];
    };
    // 301 Moved Permanently — repository was renamed/transferred; no body.
    "301": never;
    "403": unknown;
    "404": unknown;
  };
};
/**
 * **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint.
 */
"repos/update": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  requestBody: {
    // All fields optional — this is a partial (PATCH-style) update.
    "application/json": {
      /**
       * The name of the repository.
       */
      name?: string;
      /**
       * A short description of the repository.
       */
      description?: string;
      /**
       * A URL with more information about the repository.
       */
      homepage?: string;
      /**
       * Either `true` to make the repository private or `false` to make it public. Default: `false`.
       * **Note**: You will get a `422` error if the organization restricts [changing repository visibility](https://help.github.com/articles/repository-permission-levels-for-an-organization#changing-the-visibility-of-repositories) to organization owners and a non-owner tries to change the value of private. **Note**: You will get a `422` error if the organization restricts [changing repository visibility](https://help.github.com/articles/repository-permission-levels-for-an-organization#changing-the-visibility-of-repositories) to organization owners and a non-owner tries to change the value of private.
       */
      private?: boolean;
      /**
       * Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`. The `visibility` parameter overrides the `private` parameter when you use both along with the `nebula-preview` preview header.
       */
      // NOTE(review): the "visibility" literal in this union looks like an upstream
      // OpenAPI-spec artifact — the description above only documents public/private/
      // internal. Do not hand-edit this generated file; confirm against the upstream
      // spec and regenerate if it has been corrected.
      visibility?: "public" | "private" | "visibility" | "internal";
      /**
       * Either `true` to enable issues for this repository or `false` to disable them.
       */
      has_issues?: boolean;
      /**
       * Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error.
       */
      has_projects?: boolean;
      /**
       * Either `true` to enable the wiki for this repository or `false` to disable it.
       */
      has_wiki?: boolean;
      /**
       * Either `true` to make this repo available as a template repository or `false` to prevent it.
       */
      is_template?: boolean;
      /**
       * Updates the default branch for this repository.
       */
      default_branch?: string;
      /**
       * Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging.
       */
      allow_squash_merge?: boolean;
      /**
       * Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits.
       */
      allow_merge_commit?: boolean;
      /**
       * Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging.
       */
      allow_rebase_merge?: boolean;
      /**
       * Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion.
       */
      delete_branch_on_merge?: boolean;
      /**
       * `true` to archive this repository. **Note**: You cannot unarchive repositories through the API.
       */
      archived?: boolean;
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["full-repository"];
    };
    "403": unknown;
    "404": unknown;
    "422": unknown;
  };
};
/**
 * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required.
 *
 * If an organization owner has configured the organization to prevent members from deleting organization-owned
 * repositories, you will get a `403 Forbidden` response.
 */
"repos/delete": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
    /**
     * If an organization owner has configured the organization to prevent members from deleting organization-owned repositories, a member will get this response:
     */
    "403": {
      "application/json": { message?: string; documentation_url?: string };
    };
    "404": unknown;
  };
};
/**
 * Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
 */
"actions/list-artifacts-for-repo": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
    query: {
      per_page?: components["parameters"]["per_page"];
      page?: components["parameters"]["page"];
    };
  };
  responses: {
    /**
     * response
     */
    // Paginated envelope: total count plus the current page of artifacts.
    "200": {
      "application/json": {
        total_count?: number;
        artifacts?: components["schemas"]["artifact"][];
      };
    };
  };
};
/**
 * Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
 */
"actions/get-artifact": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
      artifact_id: components["parameters"]["artifact_id"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["artifact"];
    };
  };
};
/**
 * Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
 */
"actions/delete-artifact": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
      artifact_id: components["parameters"]["artifact_id"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
  };
};
/**
 * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in
 * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to
 * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope.
 * GitHub Apps must have the `actions:read` permission to use this endpoint.
 */
"actions/download-artifact": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
      artifact_id: components["parameters"]["artifact_id"];
      // Per the description above, only "zip" is accepted, but the spec models it as a free string.
      archive_format: string;
    };
  };
  responses: {
    /**
     * response
     */
    // 302 redirect — download URL is in the `Location` header; no body.
    "302": never;
  };
};
/**
 * Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
 */
"actions/get-job-for-workflow-run": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
      job_id: components["parameters"]["job_id"];
    };
  };
  responses: {
    /**
     * response
     */
    // NOTE(review): the generated spec declares "202" for this read endpoint;
    // a plain GET would typically be 200. Do not hand-edit this generated file —
    // verify against the upstream OpenAPI description and regenerate if corrected.
    "202": {
      "application/json": components["schemas"]["job"];
    };
  };
};
/**
 * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look
 * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can
 * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must
 * have the `actions:read` permission to use this endpoint.
 */
"actions/download-job-logs-for-workflow-run": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
      job_id: components["parameters"]["job_id"];
    };
  };
  responses: {
    /**
     * response
     */
    // 302 redirect — log URL is in the `Location` header; no body.
    "302": never;
  };
};
/**
 * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions allowed to run in the repository.
 *
 * You must authenticate using an access token with the `repo` scope to use this
 * endpoint. GitHub Apps must have the `administration` repository permission to use this API.
 */
"actions/get-github-actions-permissions-repository": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["actions-repository-permissions"];
    };
  };
};
/**
 * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions in the repository.
 *
 * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions, then you cannot override them for the repository.
 *
 * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API.
 */
"actions/set-github-actions-permissions-repository": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  requestBody: {
    "application/json": {
      // `enabled` is required; `allowed_actions` only applies when Actions is enabled.
      enabled: components["schemas"]["actions-enabled"];
      allowed_actions?: components["schemas"]["allowed-actions"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
  };
};
/**
 * Gets the settings for selected actions that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)."
 *
 * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API.
 */
"actions/get-allowed-actions-repository": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["selected-actions"];
    };
  };
};
/**
 * Sets the actions that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)."
 *
 * If the repository belongs to an organization or enterprise that has `selected` actions set at the organization or enterprise levels, then you cannot override any of the allowed actions settings.
 *
 * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories.
 *
 * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API.
 */
"actions/set-allowed-actions-repository": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  requestBody: {
    "application/json": components["schemas"]["selected-actions"];
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
  };
};
/**
 * Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint.
 */
"actions/list-self-hosted-runners-for-repo": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
    query: {
      per_page?: components["parameters"]["per_page"];
      page?: components["parameters"]["page"];
    };
  };
  responses: {
    /**
     * response
     */
    // Paginated envelope: total count plus the current page of runners.
    "200": {
      "application/json": {
        total_count?: number;
        runners?: components["schemas"]["runner"][];
      };
    };
  };
};
/**
 * Lists binaries for the runner application that you can download and run.
 *
 * You must authenticate using an access token with the `repo` scope to use this endpoint.
 */
"actions/list-runner-applications-for-repo": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["runner-application"][];
    };
  };
};
/**
 * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate
 * using an access token with the `repo` scope to use this endpoint.
 *
 * #### Example using registration token
 *
 * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint.
 *
 * ```
 * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN
 * ```
 */
"actions/create-registration-token-for-repo": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  responses: {
    /**
     * response
     */
    "201": {
      "application/json": components["schemas"]["authentication-token"];
    };
  };
};
/**
 * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour.
 * You must authenticate using an access token with the `repo` scope to use this endpoint.
 *
 * #### Example using remove token
 *
 * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint.
 *
 * ```
 * ./config.sh remove --token TOKEN
 * ```
 */
"actions/create-remove-token-for-repo": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
    };
  };
  responses: {
    /**
     * response
     */
    "201": {
      "application/json": components["schemas"]["authentication-token"];
    };
  };
};
/**
 * Gets a specific self-hosted runner configured in a repository.
 *
 * You must authenticate using an access token with the `repo` scope to use this
 * endpoint.
 */
"actions/get-self-hosted-runner-for-repo": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
      runner_id: components["parameters"]["runner_id"];
    };
  };
  responses: {
    /**
     * response
     */
    "200": {
      "application/json": components["schemas"]["runner"];
    };
  };
};
/**
 * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists.
 *
 * You must authenticate using an access token with the `repo`
 * scope to use this endpoint.
 */
"actions/delete-self-hosted-runner-from-repo": {
  parameters: {
    path: {
      owner: components["parameters"]["owner"];
      repo: components["parameters"]["repo"];
      runner_id: components["parameters"]["runner_id"];
    };
  };
  responses: {
    /**
     * Empty response
     */
    "204": never;
  };
};
/**
* Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters).
*
* Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
*/
"actions/list-workflow-runs-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
actor?: components["parameters"]["actor"];
branch?: components["parameters"]["workflow-run-branch"];
event?: components["parameters"]["event"];
status?: components["parameters"]["workflow-run-status"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
workflow_runs?: components["schemas"]["workflow-run"][];
};
};
};
};
/**
* Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
*/
"actions/get-workflow-run": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["workflow-run"];
};
};
};
/**
* Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is
* private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use
* this endpoint.
*/
"actions/delete-workflow-run": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
*/
"actions/list-workflow-run-artifacts": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
artifacts?: components["schemas"]["artifact"][];
};
};
};
};
/**
* Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
*/
"actions/cancel-workflow-run": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
};
responses: {
/**
* response
*/
"202": unknown;
};
};
/**
* Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters).
*/
"actions/list-jobs-for-workflow-run": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
query: {
/**
* Filters jobs by their `completed_at` timestamp. Can be one of:
* \* `latest`: Returns jobs from the most recent execution of the workflow run.
* \* `all`: Returns all jobs for a workflow run, including from old executions of the workflow run.
*/
filter?: "latest" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
jobs?: components["schemas"]["job"][];
};
};
};
};
/**
* Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for
* `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use
* this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have
* the `actions:read` permission to use this endpoint.
*/
"actions/download-workflow-run-logs": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
};
responses: {
/**
* response
*/
"302": never;
};
};
/**
* Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
*/
"actions/delete-workflow-run-logs": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
*/
"actions/re-run-workflow": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
};
responses: {
/**
* response
*/
"201": unknown;
};
};
/**
* Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)".
*
* Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
*/
"actions/get-workflow-run-usage": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
run_id: components["parameters"]["run-id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["workflow-run-usage"];
};
};
};
/**
* Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint.
*/
"actions/list-repo-secrets": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
secrets?: components["schemas"]["actions-secret"][];
};
};
};
};
/**
* Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint.
*/
"actions/get-repo-public-key": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["actions-public-key"];
};
};
};
/**
* Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint.
*/
"actions/get-repo-secret": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
secret_name: components["parameters"]["secret_name"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["actions-secret"];
};
};
};
/**
* Creates or updates a repository secret with an encrypted value. Encrypt your secret using
* [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access
* token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use
* this endpoint.
*
* #### Example encrypting a secret using Node.js
*
* Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library.
*
* ```
* const sodium = require('tweetsodium');
*
* const key = "base64-encoded-public-key";
* const value = "plain-text-secret";
*
* // Convert the message and key to Uint8Array's (Buffer implements that interface)
* const messageBytes = Buffer.from(value);
* const keyBytes = Buffer.from(key, 'base64');
*
* // Encrypt using LibSodium.
* const encryptedBytes = sodium.seal(messageBytes, keyBytes);
*
* // Base64 the encrypted secret
* const encrypted = Buffer.from(encryptedBytes).toString('base64');
*
* console.log(encrypted);
* ```
*
*
* #### Example encrypting a secret using Python
*
* Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3.
*
* ```
* from base64 import b64encode
* from nacl import encoding, public
*
* def encrypt(public_key: str, secret_value: str) -> str:
* """Encrypt a Unicode string using the public key."""
* public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder())
* sealed_box = public.SealedBox(public_key)
* encrypted = sealed_box.encrypt(secret_value.encode("utf-8"))
* return b64encode(encrypted).decode("utf-8")
* ```
*
* #### Example encrypting a secret using C#
*
* Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package.
*
* ```
* var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret");
* var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU=");
*
* var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey);
*
* Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox));
* ```
*
* #### Example encrypting a secret using Ruby
*
* Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem.
*
* ```ruby
* require "rbnacl"
* require "base64"
*
* key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=")
* public_key = RbNaCl::PublicKey.new(key)
*
* box = RbNaCl::Boxes::Sealed.from_public_key(public_key)
* encrypted_secret = box.encrypt("my_secret")
*
* # Print the base64 encoded secret
* puts Base64.strict_encode64(encrypted_secret)
* ```
*/
"actions/create-or-update-repo-secret": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
secret_name: components["parameters"]["secret_name"];
};
};
requestBody: {
"application/json": {
/**
* Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/actions#get-a-repository-public-key) endpoint.
*/
encrypted_value?: string;
/**
* ID of the key you used to encrypt the secret.
*/
key_id?: string;
};
};
responses: {
/**
* Response when creating a secret
*/
"201": unknown;
/**
* Response when updating a secret
*/
"204": never;
};
};
/**
* Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint.
*/
"actions/delete-repo-secret": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
secret_name: components["parameters"]["secret_name"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
*/
"actions/list-repo-workflows": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
workflows?: components["schemas"]["workflow"][];
};
};
};
};
/**
* Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
*/
"actions/get-workflow": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
workflow_id: components["parameters"]["workflow-id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["workflow"];
};
};
};
/**
* Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`.
*
* You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
*/
"actions/disable-workflow": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
workflow_id: components["parameters"]["workflow-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`.
*
* You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)."
*
* You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)."
*/
"actions/create-workflow-dispatch": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
workflow_id: components["parameters"]["workflow-id"];
};
};
requestBody: {
"application/json": {
/**
* The git reference for the workflow. The reference can be a branch or tag name.
*/
ref: string;
/**
* Input keys and values configured in the workflow file. The maximum number of properties is 10. Any default properties configured in the workflow file will be used when `inputs` are omitted.
*/
inputs?: { [key: string]: string };
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`.
*
* You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
*/
"actions/enable-workflow": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
workflow_id: components["parameters"]["workflow-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters).
*
* Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope.
*/
"actions/list-workflow-runs": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
workflow_id: components["parameters"]["workflow-id"];
};
query: {
actor?: components["parameters"]["actor"];
branch?: components["parameters"]["workflow-run-branch"];
event?: components["parameters"]["event"];
status?: components["parameters"]["workflow-run-status"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
workflow_runs?: components["schemas"]["workflow-run"][];
};
};
};
};
/**
* Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)".
*
* You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
*/
"actions/get-workflow-usage": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
workflow_id: components["parameters"]["workflow-id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["workflow-usage"];
};
};
};
/**
* Lists the [available assignees](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository.
*/
"issues/list-assignees": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
"404": unknown;
};
};
/**
* Checks if a user has permission to be assigned to an issue in this repository.
*
* If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned.
*
* Otherwise a `404` status code is returned.
*/
"issues/check-user-can-be-assigned": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
assignee: string;
};
};
responses: {
/**
* If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned.
*/
"204": never;
/**
* Otherwise a `404` status code is returned.
*/
"404": {
"application/json": components["schemas"]["basic-error"];
};
};
};
/**
* Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)".
*/
"repos/enable-automated-security-fixes": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)".
*/
"repos/disable-automated-security-fixes": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
"repos/list-branches": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* Setting to `true` returns only protected branches. When set to `false`, only unprotected branches are returned. Omitting this parameter returns all branches.
*/
protected?: boolean;
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["short-branch"][];
};
"404": unknown;
};
};
"repos/get-branch": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
branch: components["parameters"]["branch"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["branch-with-protection"];
};
"404": unknown;
"415": unknown;
};
};
/**
* Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*/
"repos/get-branch-protection": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
branch: components["parameters"]["branch"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["branch-protection"];
};
"404": unknown;
};
};
/**
* Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* Protecting a branch requires admin or owner permissions to the repository.
*
* **Note**: Passing new arrays of `users` and `teams` replaces their previous values.
*
* **Note**: The list of users, apps, and teams in total is limited to 100 items.
*/
"repos/update-branch-protection": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
branch: components["parameters"]["branch"];
};
};
requestBody: {
"application/json": {
/**
* Require status checks to pass before merging. Set to `null` to disable.
*/
required_status_checks: {
/**
* Require branches to be up to date before merging.
*/
strict: boolean;
/**
* The list of status checks to require in order to merge into this branch
*/
contexts: string[];
} | null;
/**
* Enforce all configured restrictions for administrators. Set to `true` to enforce required status checks for repository administrators. Set to `null` to disable.
*/
enforce_admins: boolean | null;
/**
* Require at least one approving review on a pull request, before merging. Set to `null` to disable.
*/
required_pull_request_reviews: {
/**
* Specify which users and teams can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories.
*/
dismissal_restrictions?: {
/**
* The list of user `login`s with dismissal access
*/
users?: string[];
/**
* The list of team `slug`s with dismissal access
*/
teams?: string[];
};
/**
* Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit.
*/
dismiss_stale_reviews?: boolean;
/**
* Blocks merging pull requests until [code owners](https://help.github.com/articles/about-code-owners/) review them.
*/
require_code_owner_reviews?: boolean;
/**
* Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6.
*/
required_approving_review_count?: number;
} | null;
/**
* Restrict who can push to the protected branch. User, app, and team `restrictions` are only available for organization-owned repositories. Set to `null` to disable.
*/
restrictions: {
/**
* The list of user `login`s with push access
*/
users: string[];
/**
* The list of team `slug`s with push access
*/
teams: string[];
/**
* The list of app `slug`s with push access
*/
apps?: string[];
} | null;
/**
* Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. Set to `true` to enforce a linear commit history. Set to `false` to disable a linear commit Git history. Your repository must allow squash merging or rebase merging before you can enable a linear commit history. Default: `false`. For more information, see "[Requiring a linear commit history](https://help.github.com/github/administering-a-repository/requiring-a-linear-commit-history)" in the GitHub Help documentation.
*/
required_linear_history?: boolean;
/**
* Permits force pushes to the protected branch by anyone with write access to the repository. Set to `true` to allow force pushes. Set to `false` or `null` to block force pushes. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://help.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation."
*/
allow_force_pushes?: boolean | null;
/**
* Allows deletion of the protected branch by anyone with write access to the repository. Set to `false` to prevent deletion of the protected branch. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://help.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation.
*/
allow_deletions?: boolean;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["protected-branch"];
};
"403": unknown;
"404": unknown;
"415": unknown;
"422": unknown;
};
};
/**
* Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*/
"repos/delete-branch-protection": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
branch: components["parameters"]["branch"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
};
};
/**
* Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*/
"repos/get-admin-branch-protection": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
branch: components["parameters"]["branch"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["protected-branch-admin-enforced"];
};
};
};
/**
* Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled.
*/
"repos/set-admin-branch-protection": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
branch: components["parameters"]["branch"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["protected-branch-admin-enforced"];
};
};
};
/**
* Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled.
*/
"repos/delete-admin-branch-protection": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
branch: components["parameters"]["branch"];
};
};
responses: {
/**
* No Content
*/
"204": never;
"404": unknown;
};
};
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/get-pull-request-review-protection": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response — note the 200 body is keyed by the "luke-cage" preview media
       * type, not plain `application/json` like the sibling operations.
       */
      "200": {
        "application/vnd.github.luke-cage-preview+json": components["schemas"]["protected-branch-pull-request-review"];
      };
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled.
   *
   * **Note**: Passing new arrays of `users` and `teams` replaces their previous values.
   */
  "repos/update-pull-request-review-protection": {
    // All request-body fields are optional; omitted fields are not part of the update.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Specify which users and teams can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories.
         */
        dismissal_restrictions?: {
          /**
           * The list of user `login`s with dismissal access
           */
          users?: string[];
          /**
           * The list of team `slug`s with dismissal access
           */
          teams?: string[];
        };
        /**
         * Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit.
         */
        dismiss_stale_reviews?: boolean;
        /**
         * Blocks merging pull requests until [code owners](https://help.github.com/articles/about-code-owners/) have reviewed.
         */
        require_code_owner_reviews?: boolean;
        /**
         * Specifies the number of reviewers required to approve pull requests. Use a number between 1 and 6.
         */
        required_approving_review_count?: number;
      };
    };
    responses: {
      /**
       * response — the updated review-protection settings.
       */
      "200": {
        "application/json": components["schemas"]["protected-branch-pull-request-review"];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/delete-pull-request-review-protection": {
    // Success is 204 with no body; 404 payload is untyped.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * No Content
       */
      "204": never;
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://help.github.com/articles/signing-commits-with-gpg) in GitHub Help.
   *
   * **Note**: You must enable branch protection to require signed commits.
   */
  "repos/get-commit-signature-protection": {
    // Reuses the same enabled/url status schema as the admin-enforcement operations.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["protected-branch-admin-enforced"];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits.
   */
  "repos/create-commit-signature-protection": {
    // Path-only operation (no request body); 200 returns the new enforcement status.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["protected-branch-admin-enforced"];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits.
   */
  "repos/delete-commit-signature-protection": {
    // Success is 204 with no body; 404 payload is untyped.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * No Content
       */
      "204": never;
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/get-status-checks-protection": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response — the branch's required-status-checks policy.
       */
      "200": {
        "application/json": components["schemas"]["status-check-policy"];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled.
   */
  "repos/update-status-check-protection": {
    // Both request-body fields are optional; 200 returns the updated policy.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Require branches to be up to date before merging.
         */
        strict?: boolean;
        /**
         * The list of status checks to require in order to merge into this branch
         */
        contexts?: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["status-check-policy"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/remove-status-check-protection": {
    // 204 is the only declared response; no error statuses are typed here.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * No Content
       */
      "204": never;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/get-all-status-check-contexts": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response — a plain array of status-check context names.
       */
      "200": {
        "application/json": string[];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/add-status-check-contexts": {
    // Only sibling contexts operation that also declares a 403 response.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * contexts parameter — names of the status-check contexts to add (required).
         */
        contexts: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of context names.
       */
      "200": {
        "application/json": string[];
      };
      "403": unknown;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/set-status-check-contexts": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * contexts parameter — names of the status-check contexts to set (required).
         */
        contexts: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of context names.
       */
      "200": {
        "application/json": string[];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   */
  "repos/remove-status-check-contexts": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * contexts parameter — names of the status-check contexts to remove (required).
         */
        contexts: string[];
      };
    };
    responses: {
      /**
       * response — the remaining list of context names.
       */
      "200": {
        "application/json": string[];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Lists who has access to this protected branch.
   *
   * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories.
   */
  "repos/get-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response — combined users/teams/apps restriction policy.
       */
      "200": {
        "application/json": components["schemas"]["branch-restriction-policy"];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Disables the ability to restrict who can push to this branch.
   */
  "repos/delete-access-restrictions": {
    // 204 is the only declared response; no error statuses are typed here.
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * No Content
       */
      "204": never;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch.
   */
  "repos/get-apps-with-access-to-protected-branch": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response — array of GitHub App ("integration") objects.
       */
      "200": {
        "application/json": components["schemas"]["integration"][];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch.
   *
   * | Type    | Description                                                                                                                                                |
   * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- |
   * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. |
   */
  "repos/add-app-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * apps parameter — GitHub App `slug`s (required; see the table above).
         */
        apps: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of apps with push access.
       */
      "200": {
        "application/json": components["schemas"]["integration"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch.
   *
   * | Type    | Description                                                                                                                                                |
   * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- |
   * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. |
   */
  "repos/set-app-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * apps parameter — GitHub App `slug`s (required; see the table above).
         */
        apps: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of apps with push access.
       */
      "200": {
        "application/json": components["schemas"]["integration"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch.
   *
   * | Type    | Description                                                                                                                                                |
   * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- |
   * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. |
   */
  "repos/remove-app-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * apps parameter — GitHub App `slug`s to remove (required; see the table above).
         */
        apps: string[];
      };
    };
    responses: {
      /**
       * response — the remaining list of apps with push access.
       */
      "200": {
        "application/json": components["schemas"]["integration"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Lists the teams who have push access to this branch. The list includes child teams.
   */
  "repos/get-teams-with-access-to-protected-branch": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response — array of team objects.
       */
      "200": {
        "application/json": components["schemas"]["team"][];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Grants the specified teams push access for this branch. You can also give push access to child teams.
   *
   * | Type    | Description                                                                                                                                 |
   * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
   * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items.  |
   */
  "repos/add-team-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * teams parameter — team `slug`s (required; see the table above).
         */
        teams: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of teams with push access.
       */
      "200": {
        "application/json": components["schemas"]["team"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams.
   *
   * | Type    | Description                                                                                                                                 |
   * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
   * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items.  |
   */
  "repos/set-team-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * teams parameter — team `slug`s (required; see the table above).
         */
        teams: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of teams with push access.
       */
      "200": {
        "application/json": components["schemas"]["team"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Removes the ability of a team to push to this branch. You can also remove push access for child teams.
   *
   * | Type    | Description                                                                                                                                          |
   * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
   * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items.  |
   */
  "repos/remove-team-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * teams parameter — team `slug`s to remove (required; see the table above).
         */
        teams: string[];
      };
    };
    responses: {
      /**
       * response — the remaining list of teams with push access.
       */
      "200": {
        "application/json": components["schemas"]["team"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Lists the people who have push access to this branch.
   */
  "repos/get-users-with-access-to-protected-branch": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    responses: {
      /**
       * response — array of user objects.
       */
      "200": {
        "application/json": components["schemas"]["simple-user"][];
      };
      "404": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Grants the specified people push access for this branch.
   *
   * | Type    | Description                                                                                                                     |
   * | ------- | ------------------------------------------------------------------------------------------------------------------------------- |
   * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items.   |
   */
  "repos/add-user-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * users parameter — usernames (required; see the table above).
         */
        users: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of users with push access.
       */
      "200": {
        "application/json": components["schemas"]["simple-user"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people.
   *
   * | Type    | Description                                                                                                                     |
   * | ------- | ------------------------------------------------------------------------------------------------------------------------------- |
   * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items.   |
   */
  "repos/set-user-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * users parameter — usernames (required; see the table above).
         */
        users: string[];
      };
    };
    responses: {
      /**
       * response — the resulting list of users with push access.
       */
      "200": {
        "application/json": components["schemas"]["simple-user"][];
      };
      "422": unknown;
    };
  };
  /**
   * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Removes the ability of a user to push to this branch.
   *
   * | Type    | Description                                                                                                                                     |
   * | ------- | ----------------------------------------------------------------------------------------------------------------------------------------------- |
   * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items.   |
   */
  "repos/remove-user-access-restrictions": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        branch: components["parameters"]["branch"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * users parameter — usernames to remove (required; see the table above).
         */
        users: string[];
      };
    };
    responses: {
      /**
       * response — the remaining list of users with push access.
       */
      "200": {
        "application/json": components["schemas"]["simple-user"][];
      };
      "422": unknown;
    };
  };
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
*
* Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs.
*
* In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs.
*/
"checks/create": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
* The name of the check. For example, "code-coverage".
*/
name: string;
/**
* The SHA of the commit.
*/
head_sha: string;
/**
* The URL of the integrator's site that has the full details of the check. If the integrator does not provide this, then the homepage of the GitHub app is used.
*/
details_url?: string;
/**
* A reference for the run on the integrator's system.
*/
external_id?: string;
/**
* The current status. Can be one of `queued`, `in_progress`, or `completed`.
*/
status?: "queued" | "in_progress" | "completed";
/**
* The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
started_at?: string;
/**
* **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. Can be one of `success`, `failure`, `neutral`, `cancelled`, `skipped`, `timed_out`, or `action_required`. When the conclusion is `action_required`, additional details should be provided on the site specified by `details_url`.
* **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. Only GitHub can change a check run conclusion to `stale`.
*/
conclusion?:
| "success"
| "failure"
| "neutral"
| "cancelled"
| "skipped"
| "timed_out"
| "action_required";
/**
* The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
completed_at?: string;
/**
* Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object) description.
*/
output?: {
/**
* The title of the check run.
*/
title: string;
/**
* The summary of the check run. This parameter supports Markdown.
*/
summary: string;
/**
* The details of the check run. This parameter supports Markdown.
*/
text?: string;
/**
* Adds information from your analysis to specific lines of code. Annotations are visible on GitHub in the **Checks** and **Files changed** tab of the pull request. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about how you can view annotations on GitHub, see "[About status checks](https://help.github.com/articles/about-status-checks#checks)". See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object) description for details about how to use this parameter.
*/
annotations?: {
/**
* The path of the file to add an annotation to. For example, `assets/css/main.css`.
*/
path: string;
/**
* The start line of the annotation.
*/
start_line: number;
/**
* The end line of the annotation.
*/
end_line: number;
/**
* The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values.
*/
start_column?: number;
/**
* The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values.
*/
end_column?: number;
/**
* The level of the annotation. Can be one of `notice`, `warning`, or `failure`.
*/
annotation_level: "notice" | "warning" | "failure";
/**
* A short description of the feedback for these lines of code. The maximum size is 64 KB.
*/
message: string;
/**
* The title that represents the annotation. The maximum size is 255 characters.
*/
title?: string;
/**
* Details about this annotation. The maximum size is 64 KB.
*/
raw_details?: string;
}[];
/**
* Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#images-object) description for details.
*/
images?: {
/**
* The alternative text for the image.
*/
alt: string;
/**
* The full URL of the image.
*/
image_url: string;
/**
* A short image description.
*/
caption?: string;
}[];
};
/**
* Displays a button on GitHub that can be clicked to alert your app to do additional tasks. For example, a code linting app can display a button that automatically fixes detected errors. The button created in this object is displayed after the check run completes. When a user clicks the button, GitHub sends the [`check_run.requested_action` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) to your app. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)."
*/
actions?: {
/**
* The text to be displayed on a button in the web UI. The maximum size is 20 characters.
*/
label: string;
/**
* A short explanation of what this action would do. The maximum size is 40 characters.
*/
description: string;
/**
* A reference for the action on the integrator's system. The maximum size is 20 characters.
*/
identifier: string;
}[];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["check-run"];
};
};
};
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
*
* Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository.
*/
"checks/get": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
check_run_id: components["parameters"]["check_run_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["check-run"];
};
};
};
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
*
* Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs.
*/
"checks/update": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
check_run_id: components["parameters"]["check_run_id"];
};
};
requestBody: {
"application/json": {
/**
* The name of the check. For example, "code-coverage".
*/
name?: string;
/**
* The URL of the integrator's site that has the full details of the check.
*/
details_url?: string;
/**
* A reference for the run on the integrator's system.
*/
external_id?: string;
/**
* This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
started_at?: string;
/**
* The current status. Can be one of `queued`, `in_progress`, or `completed`.
*/
status?: "queued" | "in_progress" | "completed";
/**
* **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. Can be one of `success`, `failure`, `neutral`, `cancelled`, `skipped`, `timed_out`, or `action_required`.
* **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. Only GitHub can change a check run conclusion to `stale`.
*/
conclusion?:
| "success"
| "failure"
| "neutral"
| "cancelled"
| "skipped"
| "timed_out"
| "action_required";
/**
* The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
completed_at?: string;
/**
* Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object-1) description.
*/
output?: {
/**
* **Required**.
*/
title?: string;
/**
* Can contain Markdown.
*/
summary: string;
/**
* Can contain Markdown.
*/
text?: string;
/**
* Adds information from your analysis to specific lines of code. Annotations are visible in GitHub's pull request UI. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about annotations in the UI, see "[About status checks](https://help.github.com/articles/about-status-checks#checks)". See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object-1) description for details.
*/
annotations?: {
/**
* The path of the file to add an annotation to. For example, `assets/css/main.css`.
*/
path: string;
/**
* The start line of the annotation.
*/
start_line: number;
/**
* The end line of the annotation.
*/
end_line: number;
/**
* The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values.
*/
start_column?: number;
/**
* The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values.
*/
end_column?: number;
/**
* The level of the annotation. Can be one of `notice`, `warning`, or `failure`.
*/
annotation_level: "notice" | "warning" | "failure";
/**
* A short description of the feedback for these lines of code. The maximum size is 64 KB.
*/
message: string;
/**
* The title that represents the annotation. The maximum size is 255 characters.
*/
title?: string;
/**
* Details about this annotation. The maximum size is 64 KB.
*/
raw_details?: string;
}[];
/**
* Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#images-object-1) description for details.
*/
images?: {
/**
* The alternative text for the image.
*/
alt: string;
/**
* The full URL of the image.
*/
image_url: string;
/**
* A short image description.
*/
caption?: string;
}[];
};
/**
* Possible further actions the integrator can perform, which a user may trigger. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)."
*/
actions?: {
/**
* The text to be displayed on a button in the web UI. The maximum size is 20 characters.
*/
label: string;
/**
* A short explanation of what this action would do. The maximum size is 40 characters.
*/
description: string;
/**
* A reference for the action on the integrator's system. The maximum size is 20 characters.
*/
identifier: string;
}[];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["check-run"];
};
};
};
/**
* Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository.
*/
"checks/list-annotations": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
check_run_id: components["parameters"]["check_run_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["check-annotation"][];
};
};
};
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`.
*
* By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites.
*/
"checks/create-suite": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
* The sha of the head commit.
*/
head_sha: string;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["check-suite"];
};
};
};
/**
* Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites.
*/
"checks/set-suites-preferences": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
* Enables or disables automatic creation of CheckSuite events upon pushes to the repository. Enabled by default. See the [`auto_trigger_checks` object](https://docs.github.com/rest/reference/checks#auto_trigger_checks-object) description for details.
*/
auto_trigger_checks?: {
/**
* The `id` of the GitHub App.
*/
app_id: number;
/**
* Set to `true` to enable automatic creation of CheckSuite events upon pushes to the repository, or `false` to disable them.
*/
setting: boolean;
}[];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["check-suite-preference"];
};
};
};
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`.
*
* Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository.
*/
"checks/get-suite": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
check_suite_id: components["parameters"]["check_suite_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["check-suite"];
};
};
};
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
*
* Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository.
*/
"checks/list-for-suite": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
check_suite_id: components["parameters"]["check_suite_id"];
};
query: {
check_name?: components["parameters"]["check_name"];
status?: components["parameters"]["status"];
/**
* Filters check runs by their `completed_at` timestamp. Can be one of `latest` (returning the most recent check runs) or `all`.
*/
filter?: "latest" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
check_runs?: components["schemas"]["check-run"][];
};
};
};
};
/**
* Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared.
*
* To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository.
*/
"checks/rerequest-suite": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
check_suite_id: components["parameters"]["check_suite_id"];
};
};
responses: {
/**
* response
*/
"201": unknown;
};
};
/**
* Lists all open code scanning alerts for the default branch (usually `main` or `master`). For private repos, you must use an access token with the `repo` scope. For public repos, you must use an access token with `public_repo` and `repo:security_events` scopes. GitHub Apps must have the `security_events` read permission to use this endpoint.
*/
"code-scanning/list-alerts-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* Set to `open`, `fixed`, or `dismissed` to list code scanning alerts in a specific state.
*/
state?: components["schemas"]["code-scanning-alert-state"];
/**
* Set a full Git reference to list alerts for a specific branch. The `ref` must be formatted as `refs/heads/<branch name>`.
*/
ref?: components["schemas"]["code-scanning-alert-ref"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["code-scanning-alert-code-scanning-alert-items"][];
};
/**
* Response if the ref does not match an existing ref
*/
"404": unknown;
"503": unknown;
};
};
/**
* Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint.
*
* The security `alert_number` is found at the end of the security alert's URL. For example, the security alert ID for `https://github.com/Octo-org/octo-repo/security/code-scanning/88` is `88`.
*/
"code-scanning/get-alert": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
alert_number: number;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["code-scanning-alert-code-scanning-alert"];
};
"404": unknown;
"503": unknown;
};
};
/**
* Updates the status of a single code scanning alert. For private repos, you must use an access token with the `repo` scope. For public repos, you must use an access token with `public_repo` and `repo:security_events` scopes.
* GitHub Apps must have the `security_events` write permission to use this endpoint.
*/
"code-scanning/update-alert": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
alert_number: components["parameters"]["alert_number"];
};
};
requestBody: {
"application/json": {
state: components["schemas"]["code-scanning-alert-set-state"];
dismissed_reason?: components["schemas"]["code-scanning-alert-dismissed-reason"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["code-scanning-alert-code-scanning-alert"];
};
/**
* Response if the repository is archived
*/
"403": unknown;
/**
* Response when code scanning is not available and you should try again at a later time
*/
"503": unknown;
};
};
/**
* List the details of recent code scanning analyses for a repository. For private repos, you must use an access token with the `repo` scope. For public repos, you must use an access token with `public_repo` and `repo:security_events` scopes. GitHub Apps must have the `security_events` read permission to use this endpoint.
*/
"code-scanning/list-recent-analyses": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* Set a full Git reference to list alerts for a specific branch. The `ref` must be formatted as `refs/heads/<branch name>`.
*/
ref?: components["schemas"]["code-scanning-analysis-ref"];
/**
* Set a single code scanning tool name to filter alerts by tool.
*/
tool_name?: components["schemas"]["code-scanning-analysis-tool-name"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["code-scanning-analysis-code-scanning-analysis"][];
};
};
};
/**
* Upload a SARIF file containing the results of a code scanning analysis to make the results available in a repository.
* For private repos, you must use an access token with the `repo` scope. For public repos, you must use an access token with `public_repo` and `repo:security_events` scopes. GitHub Apps must have the `security_events` write permission to use this endpoint.
*/
"code-scanning/upload-sarif": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"];
ref: components["schemas"]["code-scanning-analysis-ref"];
sarif: components["schemas"]["code-scanning-analysis-sarif-file"];
/**
* The base directory used in the analysis, as it appears in the SARIF file.
* This property is used to convert file paths from absolute to relative, so that alerts can be mapped to their correct location in the repository.
*/
checkout_uri?: string;
/**
* The time that the analysis run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
started_at?: string;
tool_name: components["schemas"]["code-scanning-analysis-tool-name"];
};
};
responses: {
/**
* response
*/
"202": unknown;
/**
* Response if the `sarif` field is invalid
*/
"400": unknown;
/**
* Response if the repository is archived
*/
"403": unknown;
/**
* Response if `commit_sha` or `ref` cannot be found
*/
"404": unknown;
/**
* Response if the `sarif` field is too large
*/
"413": unknown;
};
};
/**
* For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners.
*
* Team members will include the members of child teams.
*/
"repos/list-collaborators": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* Filter collaborators returned by their affiliation. Can be one of:
* \* `outside`: All outside collaborators of an organization-owned repository.
* \* `direct`: All collaborators with permissions to an organization-owned repository, regardless of organization membership status.
* \* `all`: All collaborators the authenticated user can see.
*/
affiliation?: "outside" | "direct" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["collaborator"][];
};
"404": unknown;
};
};
/**
* For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners.
*
* Team members will include the members of child teams.
*/
"repos/check-collaborator": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Response if user is a collaborator
*/
"204": never;
/**
* Response if user is not a collaborator
*/
"404": unknown;
};
};
/**
* This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
*
* For more information the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)".
*
* Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*
* The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations).
*
* **Rate limits**
*
* To prevent abuse, you are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository.
*/
"repos/add-collaborator": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
username: components["parameters"]["username"];
};
};
requestBody: {
"application/json": {
/**
* The permission to grant the collaborator. **Only valid on organization-owned repositories.** Can be one of:
* \* `pull` - can pull, but not push to or administer this repository.
* \* `push` - can pull and push, but not administer this repository.
* \* `admin` - can pull, push and administer this repository.
* \* `maintain` - Recommended for project managers who need to manage the repository without access to sensitive or destructive actions.
* \* `triage` - Recommended for contributors who need to proactively manage issues and pull requests without write access.
*/
permission?: "pull" | "push" | "admin" | "maintain" | "triage";
permissions?: string;
};
};
responses: {
/**
* Response when a new invitation is created
*/
"201": {
"application/json": components["schemas"]["repository-invitation"];
};
/**
* Response when person is already a collaborator
*/
"204": never;
"403": unknown;
"422": unknown;
};
};
"repos/remove-collaborator": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`.
*/
"repos/get-collaborator-permission-level": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Response if user has admin permissions
*/
"200": {
"application/json": components["schemas"]["repository-collaborator-permission"];
};
"404": unknown;
};
};
/**
* Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/).
*
* Comments are ordered by ascending ID.
*/
"repos/list-commit-comments-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["commit-comment"][];
};
};
};
"repos/get-commit-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["commit-comment"];
};
"404": unknown;
};
};
"repos/update-commit-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
};
requestBody: {
"application/json": {
/**
* The contents of the comment
*/
body: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["commit-comment"];
};
"404": unknown;
};
};
"repos/delete-commit-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
/**
* List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments).
*/
"reactions/list-for-commit-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
query: {
/**
* Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a commit comment.
*/
content?:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["reaction"][];
};
"404": unknown;
"415": unknown;
};
};
/**
* Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this commit comment.
*/
"reactions/create-for-commit-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
};
requestBody: {
"application/json": {
/**
* The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the commit comment.
*/
content:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
};
};
responses: {
/**
* Reaction exists
*/
"200": {
"application/json": components["schemas"]["reaction"];
};
/**
* Reaction created
*/
"201": {
"application/json": components["schemas"]["reaction"];
};
"415": unknown;
"422": unknown;
};
};
/**
* **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`.
*
* Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments).
*/
"reactions/delete-for-commit-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
reaction_id: components["parameters"]["reaction-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* **Signature verification object**
*
* The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
*
* | Name | Type | Description |
* | ---- | ---- | ----------- |
* | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
* | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
* | `signature` | `string` | The signature that was extracted from the commit. |
* | `payload` | `string` | The value that was signed. |
*
* These are the possible values for `reason` in the `verification` object:
*
* | Value | Description |
* | ----- | ----------- |
* | `expired_key` | The key that made the signature is expired. |
* | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
* | `gpgverify_error` | There was an error communicating with the signature verification service. |
* | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
* | `unsigned` | The object does not include a signature. |
* | `unknown_signature_type` | A non-PGP signature was found in the commit. |
* | `no_user` | No user was associated with the `committer` email address in the commit. |
* | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. |
* | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
* | `unknown_key` | The key that made the signature has not been registered with any user's account. |
* | `malformed_signature` | There was an error parsing the signature. |
* | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
* | `valid` | None of the above errors applied, so the signature is considered to be verified. |
*/
"repos/list-commits": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* SHA or branch to start listing commits from. Default: the repository’s default branch (usually `master`).
*/
sha?: string;
/**
* Only commits containing this file path will be returned.
*/
path?: string;
/**
* GitHub login or email address by which to filter by commit author.
*/
author?: string;
since?: components["parameters"]["since"];
/**
* Only commits before this date will be returned. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
until?: string;
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["commit"][];
};
"400": unknown;
"404": unknown;
"409": unknown;
"500": unknown;
};
};
/**
* Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch.
*/
"repos/list-branches-for-head-commit": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
commit_sha: components["parameters"]["commit_sha"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["branch-short"][];
};
"415": unknown;
"422": unknown;
};
};
/**
* Use the `:commit_sha` to specify the commit that will have its comments listed.
*/
"repos/list-comments-for-commit": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
commit_sha: components["parameters"]["commit_sha"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["commit-comment"][];
};
};
};
/**
* Create a comment for a commit using its `:commit_sha`.
*
* This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
*/
"repos/create-commit-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
commit_sha: components["parameters"]["commit_sha"];
};
};
requestBody: {
"application/json": {
/**
* The contents of the comment.
*/
body: string;
/**
* Relative path of the file to comment on.
*/
path?: string;
/**
* Line index in the diff to comment on.
*/
position?: number;
/**
* **Deprecated**. Use **position** parameter instead. Line number in the file to comment on.
*/
line?: number;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["commit-comment"];
};
"403": unknown;
"422": unknown;
};
};
/**
* Lists all pull requests containing the provided commit SHA, which can be from any point in the commit history. The results will include open and closed pull requests. Additional preview headers may be required to see certain details for associated pull requests, such as whether a pull request is in a draft state. For more information about previews that might affect this endpoint, see the [List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests) endpoint.
*/
"repos/list-pull-requests-associated-with-commit": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
commit_sha: components["parameters"]["commit_sha"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-simple"][];
};
"415": unknown;
};
};
/**
* Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint.
*
* **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing.
*
* You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property.
*
* To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag.
*
* **Signature verification object**
*
* The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
*
* | Name | Type | Description |
* | ---- | ---- | ----------- |
* | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
* | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
* | `signature` | `string` | The signature that was extracted from the commit. |
* | `payload` | `string` | The value that was signed. |
*
* These are the possible values for `reason` in the `verification` object:
*
* | Value | Description |
* | ----- | ----------- |
* | `expired_key` | The key that made the signature is expired. |
* | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
* | `gpgverify_error` | There was an error communicating with the signature verification service. |
* | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
* | `unsigned` | The object does not include a signature. |
* | `unknown_signature_type` | A non-PGP signature was found in the commit. |
* | `no_user` | No user was associated with the `committer` email address in the commit. |
   * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. |
* | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
* | `unknown_key` | The key that made the signature has not been registered with any user's account. |
* | `malformed_signature` | There was an error parsing the signature. |
* | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
* | `valid` | None of the above errors applied, so the signature is considered to be verified. |
*/
"repos/get-commit": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* ref+ parameter
*/
ref: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["commit"];
};
"404": unknown;
"422": unknown;
"500": unknown;
};
};
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
*
* Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository.
*/
"checks/list-for-ref": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* ref+ parameter
*/
ref: string;
};
query: {
check_name?: components["parameters"]["check_name"];
status?: components["parameters"]["status"];
/**
* Filters check runs by their `completed_at` timestamp. Can be one of `latest` (returning the most recent check runs) or `all`.
*/
filter?: "latest" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
check_runs?: components["schemas"]["check-run"][];
};
};
};
};
/**
* **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`.
*
* Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository.
*/
"checks/list-suites-for-ref": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* ref+ parameter
*/
ref: string;
};
query: {
/**
* Filters check suites by GitHub App `id`.
*/
app_id?: number;
check_name?: components["parameters"]["check_name"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": {
total_count?: number;
check_suites?: components["schemas"]["check-suite"][];
};
};
};
};
/**
* Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name.
*
* The most recent status for each context is returned, up to 100. This field [paginates](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination) if there are over 100 contexts.
*
* Additionally, a combined `state` is returned. The `state` is one of:
*
* * **failure** if any of the contexts report as `error` or `failure`
* * **pending** if there are no statuses or a context is `pending`
* * **success** if the latest status for all contexts is `success`
*/
"repos/get-combined-status-for-ref": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* ref+ parameter
*/
ref: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["combined-commit-status"];
};
"404": unknown;
};
};
/**
* Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one.
*
* This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`.
*/
"repos/list-commit-statuses-for-ref": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* ref+ parameter
*/
ref: string;
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["status"][];
};
"301": never;
};
};
  /**
   * This method returns the contents of the repository's code of conduct file, if one is detected.
   */
  "codes-of-conduct/get-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Response — the code of conduct detected for the repository.
       */
      "200": {
        "application/json": components["schemas"]["code-of-conduct"];
      };
    };
  };
/**
* This endpoint will return all community profile metrics, including an
* overall health score, repository description, the presence of documentation, detected
* code of conduct, detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE,
* README, and CONTRIBUTING files.
*
* `content_reports_enabled` is only returned for organization-owned repositories.
*/
"repos/get-community-profile-metrics": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["community-profile"];
};
};
};
/**
* Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `<USERNAME>:branch`.
*
* The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats.
*
* The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file.
*
* **Working with large comparisons**
*
* The response will include a comparison of up to 250 commits. If you are working with a larger commit range, you can use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) to enumerate all commits in the range.
*
* For comparisons with extremely large diffs, you may receive an error response indicating that the diff took too long
* to generate. You can typically resolve this error by using a smaller commit range.
*
* **Signature verification object**
*
* The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
*
* | Name | Type | Description |
* | ---- | ---- | ----------- |
* | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
* | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
* | `signature` | `string` | The signature that was extracted from the commit. |
* | `payload` | `string` | The value that was signed. |
*
* These are the possible values for `reason` in the `verification` object:
*
* | Value | Description |
* | ----- | ----------- |
* | `expired_key` | The key that made the signature is expired. |
* | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
* | `gpgverify_error` | There was an error communicating with the signature verification service. |
* | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
* | `unsigned` | The object does not include a signature. |
* | `unknown_signature_type` | A non-PGP signature was found in the commit. |
* | `no_user` | No user was associated with the `committer` email address in the commit. |
   * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. |
* | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
* | `unknown_key` | The key that made the signature has not been registered with any user's account. |
* | `malformed_signature` | There was an error parsing the signature. |
* | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
* | `valid` | None of the above errors applied, so the signature is considered to be verified. |
*/
"repos/compare-commits": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
base: string;
head: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["commit-comparison"];
};
"404": unknown;
"500": unknown;
};
};
/**
* Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit
* `:path`, you will receive the contents of all files in the repository.
*
* Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for
* retrieving the raw content or rendered HTML (when supported). All content types support [a custom media
* type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent
* object format.
*
* **Note**:
* * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees).
* * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees
* API](https://docs.github.com/rest/reference/git#get-a-tree).
* * This API supports files up to 1 megabyte in size.
*
* #### If the content is a directory
* The response will be an array of objects, one object for each item in the directory.
* When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value
* _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW).
* In the next major version of the API, the type will be returned as "submodule".
*
* #### If the content is a symlink
* If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the
   * API responds with the content of the file (in the format shown in the example). Otherwise, the API responds with an object
* describing the symlink itself.
*
* #### If the content is a submodule
* The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific
* commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out
* the submodule at that specific commit.
*
* If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the
* github.com URLs (`html_url` and `_links["html"]`) will have null values.
*/
"repos/get-content": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* path+ parameter
*/
path: string;
};
query: {
/**
* The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`)
*/
ref?: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/vnd.github.v3.object": components["schemas"]["content-tree"];
"application/json":
| components["schemas"]["content-directory"]
| components["schemas"]["content-file"]
| components["schemas"]["content-symlink"]
| components["schemas"]["content-submodule"];
};
"302": never;
"403": unknown;
"404": unknown;
};
};
/**
* Creates a new file or replaces an existing file in a repository.
*/
"repos/create-or-update-file-contents": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* path+ parameter
*/
path: string;
};
};
requestBody: {
"application/json": {
/**
* The commit message.
*/
message: string;
/**
* The new file content, using Base64 encoding.
*/
content: string;
/**
* **Required if you are updating a file**. The blob SHA of the file being replaced.
*/
sha?: string;
/**
* The branch name. Default: the repository’s default branch (usually `master`)
*/
branch?: string;
/**
* The person that committed the file. Default: the authenticated user.
*/
committer?: {
/**
* The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted.
*/
name: string;
/**
* The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted.
*/
email: string;
date?: string;
};
/**
* The author of the file. Default: The `committer` or the authenticated user if you omit `committer`.
*/
author?: {
/**
* The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted.
*/
name: string;
/**
* The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted.
*/
email: string;
date?: string;
};
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["file-commit"];
};
/**
* response
*/
"201": {
"application/json": components["schemas"]["file-commit"];
};
"404": unknown;
"409": unknown;
"422": unknown;
};
};
/**
* Deletes a file in a repository.
*
* You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author.
*
* The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used.
*
* You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code.
*/
"repos/delete-file": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* path+ parameter
*/
path: string;
};
};
requestBody: {
"application/json": {
/**
* The commit message.
*/
message: string;
/**
* The blob SHA of the file being replaced.
*/
sha: string;
/**
* The branch name. Default: the repository’s default branch (usually `master`)
*/
branch?: string;
/**
* object containing information about the committer.
*/
committer?: {
/**
* The name of the author (or committer) of the commit
*/
name?: string;
/**
* The email of the author (or committer) of the commit
*/
email?: string;
};
/**
* object containing information about the author.
*/
author?: {
/**
* The name of the author (or committer) of the commit
*/
name?: string;
/**
* The email of the author (or committer) of the commit
*/
email?: string;
};
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["file-commit"];
};
"404": unknown;
"409": unknown;
"422": unknown;
"503": unknown;
};
};
/**
* Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance.
*
* GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information.
*/
"repos/list-contributors": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* Set to `1` or `true` to include anonymous contributors in results.
*/
anon?: string;
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* Response if repository contains content
*/
"200": {
"application/json": components["schemas"]["contributor"][];
};
/**
* Response if repository is empty
*/
"204": never;
"403": unknown;
"404": unknown;
};
};
/**
* Simple filtering of deployments is available via query parameters:
*/
"repos/list-deployments": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* The SHA recorded at creation time.
*/
sha?: string;
/**
* The name of the ref. This can be a branch, tag, or SHA.
*/
ref?: string;
/**
* The name of the task for the deployment (e.g., `deploy` or `deploy:migrations`).
*/
task?: string;
/**
* The name of the environment that was deployed to (e.g., `staging` or `production`).
*/
environment?: string;
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["deployment"][];
};
};
};
/**
* Deployments offer a few configurable parameters with certain defaults.
*
* The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them
* before we merge a pull request.
*
* The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have
* multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter
* makes it easier to track which environments have requested deployments. The default environment is `production`.
*
* The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If
* the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds,
* the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will
* return a failure response.
*
* By default, [commit statuses](https://docs.github.com/rest/reference/repos#statuses) for every submitted context must be in a `success`
* state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to
* specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do
* not require any contexts or create any commit statuses, the deployment will always succeed.
*
* The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text
* field that will be passed on when a deployment event is dispatched.
*
* The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might
* be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an
* application with debugging enabled.
*
* Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref.
*
* #### Merged branch response
* You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating
* a deployment. This auto-merge happens when:
* * Auto-merge option is enabled in the repository
* * Topic branch does not include the latest changes on the base branch, which is `master` in the response example
* * There are no merge conflicts
*
* If there are no new commits in the base branch, a new request to create a deployment should give a successful
* response.
*
* #### Merge conflict response
* This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't
* be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts.
*
* #### Failed commit status checks
* This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success`
* status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`.
*/
"repos/create-deployment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
* The ref to deploy. This can be a branch, tag, or SHA.
*/
ref: string;
/**
* Specifies a task to execute (e.g., `deploy` or `deploy:migrations`).
*/
task?: string;
/**
* Attempts to automatically merge the default branch into the requested ref, if it's behind the default branch.
*/
auto_merge?: boolean;
/**
* The [status](https://docs.github.com/rest/reference/repos#statuses) contexts to verify against commit status checks. If you omit this parameter, GitHub verifies all unique contexts before creating a deployment. To bypass checking entirely, pass an empty array. Defaults to all unique contexts.
*/
required_contexts?: string[];
/**
* JSON payload with extra information about the deployment.
*/
payload?: string;
/**
* Name for the target deployment environment (e.g., `production`, `staging`, `qa`).
*/
environment?: string;
/**
* Short description of the deployment.
*/
description?: string | null;
/**
* Specifies if the given environment is specific to the deployment and will no longer exist at some point in the future. Default: `false`
         * **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type.
*/
transient_environment?: boolean;
/**
* Specifies if the given environment is one that end-users directly interact with. Default: `true` when `environment` is `production` and `false` otherwise.
* **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type.
*/
production_environment?: boolean;
created_at?: string;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["deployment"];
};
/**
* Merged branch response
*/
"202": {
"application/json": { message?: string };
};
/**
* response
*/
"409": {
"application/json": { message?: string; documentation_url?: string };
};
"422": unknown;
};
};
  /**
   * Returns a single deployment of the repository, identified by its `deployment_id`.
   */
  "repos/get-deployment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        deployment_id: components["parameters"]["deployment_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["deployment"];
      };
      "404": unknown;
    };
  };
/**
* To ensure there can always be an active deployment, you can only delete an _inactive_ deployment. Anyone with `repo` or `repo_deployment` scopes can delete an inactive deployment.
*
* To set a deployment as inactive, you must:
*
* * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment.
* * Mark the active deployment as inactive by adding any non-successful deployment status.
*
* For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/deployments/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)."
*/
"repos/delete-deployment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
deployment_id: components["parameters"]["deployment_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
"422": unknown;
};
};
/**
* Users with pull access can view deployment statuses for a deployment:
*/
"repos/list-deployment-statuses": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
deployment_id: components["parameters"]["deployment_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["deployment-status"][];
};
"404": unknown;
};
};
/**
* Users with `push` access can create deployment statuses for a given deployment.
*
* GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope.
*/
"repos/create-deployment-status": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
deployment_id: components["parameters"]["deployment_id"];
};
};
requestBody: {
"application/json": {
/**
         * The state of the status. Can be one of `error`, `failure`, `inactive`, `in_progress`, `queued`, `pending`, or `success`. **Note:** To use the `inactive` state, you must provide the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. To use the `in_progress` and `queued` states, you must provide the [`application/vnd.github.flash-preview+json`](https://docs.github.com/rest/overview/api-previews#deployment-statuses) custom media type. When you set a transient deployment to `inactive`, the deployment will be shown as `destroyed` in GitHub.
*/
state:
| "error"
| "failure"
| "inactive"
| "in_progress"
| "queued"
| "pending"
| "success";
/**
* The target URL to associate with this status. This URL should contain output to keep the user updated while the task is running or serve as historical information for what happened in the deployment. **Note:** It's recommended to use the `log_url` parameter, which replaces `target_url`.
*/
target_url?: string;
/**
* The full URL of the deployment's output. This parameter replaces `target_url`. We will continue to accept `target_url` to support legacy uses, but we recommend replacing `target_url` with `log_url`. Setting `log_url` will automatically set `target_url` to the same value. Default: `""`
* **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type.
*/
log_url?: string;
/**
* A short description of the status. The maximum description length is 140 characters.
*/
description?: string;
/**
* Name for the target deployment environment, which can be changed when setting a deploy status. For example, `production`, `staging`, or `qa`. **Note:** This parameter requires you to use the [`application/vnd.github.flash-preview+json`](https://docs.github.com/rest/overview/api-previews#deployment-statuses) custom media type.
*/
environment?: "production" | "staging" | "qa";
/**
* Sets the URL for accessing your environment. Default: `""`
* **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type.
*/
environment_url?: string;
/**
* Adds a new `inactive` status to all prior non-transient, non-production environment deployments with the same repository and `environment` name as the created status's deployment. An `inactive` status is only added to deployments that had a `success` state. Default: `true`
* **Note:** To add an `inactive` status to `production` environments, you must use the [`application/vnd.github.flash-preview+json`](https://docs.github.com/rest/overview/api-previews#deployment-statuses) custom media type.
* **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type.
*/
auto_inactive?: boolean;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["deployment-status"];
};
"422": unknown;
};
};
  /**
   * Users with pull access can view a deployment status for a deployment:
   */
  "repos/get-deployment-status": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        deployment_id: components["parameters"]["deployment_id"];
        status_id: number;
      };
    };
    responses: {
      /**
       * response: a deployment-status object
       */
      "200": {
        "application/json": components["schemas"]["deployment-status"];
      };
      "404": unknown;
      "415": unknown;
    };
  };
  /**
   * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)."
   *
   * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow.
   *
   * To give you write access to the repository, you must use a personal access token with the `repo` scope. For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation.
   *
   * This input example shows how you can use the `client_payload` as a test to debug your workflow.
   */
  "repos/create-dispatch-event": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * A custom webhook event name.
         */
        event_type: string;
        /**
         * JSON payload with extra information about the webhook event that your action or workflow may use.
         */
        client_payload?: { [key: string]: any };
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "422": unknown;
    };
  };
"activity/list-repo-events": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["event"][];
};
};
};
"repos/list-forks": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
* The sort order. Can be either `newest`, `oldest`, or `stargazers`.
*/
sort?: "newest" | "oldest" | "stargazers";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
"400": unknown;
};
};
  /**
   * Create a fork for the authenticated user.
   *
   * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com).
   */
  "repos/create-fork": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Optional parameter to specify the organization name if forking into an organization.
         */
        organization?: string;
      };
    };
    responses: {
      /**
       * response: the repository object for the new fork (creation completes asynchronously)
       */
      "202": {
        "application/json": components["schemas"]["repository"];
      };
      "400": unknown;
      "403": unknown;
      "404": unknown;
      "422": unknown;
    };
  };
"git/create-blob": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
* The new blob's content.
*/
content: string;
/**
* The encoding used for `content`. Currently, `"utf-8"` and `"base64"` are supported.
*/
encoding?: string;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["short-blob"];
};
"403": unknown;
"404": unknown;
"409": unknown;
"422": unknown;
};
};
  /**
   * The `content` in the response will always be Base64 encoded.
   *
   * _Note_: This API supports blobs up to 100 megabytes in size.
   */
  "git/get-blob": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        file_sha: string;
      };
    };
    responses: {
      /**
       * response: a blob object
       */
      "200": {
        "application/json": components["schemas"]["blob"];
      };
      "403": unknown;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects).
   *
   * **Signature verification object**
   *
   * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
   *
   * | Name | Type | Description |
   * | ---- | ---- | ----------- |
   * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
   * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
   * | `signature` | `string` | The signature that was extracted from the commit. |
   * | `payload` | `string` | The value that was signed. |
   *
   * These are the possible values for `reason` in the `verification` object:
   *
   * | Value | Description |
   * | ----- | ----------- |
   * | `expired_key` | The key that made the signature is expired. |
   * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
   * | `gpgverify_error` | There was an error communicating with the signature verification service. |
   * | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
   * | `unsigned` | The object does not include a signature. |
   * | `unknown_signature_type` | A non-PGP signature was found in the commit. |
   * | `no_user` | No user was associated with the `committer` email address in the commit. |
   * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. |
   * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
   * | `unknown_key` | The key that made the signature has not been registered with any user's account. |
   * | `malformed_signature` | There was an error parsing the signature. |
   * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
   * | `valid` | None of the above errors applied, so the signature is considered to be verified. |
   */
  "git/create-commit": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The commit message
         */
        message: string;
        /**
         * The SHA of the tree object this commit points to
         */
        tree: string;
        /**
         * The SHAs of the commits that were the parents of this commit. If omitted or empty, the commit will be written as a root commit. For a single parent, an array of one SHA should be provided; for a merge commit, an array of more than one should be provided.
         */
        parents?: string[];
        /**
         * Information about the author of the commit. By default, the `author` will be the authenticated user and the current date. See the `author` and `committer` object below for details.
         */
        author?: {
          /**
           * The name of the author (or committer) of the commit
           */
          name?: string;
          /**
           * The email of the author (or committer) of the commit
           */
          email?: string;
          /**
           * Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
           */
          date?: string;
        };
        /**
         * Information about the person who is making the commit. By default, `committer` will use the information set in `author`. See the `author` and `committer` object below for details.
         */
        committer?: {
          /**
           * The name of the author (or committer) of the commit
           */
          name?: string;
          /**
           * The email of the author (or committer) of the commit
           */
          email?: string;
          /**
           * Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
           */
          date?: string;
        };
        /**
         * The [PGP signature](https://en.wikipedia.org/wiki/Pretty_Good_Privacy) of the commit. GitHub adds the signature to the `gpgsig` header of the created commit. For a commit signature to be verifiable by Git or GitHub, it must be an ASCII-armored detached PGP signature over the string commit as it would be written to the object database. To pass a `signature` parameter, you need to first manually create a valid PGP signature, which can be complicated. You may find it easier to [use the command line](https://git-scm.com/book/id/v2/Git-Tools-Signing-Your-Work) to create signed commits.
         */
        signature?: string;
      };
    };
    responses: {
      /**
       * response: the created git-commit object
       */
      "201": {
        "application/json": components["schemas"]["git-commit"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects).
   *
   * **Signature verification object**
   *
   * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
   *
   * | Name | Type | Description |
   * | ---- | ---- | ----------- |
   * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
   * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
   * | `signature` | `string` | The signature that was extracted from the commit. |
   * | `payload` | `string` | The value that was signed. |
   *
   * These are the possible values for `reason` in the `verification` object:
   *
   * | Value | Description |
   * | ----- | ----------- |
   * | `expired_key` | The key that made the signature is expired. |
   * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
   * | `gpgverify_error` | There was an error communicating with the signature verification service. |
   * | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
   * | `unsigned` | The object does not include a signature. |
   * | `unknown_signature_type` | A non-PGP signature was found in the commit. |
   * | `no_user` | No user was associated with the `committer` email address in the commit. |
   * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. |
   * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
   * | `unknown_key` | The key that made the signature has not been registered with any user's account. |
   * | `malformed_signature` | There was an error parsing the signature. |
   * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
   * | `valid` | None of the above errors applied, so the signature is considered to be verified. |
   */
  "git/get-commit": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        commit_sha: components["parameters"]["commit_sha"];
      };
    };
    responses: {
      /**
       * response: a git-commit object
       */
      "200": {
        "application/json": components["schemas"]["git-commit"];
      };
      "404": unknown;
    };
  };
  /**
   * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/<branch name>` for branches and `tags/<tag name>` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array.
   *
   * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`.
   *
   * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)".
   *
   * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`.
   */
  "git/list-matching-refs": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        /**
         * ref+ parameter
         */
        ref: string;
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response: an array of git-ref objects
       */
      "200": {
        "application/json": components["schemas"]["git-ref"][];
      };
    };
  };
  /**
   * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/<branch name>` for branches and `tags/<tag name>` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned.
   *
   * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)".
   */
  "git/get-ref": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        /**
         * ref+ parameter
         */
        ref: string;
      };
    };
    responses: {
      /**
       * response: a git-ref object
       */
      "200": {
        "application/json": components["schemas"]["git-ref"];
      };
      "404": unknown;
    };
  };
  /**
   * Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches.
   */
  "git/create-ref": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The name of the fully qualified reference (ie: `refs/heads/master`). If it doesn't start with 'refs' and have at least two slashes, it will be rejected.
         */
        ref: string;
        /**
         * The SHA1 value for this reference.
         */
        sha: string;
        /**
         * NOTE(review): present in the OpenAPI spec but has no documented purpose — confirm against the upstream spec before relying on it.
         */
        key?: string;
      };
    };
    responses: {
      /**
       * response: the created git-ref object
       */
      "201": {
        "application/json": components["schemas"]["git-ref"];
      };
      "422": unknown;
    };
  };
"git/update-ref": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* ref+ parameter
*/
ref: string;
};
};
requestBody: {
"application/json": {
/**
* The SHA1 value to set this reference to
*/
sha: string;
/**
* Indicates whether to force the update or to make sure the update is a fast-forward update. Leaving this out or setting it to `false` will make sure you're not overwriting work.
*/
force?: boolean;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["git-ref"];
};
"422": unknown;
};
};
"git/delete-ref": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
/**
* ref+ parameter
*/
ref: string;
};
};
responses: {
/**
* Empty response
*/
"204": never;
"422": unknown;
};
};
  /**
   * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary.
   *
   * **Signature verification object**
   *
   * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
   *
   * | Name | Type | Description |
   * | ---- | ---- | ----------- |
   * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
   * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
   * | `signature` | `string` | The signature that was extracted from the commit. |
   * | `payload` | `string` | The value that was signed. |
   *
   * These are the possible values for `reason` in the `verification` object:
   *
   * | Value | Description |
   * | ----- | ----------- |
   * | `expired_key` | The key that made the signature is expired. |
   * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
   * | `gpgverify_error` | There was an error communicating with the signature verification service. |
   * | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
   * | `unsigned` | The object does not include a signature. |
   * | `unknown_signature_type` | A non-PGP signature was found in the commit. |
   * | `no_user` | No user was associated with the `committer` email address in the commit. |
   * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. |
   * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
   * | `unknown_key` | The key that made the signature has not been registered with any user's account. |
   * | `malformed_signature` | There was an error parsing the signature. |
   * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
   * | `valid` | None of the above errors applied, so the signature is considered to be verified. |
   */
  "git/create-tag": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The tag's name. This is typically a version (e.g., "v0.0.1").
         */
        tag: string;
        /**
         * The tag message.
         */
        message: string;
        /**
         * The SHA of the git object this is tagging.
         */
        object: string;
        /**
         * The type of the object we're tagging. Normally this is a `commit` but it can also be a `tree` or a `blob`.
         */
        type: "commit" | "tree" | "blob";
        /**
         * An object with information about the individual creating the tag.
         */
        tagger?: {
          /**
           * The name of the author of the tag
           */
          name?: string;
          /**
           * The email of the author of the tag
           */
          email?: string;
          /**
           * When this object was tagged. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
           */
          date?: string;
        };
      };
    };
    responses: {
      /**
       * response: the created git-tag object
       */
      "201": {
        "application/json": components["schemas"]["git-tag"];
      };
      "422": unknown;
    };
  };
  /**
   * **Signature verification object**
   *
   * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
   *
   * | Name | Type | Description |
   * | ---- | ---- | ----------- |
   * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
   * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
   * | `signature` | `string` | The signature that was extracted from the commit. |
   * | `payload` | `string` | The value that was signed. |
   *
   * These are the possible values for `reason` in the `verification` object:
   *
   * | Value | Description |
   * | ----- | ----------- |
   * | `expired_key` | The key that made the signature is expired. |
   * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
   * | `gpgverify_error` | There was an error communicating with the signature verification service. |
   * | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
   * | `unsigned` | The object does not include a signature. |
   * | `unknown_signature_type` | A non-PGP signature was found in the commit. |
   * | `no_user` | No user was associated with the `committer` email address in the commit. |
   * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. |
   * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
   * | `unknown_key` | The key that made the signature has not been registered with any user's account. |
   * | `malformed_signature` | There was an error parsing the signature. |
   * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
   * | `valid` | None of the above errors applied, so the signature is considered to be verified. |
   */
  "git/get-tag": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        tag_sha: string;
      };
    };
    responses: {
      /**
       * response: a git-tag object
       */
      "200": {
        "application/json": components["schemas"]["git-tag"];
      };
      "404": unknown;
    };
  };
  /**
   * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure.
   *
   * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)."
   */
  "git/create-tree": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Objects (of `path`, `mode`, `type`, and `sha`) specifying a tree structure.
         */
        tree: {
          /**
           * The file referenced in the tree.
           */
          path?: string;
          /**
           * The file mode; one of `100644` for file (blob), `100755` for executable (blob), `040000` for subdirectory (tree), `160000` for submodule (commit), or `120000` for a blob that specifies the path of a symlink.
           */
          mode?: "100644" | "100755" | "040000" | "160000" | "120000";
          /**
           * Either `blob`, `tree`, or `commit`.
           */
          type?: "blob" | "tree" | "commit";
          /**
           * The SHA1 checksum ID of the object in the tree. Also called `tree.sha`. If the value is `null` then the file will be deleted.
           *
           * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error.
           */
          sha?: string | null;
          /**
           * The content you want this file to have. GitHub will write this blob out and use that SHA for this entry. Use either this, or `tree.sha`.
           *
           * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error.
           */
          content?: string;
        }[];
        /**
         * The SHA1 of the tree you want to update with new data. If you don't set this, the commit will be created on top of everything; however, it will only contain your change, the rest of your files will show up as deleted.
         */
        base_tree?: string;
      };
    };
    responses: {
      /**
       * response: the created git-tree object
       */
      "201": {
        "application/json": components["schemas"]["git-tree"];
      };
      "403": unknown;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Returns a single tree using the SHA1 value for that tree.
   *
   * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time.
   */
  "git/get-tree": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        tree_sha: string;
      };
      query: {
        /**
         * Setting this parameter to any value returns the objects or subtrees referenced by the tree specified in `:tree_sha`. For example, setting `recursive` to any of the following will enable returning objects or subtrees: `0`, `1`, `"true"`, and `"false"`. Omit this parameter to prevent recursively returning objects or subtrees.
         */
        recursive?: string;
      };
    };
    responses: {
      /**
       * response: a git-tree object
       */
      "200": {
        "application/json": components["schemas"]["git-tree"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
"repos/list-webhooks": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["hook"][];
};
"404": unknown;
};
};
  /**
   * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can
   * share the same `config` as long as those webhooks do not have any `events` that overlap.
   */
  "repos/create-webhook": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Use `web` to create a webhook. Default: `web`. This parameter only accepts the value `web`.
         */
        name?: string;
        /**
         * Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params).
         */
        config: {
          url: components["schemas"]["webhook-config-url"];
          content_type?: components["schemas"]["webhook-config-content-type"];
          secret?: components["schemas"]["webhook-config-secret"];
          insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
          /**
           * NOTE(review): `token` and `digest` appear in the OpenAPI spec but are not covered by the config documentation above — confirm against the upstream spec before use.
           */
          token?: string;
          digest?: string;
        };
        /**
         * Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for.
         */
        events?: string[];
        /**
         * Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications.
         */
        active?: boolean;
      };
    };
    responses: {
      /**
       * response: the created hook object
       */
      "201": {
        "application/json": components["schemas"]["hook"];
      };
      "403": unknown;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)."
   */
  "repos/get-webhook": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        hook_id: components["parameters"]["hook-id"];
      };
    };
    responses: {
      /**
       * response: a hook object
       */
      "200": {
        "application/json": components["schemas"]["hook"];
      };
      "404": unknown;
    };
  };
  /**
   * Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)."
   */
  "repos/update-webhook": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        hook_id: components["parameters"]["hook-id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params).
         */
        config?: {
          url: components["schemas"]["webhook-config-url"];
          content_type?: components["schemas"]["webhook-config-content-type"];
          secret?: components["schemas"]["webhook-config-secret"];
          insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
          /**
           * NOTE(review): `address` and `room` appear in the OpenAPI spec but are not covered by the config documentation above — confirm against the upstream spec before use.
           */
          address?: string;
          room?: string;
        };
        /**
         * Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. This replaces the entire array of events.
         */
        events?: string[];
        /**
         * Determines a list of events to be added to the list of events that the Hook triggers for.
         */
        add_events?: string[];
        /**
         * Determines a list of events to be removed from the list of events that the Hook triggers for.
         */
        remove_events?: string[];
        /**
         * Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications.
         */
        active?: boolean;
      };
    };
    responses: {
      /**
       * response: the updated hook object
       */
      "200": {
        "application/json": components["schemas"]["hook"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
"repos/delete-webhook": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
hook_id: components["parameters"]["hook-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
  /**
   * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)."
   *
   * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission.
   */
  "repos/get-webhook-config-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        hook_id: components["parameters"]["hook-id"];
      };
    };
    responses: {
      /**
       * Default response: a webhook-config object
       */
      "200": {
        "application/json": components["schemas"]["webhook-config"];
      };
    };
  };
/**
* Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)."
*
* Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission.
*/
"repos/update-webhook-config-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
hook_id: components["parameters"]["hook-id"];
};
};
requestBody: {
"application/json": {
url?: components["schemas"]["webhook-config-url"];
content_type?: components["schemas"]["webhook-config-content-type"];
secret?: components["schemas"]["webhook-config-secret"];
insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
};
};
responses: {
/**
* Default response
*/
"200": {
"application/json": components["schemas"]["webhook-config"];
};
};
};
/**
* This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook.
*/
"repos/ping-webhook": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
hook_id: components["parameters"]["hook-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
/**
* This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated.
*
* **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test`
*/
"repos/test-push-webhook": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
hook_id: components["parameters"]["hook-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
  /**
   * View the progress of an import.
   *
   * **Import status**
   *
   * This section includes details about the possible values of the `status` field of the Import Progress response.
   *
   * An import that does not have errors will progress through these steps:
   *
   * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL.
   * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import).
   * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information.
   * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects".
   * * `complete` - the import is complete, and the repository is ready on GitHub.
   *
   * If there are problems, you will see one of these in the `status` field:
   *
   * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section.
   * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com) for more information.
   * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section.
   * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL.
   * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section.
   *
   * **The project_choices field**
   *
   * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type.
   *
   * **Git LFS related fields**
   *
   * This section includes details about Git LFS related fields that may be present in the Import Progress response.
   *
   * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken.
   * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step.
   * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository.
   * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request.
   */
  "migrations/get-import-status": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["import"];
      };
      "404": unknown;
    };
  };
  /**
   * Start a source import to a GitHub repository using GitHub Importer.
   */
  "migrations/start-import": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The URL of the originating repository.
         */
        vcs_url: string;
        /**
         * The originating VCS type. Can be one of `subversion`, `git`, `mercurial`, or `tfvc`. Please be aware that without this parameter, the import job will take additional time to detect the VCS type before beginning the import. This detection step will be reflected in the response.
         */
        vcs?: "subversion" | "git" | "mercurial" | "tfvc";
        /**
         * If authentication is required, the username to provide to `vcs_url`.
         */
        vcs_username?: string;
        /**
         * If authentication is required, the password to provide to `vcs_url`.
         */
        vcs_password?: string;
        /**
         * For a tfvc import, the name of the project that is being imported.
         */
        tfvc_project?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["import"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API
   * request. If no parameters are provided, the import will be restarted.
   */
  "migrations/update-import": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The username to provide to the originating repository.
         */
        vcs_username?: string;
        /**
         * The password to provide to the originating repository.
         */
        vcs_password?: string;
        /**
         * The originating VCS type (`subversion`, `git`, `mercurial`, or `tfvc` — see "Start an import").
         */
        vcs?: string;
        /**
         * For a tfvc import, the name of the project that is being imported.
         */
        tfvc_project?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["import"];
      };
    };
  };
  /**
   * Stop an import for a repository.
   */
  "migrations/cancel-import": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot <hubot@12341234-abab-fefe-8787-fedcba987654>`.
   *
   * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information.
   */
  "migrations/get-commit-authors": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        since?: components["parameters"]["since-user"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["porter-author"][];
      };
      "404": unknown;
    };
  };
  /**
   * Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository.
   */
  "migrations/map-commit-author": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        author_id: number;
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The new Git author email.
         */
        email?: string;
        /**
         * The new Git author name.
         */
        name?: string;
        /**
         * NOTE(review): undocumented in the spec — presumably the author's identifier in the originating repository; confirm against the `porter-author` schema.
         */
        remote_id?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["porter-author"];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * List files larger than 100MB found during the import
   */
  "migrations/get-large-files": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["porter-large-file"][];
      };
    };
  };
  /**
   * You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://help.github.com/articles/versioning-large-files/).
   */
  "migrations/set-lfs-preference": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Can be one of `opt_in` (large files will be stored using Git LFS) or `opt_out` (large files will be removed during the import).
         */
        use_lfs: "opt_in" | "opt_out";
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["import"];
      };
      "422": unknown;
    };
  };
  /**
   * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to.
   *
   * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
   */
  "apps/get-repo-installation": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["installation"];
      };
      /**
       * Moved permanently
       */
      "301": never;
      "404": unknown;
    };
  };
  /**
   * Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response.
   */
  "interactions/get-restrictions-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["interaction-limit-response"];
      };
    };
  };
  /**
   * Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository.
   */
  "interactions/set-restrictions-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": components["schemas"]["interaction-limit"];
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["interaction-limit-response"];
      };
      /**
       * Conflict
       */
      "409": unknown;
    };
  };
  /**
   * Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository.
   */
  "interactions/remove-restrictions-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      /**
       * Conflict
       */
      "409": unknown;
    };
  };
  /**
   * When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations.
   */
  "repos/list-invitations": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["repository-invitation"][];
      };
    };
  };
  /**
   * Updates an open repository invitation (for example, to change the `permissions` granted to the invitee).
   */
  "repos/update-invitation": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        invitation_id: components["parameters"]["invitation_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The permissions that the associated user will have on the repository. Valid values are `read`, `write`, `maintain`, `triage`, and `admin`.
         */
        permissions?: "read" | "write" | "maintain" | "triage" | "admin";
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["repository-invitation"];
      };
    };
  };
  /**
   * Deletes a repository invitation. Responds `204` on success.
   */
  "repos/delete-invitation": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        invitation_id: components["parameters"]["invitation_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * List issues in a repository.
   *
   * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this
   * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by
   * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull
   * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
   */
  "issues/list-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        /**
         * If an `integer` is passed, it should refer to a milestone by its `number` field. If the string `*` is passed, issues with any milestone are accepted. If the string `none` is passed, issues without milestones are returned.
         */
        milestone?: string;
        /**
         * Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`.
         */
        state?: "open" | "closed" | "all";
        /**
         * Can be the name of a user. Pass in `none` for issues with no assigned user, and `*` for issues assigned to any user.
         */
        assignee?: string;
        /**
         * The user that created the issue.
         */
        creator?: string;
        /**
         * A user that's mentioned in the issue.
         */
        mentioned?: string;
        labels?: components["parameters"]["labels"];
        /**
         * What to sort results by. Can be either `created`, `updated`, `comments`.
         */
        sort?: "created" | "updated" | "comments";
        direction?: components["parameters"]["direction"];
        since?: components["parameters"]["since"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-simple"][];
      };
      /**
       * Moved permanently
       */
      "301": never;
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://help.github.com/articles/disabling-issues/), the API returns a `410 Gone` status.
   *
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details.
   */
  "issues/create": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The title of the issue.
         */
        title: string;
        /**
         * The contents of the issue.
         */
        body?: string;
        /**
         * Login for the user that this issue should be assigned to. _NOTE: Only users with push access can set the assignee for new issues. The assignee is silently dropped otherwise. **This field is deprecated.**_
         *
         * @deprecated
         */
        assignee?: string | null;
        /**
         * The `number` of the milestone to associate this issue with. _NOTE: Only users with push access can set the milestone for new issues. The milestone is silently dropped otherwise._
         */
        milestone?: number | null;
        /**
         * Labels to associate with this issue. _NOTE: Only users with push access can set labels for new issues. Labels are silently dropped otherwise._
         */
        labels?: (
          | string
          | { id?: number; name?: string; description?: string; color?: string }
        )[];
        /**
         * Logins for Users to assign to this issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._
         */
        assignees?: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["issue"];
      };
      "403": unknown;
      "404": unknown;
      "410": unknown;
      "422": unknown;
      "503": unknown;
    };
  };
  /**
   * By default, Issue Comments are ordered by ascending ID.
   */
  "issues/list-comments-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        sort?: components["parameters"]["sort"];
        /**
         * Either `asc` or `desc`. Ignored without the `sort` parameter.
         */
        direction?: "asc" | "desc";
        since?: components["parameters"]["since"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-comment"][];
      };
      "404": unknown;
      "422": unknown;
    };
  };
  /**
   * Gets a single issue comment by `comment_id`.
   */
  "issues/get-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-comment"];
      };
      "404": unknown;
    };
  };
  /**
   * Updates the `body` of an issue comment.
   */
  "issues/update-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The contents of the comment.
         */
        body: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-comment"];
      };
      "422": unknown;
    };
  };
  /**
   * Deletes an issue comment. Responds `204` on success.
   */
  "issues/delete-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments).
   */
  "reactions/list-for-issue-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
      };
      query: {
        /**
         * Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue comment.
         */
        content?:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["reaction"][];
      };
      "404": unknown;
      "415": unknown;
    };
  };
  /**
   * Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this issue comment.
   */
  "reactions/create-for-issue-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue comment.
         */
        content:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
      };
    };
    responses: {
      /**
       * Reaction exists
       */
      "200": {
        "application/json": components["schemas"]["reaction"];
      };
      /**
       * Reaction created
       */
      "201": {
        "application/json": components["schemas"]["reaction"];
      };
      "415": unknown;
      "422": unknown;
    };
  };
  /**
   * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`.
   *
   * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments).
   */
  "reactions/delete-for-issue-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
        reaction_id: components["parameters"]["reaction-id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Lists events for a repository's issues.
   */
  "issues/list-events-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-event"][];
      };
      "422": unknown;
    };
  };
  /**
   * Gets a single issue event by its `event_id`.
   */
  "issues/get-event": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        event_id: number;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-event"];
      };
      "403": unknown;
      "404": unknown;
      "410": unknown;
    };
  };
  /**
   * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was
   * [transferred](https://help.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If
   * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API
   * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read
   * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe
   * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook.
   *
   * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this
   * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by
   * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull
   * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
   */
  "issues/get": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue"];
      };
      "301": never;
      "304": never;
      "404": unknown;
      "410": unknown;
    };
  };
  /**
   * Issue owners and users with push access can edit an issue.
   */
  "issues/update": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The title of the issue.
         */
        title?: string;
        /**
         * The contents of the issue.
         */
        body?: string;
        /**
         * Login for the user that this issue should be assigned to. **This field is deprecated.**
         *
         * @deprecated
         */
        assignee?: string;
        /**
         * State of the issue. Either `open` or `closed`.
         */
        state?: "open" | "closed";
        /**
         * The `number` of the milestone to associate this issue with or `null` to remove current. _NOTE: Only users with push access can set the milestone for issues. The milestone is silently dropped otherwise._
         */
        milestone?: number | null;
        /**
         * Labels to associate with this issue. Pass one or more Labels to _replace_ the set of Labels on this Issue. Send an empty array (`[]`) to clear all Labels from the Issue. _NOTE: Only users with push access can set labels for issues. Labels are silently dropped otherwise._
         */
        labels?: (
          | string
          | { id?: number; name?: string; description?: string; color?: string }
        )[];
        /**
         * Logins for Users to assign to this issue. Pass one or more user logins to _replace_ the set of assignees on this Issue. Send an empty array (`[]`) to clear all assignees from the Issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._
         */
        assignees?: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue"];
      };
      "301": never;
      "403": unknown;
      "404": unknown;
      "410": unknown;
      "422": unknown;
      "503": unknown;
    };
  };
  /**
   * Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced.
   */
  "issues/add-assignees": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Usernames of people to assign this issue to. _NOTE: Only users with push access can add assignees to an issue. Assignees are silently ignored otherwise._
         */
        assignees?: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["issue-simple"];
      };
    };
  };
  /**
   * Removes one or more assignees from an issue.
   */
  "issues/remove-assignees": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Usernames of assignees to remove from an issue. _NOTE: Only users with push access can remove assignees from an issue. Assignees are silently ignored otherwise._
         */
        assignees?: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-simple"];
      };
    };
  };
  /**
   * Issue Comments are ordered by ascending ID.
   */
  "issues/list-comments": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
      query: {
        since?: components["parameters"]["since"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-comment"][];
      };
      "404": unknown;
      "410": unknown;
    };
  };
  /**
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details.
   */
  "issues/create-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The contents of the comment.
         */
        body: string;
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["issue-comment"];
      };
      "403": unknown;
      "404": unknown;
      "410": unknown;
      "422": unknown;
    };
  };
  /**
   * Lists events for a single issue.
   */
  "issues/list-events": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["issue-event-for-issue"][];
      };
      "410": unknown;
    };
  };
  /**
   * Lists the labels currently set on an issue.
   */
  "issues/list-labels-on-issue": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["label"][];
      };
      "410": unknown;
    };
  };
  /**
   * Adds labels to an issue; labels already on the issue are kept (compare "Set labels for an issue", which replaces them).
   */
  "issues/add-labels": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The names of the labels to add to the issue. Must contain at least one label. **Note:** Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key.
         */
        labels: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["label"][];
      };
      "410": unknown;
      "422": unknown;
    };
  };
  /**
   * Removes any previous labels and sets the new labels for an issue.
   */
  "issues/set-labels": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The names of the labels to add to the issue. You can pass an empty array to remove all labels. **Note:** Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key.
         */
        labels?: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["label"][];
      };
      "410": unknown;
      "422": unknown;
    };
  };
  /**
   * Removes all labels from an issue. Responds `204` on success.
   */
  "issues/remove-all-labels": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
      "410": unknown;
    };
  };
  /**
   * Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist.
   */
  "issues/remove-label": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        issue_number: components["parameters"]["issue_number"];
        name: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["label"][];
      };
      "404": unknown;
      "410": unknown;
    };
  };
/**
* Users with push access can lock an issue or pull request's conversation.
*
* Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*/
"issues/lock": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
issue_number: components["parameters"]["issue_number"];
};
};
requestBody: {
"application/json": {
/**
* The reason for locking the issue or pull request conversation. Lock will fail if you don't use one of these reasons:
* \* `off-topic`
* \* `too heated`
* \* `resolved`
* \* `spam`
*/
lock_reason?: "off-topic" | "too heated" | "resolved" | "spam";
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
"404": unknown;
"410": unknown;
"422": unknown;
};
};
/**
* Users with push access can unlock an issue's conversation.
*/
"issues/unlock": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
issue_number: components["parameters"]["issue_number"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
"404": unknown;
};
};
/**
 * List the reactions to an [issue](https://docs.github.com/rest/reference/issues).
 */
"reactions/list-for-issue": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
issue_number: components["parameters"]["issue_number"];
};
query: {
/**
 * Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue.
 */
content?:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["reaction"][];
};
"404": unknown;
"410": unknown;
"415": unknown;
};
};
/**
 * Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with a `Status: 200 OK` means that you already added the reaction type to this issue.
 */
"reactions/create-for-issue": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
issue_number: components["parameters"]["issue_number"];
};
};
requestBody: {
"application/json": {
/**
 * The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue.
 */
content:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["reaction"];
};
"415": unknown;
"422": unknown;
};
};
/**
 * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`.
 *
 * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/).
 */
"reactions/delete-for-issue": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
issue_number: components["parameters"]["issue_number"];
reaction_id: components["parameters"]["reaction-id"];
};
};
responses: {
/**
 * Empty response
 */
"204": never;
};
};
/**
 * Lists the events that make up an issue's timeline.
 */
"issues/list-events-for-timeline": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
issue_number: components["parameters"]["issue_number"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["issue-event-for-issue"][];
};
"404": unknown;
"410": unknown;
"415": unknown;
};
};
/**
 * Lists the deploy keys configured for a repository.
 */
"repos/list-deploy-keys": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["deploy-key"][];
};
};
};
/**
 * You can create a read-only deploy key.
 */
"repos/create-deploy-key": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * A name for the key.
 */
title?: string;
/**
 * The contents of the key.
 */
key: string;
/**
 * If `true`, the key will only be able to read repository contents. Otherwise, the key will be able to read and write.
 *
 * Deploy keys with write access can perform the same actions as an organization member with admin access, or a collaborator on a personal repository. For more information, see "[Repository permission levels for an organization](https://help.github.com/articles/repository-permission-levels-for-an-organization/)" and "[Permission levels for a user account repository](https://help.github.com/articles/permission-levels-for-a-user-account-repository/)."
 */
read_only?: boolean;
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["deploy-key"];
};
"422": unknown;
};
};
/**
 * Gets a single deploy key by its id.
 */
"repos/get-deploy-key": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
key_id: components["parameters"]["key_id"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["deploy-key"];
};
"404": unknown;
};
};
/**
 * Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead.
 */
"repos/delete-deploy-key": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
key_id: components["parameters"]["key_id"];
};
};
responses: {
/**
 * Empty response
 */
"204": never;
};
};
/**
 * Lists all labels defined for a repository.
 */
"issues/list-labels-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["label"][];
};
"404": unknown;
};
};
/**
 * Creates a new label for a repository.
 */
"issues/create-label": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * The name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji 🍓. For a full list of available emoji and codes, see [emoji-cheat-sheet.com](http://emoji-cheat-sheet.com/).
 */
name: string;
/**
 * The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`.
 */
color?: string;
/**
 * A short description of the label.
 */
description?: string;
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["label"];
};
"404": unknown;
"422": unknown;
};
};
/**
 * Gets a single repository label by name.
 */
"issues/get-label": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
name: string;
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["label"];
};
"404": unknown;
};
};
/**
 * Updates an existing repository label (rename, recolor, or edit the description).
 */
"issues/update-label": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
name: string;
};
};
requestBody: {
"application/json": {
/**
 * The new name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji 🍓. For a full list of available emoji and codes, see [emoji-cheat-sheet.com](http://emoji-cheat-sheet.com/).
 */
new_name?: string;
/**
 * The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`.
 */
color?: string;
/**
 * A short description of the label.
 */
description?: string;
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["label"];
};
};
};
/**
 * Deletes a repository label. Empty `204` response on success.
 */
"issues/delete-label": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
name: string;
};
};
responses: {
/**
 * Empty response
 */
"204": never;
};
};
/**
 * Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language.
 */
"repos/list-languages": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["language"];
};
};
};
/**
 * This method returns the contents of the repository's license file, if one is detected.
 *
 * Similar to [Get repository content](https://docs.github.com/rest/reference/repos/contents#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML.
 */
"licenses/get-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["page"];
};
"404": unknown;
};
};
/**
 * Lists milestones for a repository, with optional state/sort filtering.
 */
"issues/list-milestones": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
 * The state of the milestone. Either `open`, `closed`, or `all`.
 */
state?: "open" | "closed" | "all";
/**
 * What to sort results by. Either `due_on` or `completeness`.
 */
sort?: "due_on" | "completeness";
/**
 * The direction of the sort. Either `asc` or `desc`.
 */
direction?: "asc" | "desc";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["milestone"][];
};
"404": unknown;
};
};
/**
 * Creates a milestone in a repository.
 */
"issues/create-milestone": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * The title of the milestone.
 */
title: string;
/**
 * The state of the milestone. Either `open` or `closed`.
 */
state?: "open" | "closed";
/**
 * A description of the milestone.
 */
description?: string;
/**
 * The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
 */
due_on?: string;
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["milestone"];
};
"404": unknown;
"422": unknown;
};
};
/**
 * Gets a single milestone by its number.
 */
"issues/get-milestone": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
milestone_number: components["parameters"]["milestone_number"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["milestone"];
};
"404": unknown;
};
};
/**
 * Updates an existing milestone; all request-body fields are optional.
 */
"issues/update-milestone": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
milestone_number: components["parameters"]["milestone_number"];
};
};
requestBody: {
"application/json": {
/**
 * The title of the milestone.
 */
title?: string;
/**
 * The state of the milestone. Either `open` or `closed`.
 */
state?: "open" | "closed";
/**
 * A description of the milestone.
 */
description?: string;
/**
 * The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
 */
due_on?: string;
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["milestone"];
};
};
};
/**
 * Deletes a milestone. Empty `204` response on success.
 */
"issues/delete-milestone": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
milestone_number: components["parameters"]["milestone_number"];
};
};
responses: {
/**
 * Empty response
 */
"204": never;
"404": unknown;
};
};
/**
 * Lists labels for every issue in a milestone.
 */
"issues/list-labels-for-milestone": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
milestone_number: components["parameters"]["milestone_number"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["label"][];
};
};
};
/**
 * List all notifications for the current user.
 */
"activity/list-repo-notifications-for-authenticated-user": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
all?: components["parameters"]["all"];
participating?: components["parameters"]["participating"];
since?: components["parameters"]["since"];
before?: components["parameters"]["before"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["thread"][];
};
};
};
/**
 * Marks all notifications in a repository as "read" and removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`.
 */
"activity/mark-repo-notifications-as-read": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp.
 */
last_read_at?: string;
};
};
responses: {
/**
 * response
 */
"202": unknown;
};
};
/**
 * Gets information about the repository's GitHub Pages site.
 */
"repos/get-pages": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["page"];
};
"404": unknown;
};
};
/**
 * Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)."
 */
"repos/create-pages-site": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * The source branch and directory used to publish your Pages site.
 */
source: {
/**
 * The repository branch used to publish your site's source files.
 */
branch: string;
/**
 * The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. Default: `/`
 */
path?: "/" | "/docs";
};
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["page"];
};
"409": unknown;
"415": unknown;
"422": unknown;
};
};
/**
 * Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)."
 */
"repos/update-information-about-pages-site": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * Specify a custom domain for the repository. Sending a `null` value will remove the custom domain. For more about custom domains, see "[Using a custom domain with GitHub Pages](https://help.github.com/articles/using-a-custom-domain-with-github-pages/)."
 */
cname?: string | null;
source: Partial<"gh-pages" | "master" | "master /docs"> &
Partial<{
/**
 * The repository branch used to publish your site's source files.
 */
branch: string;
/**
 * The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`.
 */
path: "/" | "/docs";
}>;
};
};
responses: {
/**
 * Empty response
 */
"204": never;
"400": unknown;
"422": unknown;
};
};
/**
 * Deletes the repository's GitHub Pages site. Empty `204` response on success.
 */
"repos/delete-pages-site": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
 * Empty response
 */
"204": never;
"404": unknown;
"415": unknown;
"422": unknown;
};
};
/**
 * Lists builds of the repository's GitHub Pages site.
 */
"repos/list-pages-builds": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["page-build"][];
};
};
};
/**
 * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures.
 *
 * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes.
 */
"repos/request-pages-build": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["page-build-status"];
};
};
};
/**
 * Gets the most recent build of the repository's GitHub Pages site.
 */
"repos/get-latest-pages-build": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["page-build"];
};
};
};
/**
 * Gets a single GitHub Pages build by its id.
 */
"repos/get-pages-build": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
build_id: number;
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["page-build"];
};
};
};
/**
 * Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned.
 */
"projects/list-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
 * Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`.
 */
state?: "open" | "closed" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["project"][];
};
"401": unknown;
"403": unknown;
"404": unknown;
"410": unknown;
"422": unknown;
};
};
/**
 * Creates a repository project board. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned.
 */
"projects/create-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * The name of the project.
 */
name: string;
/**
 * The description of the project.
 */
body?: string;
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["project"];
};
"401": unknown;
"403": unknown;
"404": unknown;
"410": unknown;
"422": unknown;
};
};
/**
 * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
 */
"pulls/list": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
/**
 * Either `open`, `closed`, or `all` to filter by state.
 */
state?: "open" | "closed" | "all";
/**
 * Filter pulls by head user or head organization and branch name in the format of `user:ref-name` or `organization:ref-name`. For example: `github:new-script-format` or `octocat:test-branch`.
 */
head?: string;
/**
 * Filter pulls by base branch name. Example: `gh-pages`.
 */
base?: string;
/**
 * What to sort results by. Can be either `created`, `updated`, `popularity` (comment count) or `long-running` (age, filtering by pulls updated in the last month).
 */
sort?: "created" | "updated" | "popularity" | "long-running";
/**
 * The direction of the sort. Can be either `asc` or `desc`. Default: `desc` when sort is `created` or sort is not specified, otherwise `asc`.
 */
direction?: "asc" | "desc";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
 * response
 */
"200": {
"application/json": components["schemas"]["pull-request-simple"][];
};
"304": never;
"422": unknown;
};
};
/**
 * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
 *
 * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request.
 *
 * You can create a new pull request.
 *
 * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
 */
"pulls/create": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
 * The title of the new pull request.
 */
title?: string;
/**
 * The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace `head` with a user like this: `username:branch`.
 */
head: string;
/**
 * The name of the branch you want the changes pulled into. This should be an existing branch on the current repository. You cannot submit a pull request to one repository that requests a merge to a base of another repository.
 */
base: string;
/**
 * The contents of the pull request.
 */
body?: string;
/**
 * Indicates whether [maintainers can modify](https://help.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request.
 */
maintainer_can_modify?: boolean;
/**
 * Indicates whether the pull request is a draft. See "[Draft Pull Requests](https://help.github.com/en/articles/about-pull-requests#draft-pull-requests)" in the GitHub Help documentation to learn more.
 */
draft?: boolean;
/**
 * NOTE(review): undocumented in the generated spec; presumably the number of an existing issue to convert into a pull request — confirm against the GitHub REST docs.
 */
issue?: number;
};
};
responses: {
/**
 * response
 */
"201": {
"application/json": components["schemas"]["pull-request"];
};
"403": unknown;
"422": unknown;
};
};
/**
* **Note:** Multi-line comments on pull requests are currently in public beta and subject to change.
*
* Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID.
*
* **Multi-line comment summary**
*
* **Note:** New parameters and response fields are available for developers to preview. During the preview period, these response fields may change without advance notice. Please see the [blog post](https://developer.github.com/changes/2019-10-03-multi-line-comments) for full details.
*
* Use the `comfort-fade` preview header and the `line` parameter to show multi-line comment-supported fields in the response.
*
* If you use the `comfort-fade` preview header, your response will show:
*
* * For multi-line comments, values for `start_line`, `original_start_line`, `start_side`, `line`, `original_line`, and `side`.
* * For single-line comments, values for `line`, `original_line`, and `side` and a `null` value for `start_line`, `original_start_line`, and `start_side`.
*
* If you don't use the `comfort-fade` preview header, multi-line and single-line comments will appear the same way in the response with a single `position` attribute. Your response will show:
*
* * For multi-line comments, the last line of the comment range for the `position` attribute.
* * For single-line comments, the diff-positioned way of referencing comments for the `position` attribute. For more information, see `position` in the [input parameters](https://docs.github.com/rest/reference/pulls#parameters-2) table.
*
* The `reactions` key will have the following payload where `url` can be used to construct the API location for [listing and creating](https://docs.github.com/rest/reference/reactions) reactions.
*/
"pulls/list-review-comments-for-repo": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
query: {
sort?: components["parameters"]["sort"];
/**
* Can be either `asc` or `desc`. Ignored without `sort` parameter.
*/
direction?: "asc" | "desc";
since?: components["parameters"]["since"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-review-comment"][];
};
};
};
/**
* **Note:** Multi-line comments on pull requests are currently in public beta and subject to change.
*
* Provides details for a review comment.
*
* **Multi-line comment summary**
*
* **Note:** New parameters and response fields are available for developers to preview. During the preview period, these response fields may change without advance notice. Please see the [blog post](https://developer.github.com/changes/2019-10-03-multi-line-comments) for full details.
*
* Use the `comfort-fade` preview header and the `line` parameter to show multi-line comment-supported fields in the response.
*
* If you use the `comfort-fade` preview header, your response will show:
*
* * For multi-line comments, values for `start_line`, `original_start_line`, `start_side`, `line`, `original_line`, and `side`.
* * For single-line comments, values for `line`, `original_line`, and `side` and a `null` value for `start_line`, `original_start_line`, and `start_side`.
*
* If you don't use the `comfort-fade` preview header, multi-line and single-line comments will appear the same way in the response with a single `position` attribute. Your response will show:
*
* * For multi-line comments, the last line of the comment range for the `position` attribute.
* * For single-line comments, the diff-positioned way of referencing comments for the `position` attribute. For more information, see `position` in the [input parameters](https://docs.github.com/rest/reference/pulls#parameters-2) table.
*
* The `reactions` key will have the following payload where `url` can be used to construct the API location for [listing and creating](https://docs.github.com/rest/reference/reactions) reactions.
*/
"pulls/get-review-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-review-comment"];
};
"404": unknown;
};
};
/**
* **Note:** Multi-line comments on pull requests are currently in public beta and subject to change.
*
* Enables you to edit a review comment.
*
* **Multi-line comment summary**
*
* **Note:** New parameters and response fields are available for developers to preview. During the preview period, these response fields may change without advance notice. Please see the [blog post](https://developer.github.com/changes/2019-10-03-multi-line-comments) for full details.
*
* Use the `comfort-fade` preview header and the `line` parameter to show multi-line comment-supported fields in the response.
*
* If you use the `comfort-fade` preview header, your response will show:
*
* * For multi-line comments, values for `start_line`, `original_start_line`, `start_side`, `line`, `original_line`, and `side`.
* * For single-line comments, values for `line`, `original_line`, and `side` and a `null` value for `start_line`, `original_start_line`, and `start_side`.
*
* If you don't use the `comfort-fade` preview header, multi-line and single-line comments will appear the same way in the response with a single `position` attribute. Your response will show:
*
* * For multi-line comments, the last line of the comment range for the `position` attribute.
* * For single-line comments, the diff-positioned way of referencing comments for the `position` attribute. For more information, see `position` in the [input parameters](https://docs.github.com/rest/reference/pulls#parameters-2) table.
*/
"pulls/update-review-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
};
requestBody: {
"application/json": {
/**
* The text of the reply to the review comment.
*/
body: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-review-comment"];
};
};
};
/**
* Deletes a review comment.
*/
"pulls/delete-review-comment": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
comment_id: components["parameters"]["comment_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
};
};
  /**
   * List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments).
   */
  "reactions/list-for-pull-request-review-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
      };
      query: {
        /**
         * Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a pull request review comment.
         */
        content?:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * An array of reactions on the review comment.
       */
      "200": {
        "application/json": components["schemas"]["reaction"][];
      };
      "404": unknown; // Not Found
      "415": unknown; // Unsupported Media Type
    };
  };
  /**
   * Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this pull request review comment.
   */
  "reactions/create-for-pull-request-review-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the pull request review comment.
         */
        content:
          | "+1"
          | "-1"
          | "laugh"
          | "confused"
          | "heart"
          | "hooray"
          | "rocket"
          | "eyes";
      };
    };
    responses: {
      /**
       * Reaction exists
       */
      "200": {
        "application/json": components["schemas"]["reaction"];
      };
      /**
       * Reaction created
       */
      "201": {
        "application/json": components["schemas"]["reaction"];
      };
      "415": unknown; // Unsupported Media Type
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
  /**
   * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.`
   *
   * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments).
   */
  "reactions/delete-for-pull-request-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        comment_id: components["parameters"]["comment_id"];
        // NOTE(review): unlike the other path parameters here, this
        // component key uses a hyphen ("reaction-id"), not an underscore.
        reaction_id: components["parameters"]["reaction-id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * Lists details of a pull request by providing its number.
   *
   * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)".
   *
   * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit.
   *
   * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request:
   *
   * * If merged as a [merge commit](https://help.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit.
   * * If merged via a [squash](https://help.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch.
   * * If [rebased](https://help.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to.
   *
   * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats.
   */
  "pulls/get": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
    };
    responses: {
      /**
       * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats.
       */
      "200": {
        "application/json": components["schemas"]["pull-request"];
      };
      "304": never; // Not Modified
      "404": unknown; // Not Found
      "500": unknown; // Internal Server Error
    };
  };
  /**
   * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
   *
   * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request.
   */
  "pulls/update": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The title of the pull request.
         */
        title?: string;
        /**
         * The contents of the pull request.
         */
        body?: string;
        /**
         * State of this Pull Request. Either `open` or `closed`.
         */
        state?: "open" | "closed";
        /**
         * The name of the branch you want your changes pulled into. This should be an existing branch on the current repository. You cannot update the base branch on a pull request to point to another repository.
         */
        base?: string;
        /**
         * Indicates whether [maintainers can modify](https://help.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request.
         */
        maintainer_can_modify?: boolean;
      };
    };
    responses: {
      /**
       * The updated pull request.
       */
      "200": {
        "application/json": components["schemas"]["pull-request"];
      };
      "403": unknown; // Forbidden
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
  /**
   * **Note:** Multi-line comments on pull requests are currently in public beta and subject to change.
   *
   * Lists all review comments for a pull request. By default, review comments are in ascending order by ID.
   *
   * **Multi-line comment summary**
   *
   * **Note:** New parameters and response fields are available for developers to preview. During the preview period, these response fields may change without advance notice. Please see the [blog post](https://developer.github.com/changes/2019-10-03-multi-line-comments) for full details.
   *
   * Use the `comfort-fade` preview header and the `line` parameter to show multi-line comment-supported fields in the response.
   *
   * If you use the `comfort-fade` preview header, your response will show:
   *
   * * For multi-line comments, values for `start_line`, `original_start_line`, `start_side`, `line`, `original_line`, and `side`.
   * * For single-line comments, values for `line`, `original_line`, and `side` and a `null` value for `start_line`, `original_start_line`, and `start_side`.
   *
   * If you don't use the `comfort-fade` preview header, multi-line and single-line comments will appear the same way in the response with a single `position` attribute. Your response will show:
   *
   * * For multi-line comments, the last line of the comment range for the `position` attribute.
   * * For single-line comments, the diff-positioned way of referencing comments for the `position` attribute. For more information, see `position` in the [input parameters](https://docs.github.com/rest/reference/pulls#parameters-2) table.
   *
   * The `reactions` key will have the following payload where `url` can be used to construct the API location for [listing and creating](https://docs.github.com/rest/reference/reactions) reactions.
   */
  "pulls/list-review-comments": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
      query: {
        sort?: components["parameters"]["sort"];
        /**
         * Can be either `asc` or `desc`. Ignored without `sort` parameter.
         */
        direction?: "asc" | "desc";
        since?: components["parameters"]["since"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * An array of review comments on the pull request.
       */
      "200": {
        "application/json": components["schemas"]["pull-request-review-comment"][];
      };
    };
  };
  /**
   * **Note:** Multi-line comments on pull requests are currently in public beta and subject to change.
   *
   * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff.
   *
   * You can still create a review comment using the `position` parameter. When you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. For more information, see [Multi-line comment summary](https://docs.github.com/rest/reference/pulls#multi-line-comment-summary-3).
   *
   * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file.
   *
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
   *
   * **Multi-line comment summary**
   *
   * **Note:** New parameters and response fields are available for developers to preview. During the preview period, these response fields may change without advance notice. Please see the [blog post](https://developer.github.com/changes/2019-10-03-multi-line-comments) for full details.
   *
   * Use the `comfort-fade` preview header and the `line` parameter to show multi-line comment-supported fields in the response.
   *
   * If you use the `comfort-fade` preview header, your response will show:
   *
   * * For multi-line comments, values for `start_line`, `original_start_line`, `start_side`, `line`, `original_line`, and `side`.
   * * For single-line comments, values for `line`, `original_line`, and `side` and a `null` value for `start_line`, `original_start_line`, and `start_side`.
   *
   * If you don't use the `comfort-fade` preview header, multi-line and single-line comments will appear the same way in the response with a single `position` attribute. Your response will show:
   *
   * * For multi-line comments, the last line of the comment range for the `position` attribute.
   * * For single-line comments, the diff-positioned way of referencing comments for the `position` attribute. For more information, see `position` in the [input parameters](https://docs.github.com/rest/reference/pulls#parameters-2) table.
   */
  "pulls/create-review-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The text of the review comment.
         */
        body: string;
        /**
         * The SHA of the commit needing a comment. Not using the latest commit SHA may render your comment outdated if a subsequent commit modifies the line you specify as the `position`.
         */
        commit_id?: string;
        /**
         * The relative path to the file that necessitates a comment.
         */
        path: string;
        /**
         * **Required without `comfort-fade` preview**. The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note above.
         */
        position?: number;
        /**
         * **Required with `comfort-fade` preview**. In a split diff view, the side of the diff that the pull request's changes appear on. Can be `LEFT` or `RIGHT`. Use `LEFT` for deletions that appear in red. Use `RIGHT` for additions that appear in green or unchanged lines that appear in white and are shown for context. For a multi-line comment, side represents whether the last line of the comment range is a deletion or addition. For more information, see "[Diff view options](https://help.github.com/en/articles/about-comparing-branches-in-pull-requests#diff-view-options)" in the GitHub Help documentation.
         */
        side?: "LEFT" | "RIGHT";
        /**
         * **Required with `comfort-fade` preview**. The line of the blob in the pull request diff that the comment applies to. For a multi-line comment, the last line of the range that your comment applies to.
         */
        line?: number;
        /**
         * **Required when using multi-line comments**. To create multi-line comments, you must use the `comfort-fade` preview header. The `start_line` is the first line in the pull request diff that your multi-line comment applies to. To learn more about multi-line comments, see "[Commenting on a pull request](https://help.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation.
         */
        start_line?: number;
        /**
         * **Required when using multi-line comments**. To create multi-line comments, you must use the `comfort-fade` preview header. The `start_side` is the starting side of the diff that the comment applies to. Can be `LEFT` or `RIGHT`. To learn more about multi-line comments, see "[Commenting on a pull request](https://help.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. See `side` in this table for additional context.
         */
        // NOTE(review): the literal "side" below looks like an upstream
        // OpenAPI-spec oddity (the other side fields allow only LEFT/RIGHT).
        // This file is generated, so it is reproduced as-is — confirm against
        // GitHub's published spec before relying on the "side" value.
        start_side?: "LEFT" | "RIGHT" | "side";
        // presumably the ID of an existing top-level review comment being
        // replied to — verify against the GitHub REST docs.
        in_reply_to?: number;
      };
    };
    responses: {
      /**
       * The created review comment.
       */
      "201": {
        "application/json": components["schemas"]["pull-request-review-comment"];
      };
      "403": unknown; // Forbidden
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
  /**
   * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported.
   *
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
   */
  "pulls/create-reply-for-review-comment": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
        comment_id: components["parameters"]["comment_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The text of the review comment.
         */
        body: string;
      };
    };
    responses: {
      /**
       * The created reply comment.
       */
      "201": {
        "application/json": components["schemas"]["pull-request-review-comment"];
      };
      "404": unknown; // Not Found
    };
  };
  /**
   * Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint.
   */
  "pulls/list-commits": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * An array of commits on the pull request (max 250; see endpoint note).
       */
      "200": {
        "application/json": components["schemas"]["commit"][];
      };
    };
  };
  /**
   * **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default.
   */
  "pulls/list-files": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * An array of changed-file (diff) entries for the pull request.
       */
      "200": {
        "application/json": components["schemas"]["diff-entry"][];
      };
      "422": unknown; // Unprocessable Entity (validation failed)
      "500": unknown; // Internal Server Error
    };
  };
"pulls/check-if-merged": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
pull_number: components["parameters"]["pull-number"];
};
};
responses: {
/**
* Response if pull request has been merged
*/
"204": never;
/**
* Response if pull request has not been merged
*/
"404": unknown;
};
};
  /**
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/reference/guides#dealing-with-abuse-rate-limits)" for details.
   */
  "pulls/merge": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Title for the automatic commit message.
         */
        commit_title?: string;
        /**
         * Extra detail to append to automatic commit message.
         */
        commit_message?: string;
        /**
         * SHA that pull request head must match to allow merge.
         */
        sha?: string;
        /**
         * Merge method to use. Possible values are `merge`, `squash` or `rebase`. Default is `merge`.
         */
        merge_method?: "merge" | "squash" | "rebase";
      };
    };
    responses: {
      /**
       * Response if merge was successful
       */
      "200": {
        "application/json": components["schemas"]["pull-request-merge-result"];
      };
      "403": unknown; // Forbidden
      "404": unknown; // Not Found
      /**
       * Response if merge cannot be performed
       */
      "405": {
        "application/json": { message?: string; documentation_url?: string };
      };
      /**
       * Response if sha was provided and pull request head did not match
       */
      "409": {
        "application/json": { message?: string; documentation_url?: string };
      };
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
"pulls/list-requested-reviewers": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
pull_number: components["parameters"]["pull-number"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-review-request"];
};
};
};
  /**
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/reference/guides#dealing-with-abuse-rate-limits)" for details.
   */
  "pulls/request-reviewers": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * An array of user `login`s that will be requested.
         */
        reviewers?: string[];
        /**
         * An array of team `slug`s that will be requested.
         */
        team_reviewers?: string[];
      };
    };
    responses: {
      /**
       * The pull request with the review requests applied.
       */
      "201": {
        "application/json": components["schemas"]["pull-request-simple"];
      };
      "403": unknown; // Forbidden
      /**
       * Response if user is not a collaborator
       */
      "422": unknown;
    };
  };
"pulls/remove-requested-reviewers": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
pull_number: components["parameters"]["pull-number"];
};
};
requestBody: {
"application/json": {
/**
* An array of user `login`s that will be removed.
*/
reviewers?: string[];
/**
* An array of team `slug`s that will be removed.
*/
team_reviewers?: string[];
};
};
responses: {
/**
* response
*/
"200": unknown;
"422": unknown;
};
};
  /**
   * The list of reviews returns in chronological order.
   */
  "pulls/list-reviews": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
      // Standard pagination controls.
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * The list of reviews returns in chronological order.
       */
      "200": {
        "application/json": components["schemas"]["pull-request-review"][];
      };
    };
  };
  /**
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
   *
   * Pull request reviews created in the `PENDING` state do not include the `submitted_at` property in the response.
   *
   * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/reference/media/#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint.
   *
   * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file.
   */
  "pulls/create-review": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The SHA of the commit that needs a review. Not using the latest commit SHA may render your review comment outdated if a subsequent commit modifies the line you specify as the `position`. Defaults to the most recent commit in the pull request when you do not specify a value.
         */
        commit_id?: string;
        /**
         * **Required** when using `REQUEST_CHANGES` or `COMMENT` for the `event` parameter. The body text of the pull request review.
         */
        body?: string;
        /**
         * The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. By leaving this blank, you set the review action state to `PENDING`, which means you will need to [submit the pull request review](https://docs.github.com/rest/reference/pulls#submit-a-review-for-a-pull-request) when you are ready.
         */
        event?: "APPROVE" | "REQUEST_CHANGES" | "COMMENT";
        /**
         * Use the following table to specify the location, destination, and contents of the draft review comment.
         */
        comments?: {
          /**
           * The relative path to the file that necessitates a review comment.
           */
          path: string;
          /**
           * The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note below.
           */
          position?: number;
          /**
           * Text of the review comment.
           */
          body: string;
          // The four fields below are presumably the `comfort-fade`
          // multi-line comment fields (line/side/start_line/start_side) —
          // the generator left them undocumented here; confirm against the
          // GitHub REST docs.
          line?: number;
          side?: string;
          start_line?: number;
          start_side?: string;
        }[];
      };
    };
    responses: {
      /**
       * The created review.
       */
      "200": {
        "application/json": components["schemas"]["pull-request-review"];
      };
      "403": unknown; // Forbidden
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
"pulls/get-review": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
pull_number: components["parameters"]["pull-number"];
review_id: components["parameters"]["review_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-review"];
};
"404": unknown;
};
};
  /**
   * Update the review summary comment with new text.
   */
  "pulls/update-review": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
        review_id: components["parameters"]["review_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The body text of the pull request review.
         */
        body: string;
      };
    };
    responses: {
      /**
       * The updated review.
       */
      "200": {
        "application/json": components["schemas"]["pull-request-review"];
      };
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
"pulls/delete-pending-review": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
pull_number: components["parameters"]["pull-number"];
review_id: components["parameters"]["review_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-review"];
};
"404": unknown;
"422": unknown;
};
};
  /**
   * List comments for a specific pull request review.
   */
  "pulls/list-comments-for-review": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
        review_id: components["parameters"]["review_id"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * An array of comments belonging to the review.
       */
      "200": {
        // NOTE(review): this endpoint uses the "review-comment" schema,
        // unlike the other comment endpoints here, which use
        // "pull-request-review-comment" — mirrors the upstream spec.
        "application/json": components["schemas"]["review-comment"][];
      };
      "404": unknown; // Not Found
    };
  };
  /**
   * **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews.
   */
  "pulls/dismiss-review": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
        review_id: components["parameters"]["review_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The message for the pull request review dismissal
         */
        message: string;
        // Undocumented in the upstream spec; semantics unclear from here —
        // confirm against the GitHub REST docs before using.
        event?: string;
      };
    };
    responses: {
      /**
       * The dismissed review.
       */
      "200": {
        "application/json": components["schemas"]["pull-request-review"];
      };
      "404": unknown; // Not Found
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
"pulls/submit-review": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
pull_number: components["parameters"]["pull-number"];
review_id: components["parameters"]["review_id"];
};
};
requestBody: {
"application/json": {
/**
* The body text of the pull request review
*/
body?: string;
/**
* The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. When you leave this blank, the API returns _HTTP 422 (Unrecognizable entity)_ and sets the review action state to `PENDING`, which means you will need to re-submit the pull request review using a review action.
*/
event: "APPROVE" | "REQUEST_CHANGES" | "COMMENT";
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["pull-request-review"];
};
"403": unknown;
"404": unknown;
"422": unknown;
};
};
  /**
   * Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch.
   */
  "pulls/update-branch": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        pull_number: components["parameters"]["pull-number"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The expected SHA of the pull request's HEAD ref. This is the most recent commit on the pull request's branch. If the expected SHA does not match the pull request's HEAD, you will receive a `422 Unprocessable Entity` status. You can use the "[List commits](https://docs.github.com/rest/reference/repos#list-commits)" endpoint to find the most recent commit SHA. Default: SHA of the pull request's current HEAD ref.
         */
        expected_head_sha?: string;
      };
    };
    responses: {
      /**
       * Accepted — the branch update was queued; `message`/`url` describe it.
       */
      "202": {
        "application/json": { message?: string; url?: string };
      };
      "403": unknown; // Forbidden
      "415": unknown; // Unsupported Media Type
      "422": unknown; // Unprocessable Entity (e.g. expected_head_sha mismatch)
    };
  };
  /**
   * Gets the preferred README for a repository.
   *
   * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML.
   */
  "repos/get-readme": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        /**
         * The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`)
         */
        ref?: string;
      };
    };
    responses: {
      /**
       * The README as a content file.
       */
      "200": {
        "application/json": components["schemas"]["content-file"];
      };
      "404": unknown; // Not Found
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
  /**
   * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags).
   *
   * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases.
   */
  "repos/list-releases": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * An array of releases.
       */
      "200": {
        "application/json": components["schemas"]["release"][];
      };
      "404": unknown; // Not Found
    };
  };
  /**
   * Users with push access to the repository can create a release.
   *
   * This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
   */
  "repos/create-release": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The name of the tag.
         */
        tag_name: string;
        /**
         * Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`).
         */
        target_commitish?: string;
        /**
         * The name of the release.
         */
        name?: string;
        /**
         * Text describing the contents of the tag.
         */
        body?: string;
        /**
         * `true` to create a draft (unpublished) release, `false` to create a published one.
         */
        draft?: boolean;
        /**
         * `true` to identify the release as a prerelease. `false` to identify the release as a full release.
         */
        prerelease?: boolean;
      };
    };
    responses: {
      /**
       * The created release.
       */
      "201": {
        "application/json": components["schemas"]["release"];
      };
      "422": unknown; // Unprocessable Entity (validation failed)
    };
  };
  /**
   * To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response.
   */
  "repos/get-release-asset": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        asset_id: components["parameters"]["asset_id"];
      };
    };
    responses: {
      /**
       * To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response.
       */
      "200": {
        "application/json": components["schemas"]["release-asset"];
      };
      /**
       * Found — redirect to the asset's binary content (see endpoint description)
       */
      "302": never;
      /**
       * Not Found
       */
      "404": unknown;
      /**
       * Unsupported Media Type
       */
      "415": unknown;
    };
  };
  /**
   * Users with push access to the repository can edit a release asset.
   */
  "repos/update-release-asset": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        asset_id: components["parameters"]["asset_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The file name of the asset.
         */
        name?: string;
        /**
         * An alternate short description of the asset. Used in place of the filename.
         */
        label?: string;
        /**
         * NOTE(review): undocumented in the source spec — presumably the asset's state; confirm against the GitHub API reference before relying on it.
         */
        state?: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["release-asset"];
      };
    };
  };
  /**
   * Deletes the release asset identified by `asset_id`. No request body; succeeds with an empty `204` response.
   */
  "repos/delete-release-asset": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        asset_id: components["parameters"]["asset_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * View the latest published full release for the repository.
   *
   * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published.
   */
  "repos/get-latest-release": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response — the latest published full release
       */
      "200": {
        "application/json": components["schemas"]["release"];
      };
    };
  };
  /**
   * Get a published release with the specified tag.
   */
  "repos/get-release-by-tag": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        /**
         * The Git tag name to look up the release by.
         */
        tag: string;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["release"];
      };
      /**
       * Not Found
       */
      "404": unknown;
    };
  };
  /**
   * **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia).
   */
  "repos/get-release": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        release_id: components["parameters"]["release_id"];
      };
    };
    responses: {
      /**
       * **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia).
       */
      "200": {
        "application/json": components["schemas"]["release"];
      };
      /**
       * Not Found
       */
      "404": unknown;
    };
  };
  /**
   * Users with push access to the repository can edit a release.
   */
  "repos/update-release": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        release_id: components["parameters"]["release_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The name of the tag.
         */
        tag_name?: string;
        /**
         * Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`).
         */
        target_commitish?: string;
        /**
         * The name of the release.
         */
        name?: string;
        /**
         * Text describing the contents of the tag.
         */
        body?: string;
        /**
         * `true` makes the release a draft, and `false` publishes the release.
         */
        draft?: boolean;
        /**
         * `true` to identify the release as a prerelease, `false` to identify the release as a full release.
         */
        prerelease?: boolean;
      };
    };
    responses: {
      /**
       * response — the updated release
       */
      "200": {
        "application/json": components["schemas"]["release"];
      };
    };
  };
  /**
   * Users with push access to the repository can delete a release.
   */
  "repos/delete-release": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        release_id: components["parameters"]["release_id"];
      };
    };
    responses: {
      /**
       * Empty response (release deleted)
       */
      "204": never;
    };
  };
  /**
   * Lists the assets of the release identified by `release_id` (paginated).
   */
  "repos/list-release-assets": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        release_id: components["parameters"]["release_id"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["release-asset"][];
      };
    };
  };
  /**
   * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in
   * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset.
   *
   * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint.
   *
   * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example:
   *
   * `application/zip`
   *
   * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example,
   * you'll still need to pass your authentication to be able to upload an asset.
   *
   * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted.
   *
   * **Notes:**
   * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)"
   * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://github.com/contact).
   * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset.
   */
  "repos/upload-release-asset": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        release_id: components["parameters"]["release_id"];
      };
      query: {
        /**
         * Presumably the file name of the asset (matches the `name` field on update-release-asset) — confirm against the API reference.
         */
        name?: string;
        /**
         * Presumably an alternate short description shown in place of the filename — confirm against the API reference.
         */
        label?: string;
      };
    };
    requestBody: {
      /**
       * Raw binary content of the asset (see the endpoint description above); any media type is accepted.
       */
      "*/*": string;
    };
    responses: {
      /**
       * Response for successful upload
       */
      "201": {
        "application/json": components["schemas"]["release-asset"];
      };
    };
  };
  /**
   * Lists all secret scanning alerts for a private repository, from newest to oldest. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope.
   *
   * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint.
   */
  "secret-scanning/list-alerts-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        /**
         * Set to `open` or `resolved` to only list secret scanning alerts in a specific state.
         */
        state?: "open" | "resolved";
        page?: components["parameters"]["page"];
        per_page?: components["parameters"]["per_page"];
      };
    };
    responses: {
      /**
       * Response
       */
      "200": {
        "application/json": components["schemas"]["secret-scanning-alert"][];
      };
      /**
       * Repository is public or secret scanning is disabled for the repository
       */
      "404": unknown;
      /**
       * Service Unavailable
       */
      "503": unknown;
    };
  };
  /**
   * Gets a single secret scanning alert detected in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope.
   *
   * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint.
   */
  "secret-scanning/get-alert": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        alert_number: components["parameters"]["alert_number"];
      };
    };
    responses: {
      /**
       * Default response
       */
      "200": {
        "application/json": components["schemas"]["secret-scanning-alert"];
      };
      /**
       * Repository is public, or secret scanning is disabled for the repository, or the resource is not found
       */
      "404": unknown;
      /**
       * Service Unavailable
       */
      "503": unknown;
    };
  };
  /**
   * Updates the status of a secret scanning alert in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope.
   *
   * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint.
   */
  "secret-scanning/update-alert": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        alert_number: components["parameters"]["alert_number"];
      };
    };
    requestBody: {
      "application/json": {
        state: components["schemas"]["secret-scanning-alert-state"];
        resolution?: components["schemas"]["secret-scanning-alert-resolution"];
      };
    };
    responses: {
      /**
       * Default response
       */
      "200": {
        "application/json": components["schemas"]["secret-scanning-alert"];
      };
      /**
       * Repository is public, or secret scanning is disabled for the repository, or the resource is not found
       */
      "404": unknown;
      /**
       * State does not match the resolution
       */
      "422": unknown;
      /**
       * Service Unavailable
       */
      "503": unknown;
    };
  };
  /**
   * Lists the people that have starred the repository.
   *
   * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header:
   */
  "activity/list-stargazers-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["simple-user"][];
        "application/vnd.github.v3.star+json": components["schemas"]["stargazer"][];
      };
      /**
       * Unprocessable Entity
       */
      "422": unknown;
    };
  };
  /**
   * Returns a weekly aggregate of the number of additions and deletions pushed to a repository.
   */
  "repos/get-code-frequency-stats": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Returns a weekly aggregate of the number of additions and deletions pushed to a repository.
       */
      "200": {
        "application/json": components["schemas"]["code-frequency-stat"][];
      };
    };
  };
  /**
   * Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`.
   */
  "repos/get-commit-activity-stats": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response — one entry per week of the last year
       */
      "200": {
        "application/json": components["schemas"]["commit-activity"][];
      };
    };
  };
  /**
   * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information:
   *
   * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time).
   * * `a` - Number of additions
   * * `d` - Number of deletions
   * * `c` - Number of commits
   */
  "repos/get-contributors-stats": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time).
       * * `a` - Number of additions
       * * `d` - Number of deletions
       * * `c` - Number of commits
       */
      "200": {
        "application/json": components["schemas"]["contributor-activity"][];
      };
    };
  };
  /**
   * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`.
   *
   * The array order is oldest week (index 0) to most recent week.
   */
  "repos/get-participation-stats": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * The array order is oldest week (index 0) to most recent week.
       */
      "200": {
        "application/json": components["schemas"]["participation-stats"];
      };
      /**
       * Not Found
       */
      "404": unknown;
    };
  };
  /**
   * Each array contains the day number, hour number, and number of commits:
   *
   * * `0-6`: Sunday - Saturday
   * * `0-23`: Hour of day
   * * Number of commits
   *
   * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits.
   */
  "repos/get-punch-card-stats": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits.
       */
      "200": {
        "application/json": components["schemas"]["code-frequency-stat"][];
      };
    };
  };
  /**
   * Users with push access in a repository can create commit statuses for a given SHA.
   *
   * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error.
   */
  "repos/create-commit-status": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        sha: string;
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The state of the status. Can be one of `error`, `failure`, `pending`, or `success`.
         */
        state: "error" | "failure" | "pending" | "success";
        /**
         * The target URL to associate with this status. This URL will be linked from the GitHub UI to allow users to easily see the source of the status.
         * For example, if your continuous integration system is posting build status, you would want to provide the deep link for the build output for this specific SHA:
         * `http://ci.example.com/user/repo/build/sha`
         */
        target_url?: string;
        /**
         * A short description of the status.
         */
        description?: string;
        /**
         * A string label to differentiate this status from the status of other systems.
         */
        context?: string;
      };
    };
    responses: {
      /**
       * response — the created status
       */
      "201": {
        "application/json": components["schemas"]["status"];
      };
    };
  };
  /**
   * Lists the people watching the specified repository.
   */
  "activity/list-watchers-for-repo": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["simple-user"][];
      };
    };
  };
  /**
   * Gets your watch subscription for the repository (`404` if you are not subscribed — see the response comments below).
   */
  "activity/get-repo-subscription": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Response if you subscribe to the repository
       */
      "200": {
        "application/json": components["schemas"]["repository-subscription"];
      };
      /**
       * Forbidden
       */
      "403": unknown;
      /**
       * Response if you don't subscribe to the repository
       */
      "404": unknown;
    };
  };
  /**
   * If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely.
   */
  "activity/set-repo-subscription": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Determines if notifications should be received from this repository.
         */
        subscribed?: boolean;
        /**
         * Determines if all notifications should be blocked from this repository.
         */
        ignored?: boolean;
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["repository-subscription"];
      };
    };
  };
  /**
   * This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription).
   */
  "activity/delete-repo-subscription": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Lists repository Git tags (paginated).
   */
  "repos/list-tags": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["tag"][];
      };
    };
  };
  /**
   * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually
   * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use
   * the `Location` header to make a second `GET` request.
   * **Note**: For private repositories, these links are temporary and expire after five minutes.
   */
  "repos/download-tarball-archive": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        ref: string;
      };
    };
    responses: {
      /**
       * Found — redirect to the tarball download URL (follow the `Location` header)
       */
      "302": never;
    };
  };
  /**
   * Lists repository teams (paginated).
   */
  "repos/list-teams": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["team"][];
      };
    };
  };
  /**
   * Gets all topics for the repository.
   */
  "repos/get-all-topics": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["topic"];
      };
      /**
       * Not Found
       */
      "404": unknown;
      /**
       * Unsupported Media Type
       */
      "415": unknown;
    };
  };
  /**
   * Replaces all topics for the repository with the supplied `names` array.
   */
  "repos/replace-all-topics": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * An array of topics to add to the repository. Pass one or more topics to _replace_ the set of existing topics. Send an empty array (`[]`) to clear all topics from the repository. **Note:** Topic `names` cannot contain uppercase letters.
         */
        names: string[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["topic"];
      };
      /**
       * Not Found
       */
      "404": unknown;
      /**
       * Unsupported Media Type
       */
      "415": unknown;
      /**
       * Unprocessable Entity (validation failed)
       */
      "422": unknown;
    };
  };
  /**
   * Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday.
   */
  "repos/get-clones": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per?: components["parameters"]["per"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["clone-traffic"];
      };
      /**
       * Forbidden
       */
      "403": unknown;
    };
  };
  /**
   * Get the top 10 popular contents over the last 14 days.
   */
  "repos/get-top-paths": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["content-traffic"][];
      };
      /**
       * Forbidden
       */
      "403": unknown;
    };
  };
  /**
   * Get the top 10 referrers over the last 14 days.
   */
  "repos/get-top-referrers": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["referrer-traffic"][];
      };
      /**
       * Forbidden
       */
      "403": unknown;
    };
  };
  /**
   * Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday.
   */
  "repos/get-views": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
      query: {
        per?: components["parameters"]["per"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["view-traffic"];
      };
      /**
       * Forbidden
       */
      "403": unknown;
    };
  };
  /**
   * A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://help.github.com/articles/about-repository-transfers/).
   */
  "repos/transfer": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * **Required:** The username or organization name the repository will be transferred to.
         */
        new_owner?: string;
        /**
         * ID of the team or teams to add to the repository. Teams can only be added to organization-owned repositories.
         */
        team_ids?: number[];
      };
    };
    responses: {
      /**
       * Accepted — the transfer continues asynchronously (see endpoint description)
       */
      "202": {
        "application/json": components["schemas"]["repository"];
      };
    };
  };
  /**
   * Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)".
   */
  "repos/check-vulnerability-alerts": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Response if repository is enabled with vulnerability alerts
       */
      "204": never;
      /**
       * Response if repository is not enabled with vulnerability alerts
       */
      "404": unknown;
    };
  };
  /**
   * Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)".
   */
  "repos/enable-vulnerability-alerts": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)".
   */
  "repos/disable-vulnerability-alerts": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
  /**
   * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually
   * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use
   * the `Location` header to make a second `GET` request.
   * **Note**: For private repositories, these links are temporary and expire after five minutes.
   */
  "repos/download-zipball-archive": {
    parameters: {
      path: {
        owner: components["parameters"]["owner"];
        repo: components["parameters"]["repo"];
        ref: string;
      };
    };
    responses: {
      /**
       * Found — redirect to the zipball download URL (follow the `Location` header)
       */
      "302": never;
    };
  };
  /**
   * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. The authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`.
   *
   * **OAuth scope requirements**
   *
   * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include:
   *
   * * `public_repo` scope or `repo` scope to create a public repository
   * * `repo` scope to create a private repository
   */
  "repos/create-using-template": {
    parameters: {
      path: {
        template_owner: string;
        template_repo: string;
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The organization or person who will own the new repository. To create a new repository in an organization, the authenticated user must be a member of the specified organization.
         */
        owner?: string;
        /**
         * The name of the new repository.
         */
        name: string;
        /**
         * A short description of the new repository.
         */
        description?: string;
        /**
         * Set to `true` to include the directory structure and files from all branches in the template repository, and not just the default branch. Default: `false`.
         */
        include_all_branches?: boolean;
        /**
         * Either `true` to create a new private repository or `false` to create a new public one.
         */
        private?: boolean;
      };
    };
    responses: {
      /**
       * response — the created repository
       */
      "201": {
        "application/json": components["schemas"]["repository"];
      };
    };
  };
  /**
   * Lists all public repositories in the order that they were created.
   *
   * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories.
   */
  "repos/list-public": {
    parameters: {
      query: {
        since?: components["parameters"]["since-repo"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["minimal-repository"][];
      };
      /**
       * Not Modified
       */
      "304": never;
      /**
       * Unprocessable Entity
       */
      "422": unknown;
    };
  };
  /**
   * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
   */
  "enterprise-admin/list-provisioned-groups-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
      query: {
        startIndex?: components["parameters"]["start_index"];
        count?: components["parameters"]["count"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["scim-group-list-enterprise"];
      };
    };
  };
  /**
   * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
   *
   * Provision an enterprise group, and invite users to the group. This sends invitation emails to the email address of the invited users to join the GitHub organization that the SCIM group corresponds to.
   */
  "enterprise-admin/provision-and-invite-enterprise-group": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The SCIM schema URIs.
         */
        schemas: string[];
        /**
         * The name of the SCIM group. This must match the GitHub organization that the group maps to.
         */
        displayName: string;
        /**
         * The users to invite to the group (see endpoint description).
         */
        members?: {
          /**
           * The SCIM user ID for a user.
           */
          value: string;
        }[];
      };
    };
    responses: {
      /**
       * response
       */
      "201": {
        "application/json": components["schemas"]["scim-enterprise-group"];
      };
    };
  };
  /**
   * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
   */
  "enterprise-admin/get-provisioning-information-for-enterprise-group": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_group_id: components["parameters"]["scim_group_id"];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["scim-enterprise-group"];
      };
    };
  };
  /**
   * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
   *
   * Replaces an existing provisioned group’s information. You must provide all the information required for the group as if you were provisioning it for the first time. Any existing group information that you don't provide will be removed, including group membership. If you want to only update a specific attribute, use the [Update an attribute for a SCIM enterprise group](#update-an-attribute-for-a-scim-enterprise-group) endpoint instead.
   */
  "enterprise-admin/set-information-for-provisioned-enterprise-group": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_group_id: components["parameters"]["scim_group_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The SCIM schema URIs.
         */
        schemas: string[];
        /**
         * The name of the SCIM group. This must match the GitHub organization that the group maps to.
         */
        displayName: string;
        /**
         * The full group membership; members omitted here are removed (see endpoint description).
         */
        members?: {
          /**
           * The SCIM user ID for a user.
           */
          value: string;
        }[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["scim-enterprise-group"];
      };
    };
  };
  /**
   * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
   *
   * Allows you to change a provisioned group’s individual attributes. To change a group’s values, you must provide a specific Operations JSON format that contains at least one of the add, remove, or replace operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2).
   */
  "enterprise-admin/update-attribute-for-enterprise-group": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_group_id: components["parameters"]["scim_group_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The SCIM schema URIs.
         */
        schemas: string[];
        /**
         * Array of [SCIM operations](https://tools.ietf.org/html/rfc7644#section-3.5.2).
         */
        Operations: { [key: string]: any }[];
      };
    };
    responses: {
      /**
       * response
       */
      "200": {
        "application/json": components["schemas"]["scim-enterprise-group"];
      };
    };
  };
  /**
   * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
   */
  "enterprise-admin/delete-scim-group-from-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_group_id: components["parameters"]["scim_group_id"];
      };
    };
    responses: {
      /**
       * Empty response
       */
      "204": never;
    };
  };
/**
* **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
*
* Retrieves a paginated list of all provisioned enterprise members, including pending invitations.
*
* When a user with a SAML-provisioned external identity leaves (or is removed from) an enterprise, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member:
* - When a user with a SCIM-provisioned external identity is removed from an enterprise, the account's metadata is preserved to allow the user to re-join the organization in the future.
* - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted).
* - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO.
*
* The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO:
*
* 1. The user is granted access by the IdP and is not a member of the GitHub enterprise.
*
* 1. The user attempts to access the GitHub enterprise and initiates the SAML SSO process, and is not currently signed in to their GitHub account.
*
* 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account:
* - If the user signs in, their GitHub account is linked to this entry.
* - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub enterprise, and the external identity `null` entry remains in place.
*/
  "enterprise-admin/list-provisioned-identities-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
      query: {
        startIndex?: components["parameters"]["start_index"];
        count?: components["parameters"]["count"];
      };
    };
    responses: {
      /**
       * response: paginated list of provisioned enterprise identities
       */
      "200": {
        "application/json": components["schemas"]["scim-user-list-enterprise"];
      };
    };
  };
/**
* **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
*
* Provision enterprise membership for a user, and send organization invitation emails to the email address.
*
* You can optionally include the groups a user will be invited to join. If you do not provide a list of `groups`, the user is provisioned for the enterprise, but no organization invitation emails will be sent.
*/
  "enterprise-admin/provision-and-invite-enterprise-user": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The SCIM schema URIs.
         */
        schemas: string[];
        /**
         * The username for the user.
         */
        userName: string;
        name: {
          /**
           * The first name of the user.
           */
          givenName: string;
          /**
           * The last name of the user.
           */
          familyName: string;
        };
        /**
         * List of user emails.
         */
        emails: {
          /**
           * The email address.
           */
          value: string;
          /**
           * The type of email address.
           */
          type: string;
          /**
           * Whether this email address is the primary address.
           */
          primary: boolean;
        }[];
        /**
         * List of SCIM group IDs the user is a member of.
         */
        groups?: { value?: string }[];
      };
    };
    responses: {
      /**
       * response: the newly provisioned SCIM enterprise user
       */
      "201": {
        "application/json": components["schemas"]["scim-enterprise-user"];
      };
    };
  };
/**
* **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
*/
  "enterprise-admin/get-provisioning-information-for-enterprise-user": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    responses: {
      /**
       * response: the user's SCIM provisioning record
       */
      "200": {
        "application/json": components["schemas"]["scim-enterprise-user"];
      };
    };
  };
/**
* **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
*
* Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](#update-an-attribute-for-an-enterprise-scim-user) endpoint instead.
*
* You must at least provide the required values for the user: `userName`, `name`, and `emails`.
*
* **Warning:** Setting `active: false` removes the user from the enterprise, deletes the external identity, and deletes the associated `{scim_user_id}`.
*/
  "enterprise-admin/set-information-for-provisioned-enterprise-user": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The SCIM schema URIs.
         */
        schemas: string[];
        /**
         * The username for the user.
         */
        userName: string;
        name: {
          /**
           * The first name of the user.
           */
          givenName: string;
          /**
           * The last name of the user.
           */
          familyName: string;
        };
        /**
         * List of user emails.
         */
        emails: {
          /**
           * The email address.
           */
          value: string;
          /**
           * The type of email address.
           */
          type: string;
          /**
           * Whether this email address is the primary address.
           */
          primary: boolean;
        }[];
        /**
         * List of SCIM group IDs the user is a member of.
         */
        groups?: { value?: string }[];
      };
    };
    responses: {
      /**
       * response: the replaced SCIM enterprise user record
       */
      "200": {
        "application/json": components["schemas"]["scim-enterprise-user"];
      };
    };
  };
/**
* **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
*
* Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2).
*
* **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work.
*
* **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the enterprise, deletes the external identity, and deletes the associated `:scim_user_id`.
*
* ```
* {
* "Operations":[{
* "op":"replace",
* "value":{
* "active":false
* }
* }]
* }
* ```
*/
  "enterprise-admin/update-attribute-for-enterprise-user": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The SCIM schema URIs.
         */
        schemas: string[];
        /**
         * Array of [SCIM operations](https://tools.ietf.org/html/rfc7644#section-3.5.2).
         * Operation objects are untyped here; see the linked spec for their shape.
         */
        Operations: { [key: string]: any }[];
      };
    };
    responses: {
      /**
       * response: the patched SCIM enterprise user
       */
      "200": {
        "application/json": components["schemas"]["scim-enterprise-user"];
      };
    };
  };
/**
* **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change.
*/
  "enterprise-admin/delete-user-from-enterprise": {
    parameters: {
      path: {
        enterprise: components["parameters"]["enterprise"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    responses: {
      /**
       * Empty response: the user was deleted (204 No Content)
       */
      "204": never;
    };
  };
/**
* Retrieves a paginated list of all provisioned organization members, including pending invitations. If you provide the `filter` parameter, the resources for all matching provisions members are returned.
*
* When a user with a SAML-provisioned external identity leaves (or is removed from) an organization, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member:
* - When a user with a SCIM-provisioned external identity is removed from an organization, the account's metadata is preserved to allow the user to re-join the organization in the future.
* - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted).
* - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO.
*
* The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO:
*
* 1. The user is granted access by the IdP and is not a member of the GitHub organization.
*
* 1. The user attempts to access the GitHub organization and initiates the SAML SSO process, and is not currently signed in to their GitHub account.
*
* 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account:
* - If the user signs in, their GitHub account is linked to this entry.
* - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub organization, and the external identity `null` entry remains in place.
*/
  "scim/list-provisioned-identities": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
      };
      query: {
        /**
         * Used for pagination: the index of the first result to return.
         */
        startIndex?: number;
        /**
         * Used for pagination: the number of results to return.
         */
        count?: number;
        /**
         * Filters results using the equals query parameter operator (`eq`). You can filter results that are equal to `id`, `userName`, `emails`, and `external_id`. For example, to search for an identity with the `userName` Octocat, you would use this query:
         *
         * `?filter=userName%20eq%20\"Octocat\"`.
         *
         * To filter results for the identity with the email `octocat@github.com`, you would use this query:
         *
         * `?filter=emails%20eq%20\"octocat@github.com\"`.
         */
        filter?: string;
      };
    };
    responses: {
      /**
       * response: paginated list of provisioned organization identities
       */
      "200": {
        "application/scim+json": components["schemas"]["scim-user-list"];
      };
      /** Not modified */
      "304": never;
      /** Bad request */
      "400": unknown;
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
    };
  };
/**
* Provision organization membership for a user, and send an activation email to the email address.
*/
  "scim/provision-and-invite-user": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * Configured by the admin. Could be an email, login, or username
         */
        userName: string;
        /**
         * The name of the user, suitable for display to end-users
         */
        displayName?: string;
        name: { givenName: string; familyName: string; formatted?: string };
        /**
         * user emails
         */
        emails: { value: string; primary?: boolean; type?: string }[];
        schemas?: string[];
        externalId?: string;
        groups?: string[];
        active?: boolean;
      };
    };
    responses: {
      /**
       * response: the provisioned SCIM user
       */
      "201": {
        "application/scim+json": components["schemas"]["scim-user"];
      };
      /** Not modified */
      "304": never;
      /** Bad request */
      "400": unknown;
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
      /** Conflict */
      "409": unknown;
      /** Internal server error */
      "500": unknown;
    };
  };
  "scim/get-provisioning-information-for-user": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    responses: {
      /**
       * response: the user's SCIM provisioning record
       */
      "200": {
        "application/scim+json": components["schemas"]["scim-user"];
      };
      /** Not modified */
      "304": never;
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
    };
  };
/**
* Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](https://docs.github.com/rest/reference/scim#update-an-attribute-for-a-scim-user) endpoint instead.
*
* You must at least provide the required values for the user: `userName`, `name`, and `emails`.
*
* **Warning:** Setting `active: false` removes the user from the organization, deletes the external identity, and deletes the associated `{scim_user_id}`.
*/
  "scim/set-information-for-provisioned-user": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    requestBody: {
      "application/json": {
        schemas?: string[];
        /**
         * The name of the user, suitable for display to end-users
         */
        displayName?: string;
        externalId?: string;
        groups?: string[];
        active?: boolean;
        /**
         * Configured by the admin. Could be an email, login, or username
         */
        userName: string;
        name: { givenName: string; familyName: string; formatted?: string };
        /**
         * user emails
         */
        emails: { type?: string; value: string; primary?: boolean }[];
      };
    };
    responses: {
      /**
       * response: the replaced SCIM user record
       */
      "200": {
        "application/scim+json": components["schemas"]["scim-user"];
      };
      /** Not modified */
      "304": never;
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
    };
  };
/**
* Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2).
*
* **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work.
*
* **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the organization, deletes the external identity, and deletes the associated `:scim_user_id`.
*
* ```
* {
* "Operations":[{
* "op":"replace",
* "value":{
* "active":false
* }
* }]
* }
* ```
*/
  "scim/update-attribute-for-user": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    requestBody: {
      "application/json": {
        schemas?: string[];
        /**
         * Set of operations to be performed
         */
        Operations: {
          op: "add" | "remove" | "replace";
          path?: string;
          value?:
            | {
                active?: boolean | null;
                userName?: string | null;
                externalId?: string | null;
                givenName?: string | null;
                familyName?: string | null;
              }
            | { value?: string; primary?: boolean }[]
            | string;
        }[];
      };
    };
    responses: {
      /**
       * response: the patched SCIM user
       */
      "200": {
        "application/scim+json": components["schemas"]["scim-user"];
      };
      /** Not modified */
      "304": never;
      /** Bad request */
      "400": unknown;
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
      /**
       * Too many requests
       */
      "429": {
        "application/json": components["schemas"]["basic-error"];
      };
    };
  };
  "scim/delete-user-from-org": {
    parameters: {
      path: {
        org: components["parameters"]["org"];
        scim_user_id: components["parameters"]["scim_user_id"];
      };
    };
    responses: {
      /**
       * Empty response: the user was removed from the organization (204 No Content)
       */
      "204": never;
      /** Not modified */
      "304": never;
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
    };
  };
/**
* Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination).
*
* When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
*
* For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this:
*
* `q=addClass+in:file+language:js+repo:jquery/jquery`
*
* This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository.
*
* #### Considerations for code search
*
* Due to the complexity of searching code, there are a few restrictions on how searches are performed:
*
* * Only the _default branch_ is considered. In most cases, this will be the `master` branch.
* * Only files smaller than 384 KB are searchable.
* * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing
* language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is.
*/
  "search/code": {
    parameters: {
      query: {
        /**
         * The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching code](https://help.github.com/articles/searching-code/)" for a detailed list of qualifiers.
         */
        q: string;
        /**
         * Sorts the results of your query. Can only be `indexed`, which indicates how recently a file has been indexed by the GitHub search infrastructure. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results)
         */
        sort?: "indexed";
        order?: components["parameters"]["order"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response: code search results
       */
      "200": {
        "application/json": {
          total_count?: number;
          incomplete_results?: boolean;
          items?: components["schemas"]["code-search-result-item"][];
        };
      };
      /** Not modified */
      "304": never;
      /** Forbidden */
      "403": unknown;
      /** Validation failed */
      "422": unknown;
      /** Service unavailable */
      "503": unknown;
    };
  };
/**
* Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination).
*
* When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match
* metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
*
* For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this:
*
* `q=repo:octocat/Spoon-Knife+css`
*/
  "search/commits": {
    parameters: {
      query: {
        /**
         * The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching commits](https://help.github.com/articles/searching-commits/)" for a detailed list of qualifiers.
         */
        q: string;
        /**
         * Sorts the results of your query by `author-date` or `committer-date`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results)
         */
        sort?: "author-date" | "committer-date";
        order?: components["parameters"]["order"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response: commit search results
       */
      "200": {
        "application/json": {
          total_count?: number;
          incomplete_results?: boolean;
          items?: components["schemas"]["commit-search-result-item"][];
        };
      };
      /** Not modified */
      "304": never;
      /** Unsupported media type */
      "415": unknown;
    };
  };
/**
* Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination).
*
* When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted
* search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
*
* For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this.
*
* `q=windows+label:bug+language:python+state:open&sort=created&order=asc`
*
* This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results.
*
* **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)."
*/
  "search/issues-and-pull-requests": {
    parameters: {
      query: {
        /**
         * The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching issues and pull requests](https://help.github.com/articles/searching-issues-and-pull-requests/)" for a detailed list of qualifiers.
         */
        q: string;
        /**
         * Sorts the results of your query by the number of `comments`, `reactions`, `reactions-+1`, `reactions--1`, `reactions-smile`, `reactions-thinking_face`, `reactions-heart`, `reactions-tada`, or `interactions`. You can also sort results by how recently the items were `created` or `updated`, Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results)
         */
        sort?:
          | "comments"
          | "reactions"
          | "reactions-+1"
          | "reactions--1"
          | "reactions-smile"
          | "reactions-thinking_face"
          | "reactions-heart"
          | "reactions-tada"
          | "interactions"
          | "created"
          | "updated";
        order?: components["parameters"]["order"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response: issue and pull request search results
       */
      "200": {
        "application/json": {
          total_count?: number;
          incomplete_results?: boolean;
          items?: components["schemas"]["issue-search-result-item"][];
        };
      };
      /** Not modified */
      "304": never;
      /** Forbidden */
      "403": unknown;
      /** Validation failed */
      "422": unknown;
      /** Service unavailable */
      "503": unknown;
    };
  };
/**
* Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination).
*
* When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
*
* For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this:
*
* `q=bug+defect+enhancement&repository_id=64778136`
*
* The labels that best match the query appear first in the search results.
*/
  "search/labels": {
    parameters: {
      query: {
        /**
         * The id of the repository.
         */
        repository_id: number;
        /**
         * The search keywords. This endpoint does not accept qualifiers in the query. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query).
         */
        q: string;
        /**
         * Sorts the results of your query by when the label was `created` or `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results)
         */
        sort?: "created" | "updated";
        order?: components["parameters"]["order"];
      };
    };
    responses: {
      /**
       * response: label search results
       */
      "200": {
        "application/json": {
          total_count?: number;
          incomplete_results?: boolean;
          items?: components["schemas"]["label-search-result-item"][];
        };
      };
      /** Not modified */
      "304": never;
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
      /** Validation failed */
      "422": unknown;
    };
  };
/**
* Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination).
*
* When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
*
* For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this:
*
* `q=tetris+language:assembly&sort=stars&order=desc`
*
* This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results.
*
* When you include the `mercy` preview header, you can also search for multiple topics by adding more `topic:` instances. For example, your query might look like this:
*
* `q=topic:ruby+topic:rails`
*/
  "search/repos": {
    parameters: {
      query: {
        /**
         * The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching for repositories](https://help.github.com/articles/searching-for-repositories/)" for a detailed list of qualifiers.
         */
        q: string;
        /**
         * Sorts the results of your query by number of `stars`, `forks`, or `help-wanted-issues` or how recently the items were `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results)
         */
        sort?: "stars" | "forks" | "help-wanted-issues" | "updated";
        order?: components["parameters"]["order"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response: repository search results
       */
      "200": {
        "application/json": {
          total_count?: number;
          incomplete_results?: boolean;
          items?: components["schemas"]["repo-search-result-item"][];
        };
      };
      /** Not modified */
      "304": never;
      /** Validation failed */
      "422": unknown;
      /** Service unavailable */
      "503": unknown;
    };
  };
/**
* Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://help.github.com/articles/searching-topics/)" for a detailed list of qualifiers.
*
* When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
*
* For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this:
*
* `q=ruby+is:featured`
*
* This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results.
*/
  "search/topics": {
    parameters: {
      query: {
        /**
         * The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query).
         */
        q: string;
      };
    };
    responses: {
      /**
       * response: topic search results
       */
      "200": {
        "application/json": {
          total_count?: number;
          incomplete_results?: boolean;
          items?: components["schemas"]["topic-search-result-item"][];
        };
      };
      /** Not modified */
      "304": never;
      /** Unsupported media type */
      "415": unknown;
    };
  };
/**
* Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination).
*
   * When searching for users, you can get text match metadata for the user **login**, **email**, and **name** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
*
* For example, if you're looking for a list of popular users, you might try this query:
*
* `q=tom+repos:%3E42+followers:%3E1000`
*
* This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers.
*/
  "search/users": {
    parameters: {
      query: {
        /**
         * The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching users](https://help.github.com/articles/searching-users/)" for a detailed list of qualifiers.
         */
        q: string;
        /**
         * Sorts the results of your query by number of `followers` or `repositories`, or when the person `joined` GitHub. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results)
         */
        sort?: "followers" | "repositories" | "joined";
        order?: components["parameters"]["order"];
        per_page?: components["parameters"]["per_page"];
        page?: components["parameters"]["page"];
      };
    };
    responses: {
      /**
       * response: user search results
       */
      "200": {
        "application/json": {
          total_count?: number;
          incomplete_results?: boolean;
          items?: components["schemas"]["user-search-result-item"][];
        };
      };
      /** Not modified */
      "304": never;
      /** Validation failed */
      "422": unknown;
      /** Service unavailable */
      "503": unknown;
    };
  };
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint.
*/
  "teams/get-legacy": {
    parameters: {
      path: {
        team_id: components["parameters"]["team-id"];
      };
    };
    responses: {
      /**
       * response: the full team
       */
      "200": {
        "application/json": components["schemas"]["team-full"];
      };
      /** Resource not found */
      "404": unknown;
    };
  };
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint.
*
* To edit a team, the authenticated user must either be an organization owner or a team maintainer.
*
* **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`.
*/
  "teams/update-legacy": {
    parameters: {
      path: {
        team_id: components["parameters"]["team-id"];
      };
    };
    requestBody: {
      "application/json": {
        /**
         * The name of the team.
         */
        name: string;
        /**
         * The description of the team.
         */
        description?: string;
        /**
         * The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. The options are:
         * **For a non-nested team:**
         * \* `secret` - only visible to organization owners and members of this team.
         * \* `closed` - visible to all members of this organization.
         * **For a parent or child team:**
         * \* `closed` - visible to all members of this organization.
         */
        privacy?: "secret" | "closed";
        /**
         * **Deprecated**. The permission that new repositories will be added to the team with when none is specified. Can be one of:
         * \* `pull` - team members can pull, but not push to or administer newly-added repositories.
         * \* `push` - team members can pull and push, but not administer newly-added repositories.
         * \* `admin` - team members can pull, push and administer newly-added repositories.
         */
        permission?: "pull" | "push" | "admin";
        /**
         * The ID of a team to set as the parent team.
         */
        parent_team_id?: number | null;
      };
    };
    responses: {
      /**
       * response: the updated team
       * NOTE(review): the generated spec lists 201 here even though this is an update; confirm against the upstream OpenAPI description.
       */
      "201": {
        "application/json": components["schemas"]["team-full"];
      };
      /** Forbidden */
      "403": unknown;
      /** Resource not found */
      "404": unknown;
      /** Validation failed */
      "422": unknown;
    };
  };
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint.
*
* To delete a team, the authenticated user must be an organization owner or team maintainer.
*
* If you are an organization owner, deleting a parent team will delete all of its child teams as well.
*/
"teams/delete-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
"422": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint.
*
* List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/list-discussions-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
query: {
direction?: components["parameters"]["direction"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-discussion"][];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint.
*
* Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
*/
"teams/create-discussion-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
};
requestBody: {
"application/json": {
/**
* The discussion post's title.
*/
title: string;
/**
* The discussion post's body text.
*/
body: string;
/**
* Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post.
*/
private?: boolean;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["team-discussion"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint.
*
* Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/get-discussion-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-discussion"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint.
*
* Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/update-discussion-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
};
};
requestBody: {
"application/json": {
/**
* The discussion post's title.
*/
title?: string;
/**
* The discussion post's body text.
*/
body?: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-discussion"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint.
*
* Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/delete-discussion-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint.
*
* List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/list-discussion-comments-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
};
query: {
direction?: components["parameters"]["direction"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-discussion-comment"][];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint.
*
* Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*
* This endpoint triggers [notifications](https://help.github.com/articles/about-notifications/). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
*/
"teams/create-discussion-comment-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
};
};
requestBody: {
"application/json": {
/**
* The discussion comment's body text.
*/
body: string;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["team-discussion-comment"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint.
*
* Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/get-discussion-comment-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
comment_number: components["parameters"]["comment-number"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-discussion-comment"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint.
*
* Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/update-discussion-comment-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
comment_number: components["parameters"]["comment-number"];
};
};
requestBody: {
"application/json": {
/**
* The discussion comment's body text.
*/
body: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-discussion-comment"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint.
*
* Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"teams/delete-discussion-comment-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
comment_number: components["parameters"]["comment-number"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint.
*
* List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"reactions/list-for-team-discussion-comment-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
comment_number: components["parameters"]["comment-number"];
};
query: {
/**
* Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment.
*/
content?:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["reaction"][];
};
};
};
/**
   * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion comment`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment) endpoint.
*
* Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment.
*/
"reactions/create-for-team-discussion-comment-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
comment_number: components["parameters"]["comment-number"];
};
};
requestBody: {
"application/json": {
/**
* The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment.
*/
content:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["reaction"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint.
*
* List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"reactions/list-for-team-discussion-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
};
query: {
/**
* Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion.
*/
content?:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["reaction"][];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint.
*
* Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion.
*/
"reactions/create-for-team-discussion-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
discussion_number: components["parameters"]["discussion-number"];
};
};
requestBody: {
"application/json": {
/**
* The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion.
*/
content:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["reaction"];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint.
*
* The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`.
*/
"teams/list-pending-invitations-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["organization-invitation"][];
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint.
*
* Team members will include the members of child teams.
*/
"teams/list-members-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
query: {
/**
* Filters members returned by their role in the team. Can be one of:
* \* `member` - normal members of the team.
* \* `maintainer` - team maintainers.
* \* `all` - all members of the team.
*/
role?: "member" | "maintainer" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
"404": unknown;
};
};
/**
* The "Get team member" endpoint (described below) is deprecated.
*
* We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships.
*
* To list members in a team, the team must be visible to the authenticated user.
*/
"teams/get-member-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Response if user is a member
*/
"204": never;
/**
* Response if user is not a member
*/
"404": unknown;
};
};
/**
* The "Add team member" endpoint (described below) is deprecated.
*
* We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams.
*
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization.
*
* **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
*
* Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*/
"teams/add-member-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
/**
* Response if team synchronization is set up
*/
"404": unknown;
/**
* response
*/
"422": {
"application/json": {
message?: string;
errors?: { code?: string; field?: string; resource?: string }[];
documentation_url?: string;
};
};
};
};
/**
* The "Remove team member" endpoint (described below) is deprecated.
*
* We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships.
*
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team.
*
* **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
*/
"teams/remove-member-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
/**
     * Response if team synchronization is set up
*/
"404": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint.
*
* Team members will include the members of child teams.
*
* To get a user's membership with a team, the team must be visible to the authenticated user.
*
* **Note:** The `role` for organization owners returns as `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team).
*/
"teams/get-membership-for-user-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-membership"];
};
"404": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint.
*
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer.
*
* **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
*
* If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner.
*
* If the user is already a member of the team, this endpoint will update the role of the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer.
*/
"teams/add-or-update-membership-for-user-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
username: components["parameters"]["username"];
};
};
requestBody: {
"application/json": {
/**
* The role that this user should have in the team. Can be one of:
* \* `member` - a normal member of the team.
* \* `maintainer` - a team maintainer. Able to add/remove other team members, promote other team members to team maintainer, and edit the team's name and description.
*/
role?: "member" | "maintainer";
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-membership"];
};
/**
* Response if team synchronization is set up
*/
"403": unknown;
"404": unknown;
/**
* Response if you attempt to add an organization to a team
*/
"422": {
"application/json": {
message?: string;
errors?: { code?: string; field?: string; resource?: string }[];
documentation_url?: string;
};
};
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint.
*
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team.
*
* **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
*/
"teams/remove-membership-for-user-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
/**
* Response if team synchronization is set up
*/
"403": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint.
*
* Lists the organization projects for a team.
*/
"teams/list-projects-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-project"][];
};
"404": unknown;
"415": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint.
*
* Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team.
*/
"teams/check-permissions-for-project-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
project_id: components["parameters"]["project-id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-project"];
};
/**
* Response if project is not managed by this team
*/
"404": unknown;
"415": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint.
*
* Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization.
*/
"teams/add-or-update-project-permissions-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
project_id: components["parameters"]["project-id"];
};
};
requestBody: {
"application/json": {
/**
* The permission to grant to the team for this project. Can be one of:
* \* `read` - team members can read, but not write to or administer this project.
* \* `write` - team members can read and write, but not administer this project.
* \* `admin` - team members can read, write and administer this project.
* Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*/
permission?: "read" | "write" | "admin";
};
};
responses: {
/**
* Empty response
*/
"204": never;
/**
* Response if the project is not owned by the organization
*/
"403": {
"application/json": { message?: string; documentation_url?: string };
};
"404": unknown;
"415": unknown;
"422": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint.
*
* Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it.
*/
"teams/remove-project-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
project_id: components["parameters"]["project-id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"404": unknown;
"415": unknown;
"422": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint.
*/
"teams/list-repos-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
"404": unknown;
};
};
/**
* **Note**: Repositories inherited through a parent team will also be checked.
*
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint.
*
* You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header:
*/
"teams/check-permissions-for-repo-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Alternative response with extra repository information
*/
"200": {
"application/vnd.github.v3.repository+json": components["schemas"]["team-repository"];
};
/**
* Response if repository is managed by this team
*/
"204": never;
/**
* Response if repository is not managed by this team
*/
"404": unknown;
};
};
/**
   * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions) endpoint.
*
* To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization.
*
* Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*/
"teams/add-or-update-repo-permissions-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
requestBody: {
"application/json": {
/**
* The permission to grant the team on this repository. Can be one of:
* \* `pull` - team members can pull, but not push to or administer this repository.
* \* `push` - team members can pull and push, but not administer this repository.
* \* `admin` - team members can pull, push and administer this repository.
*
* If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository.
*/
permission?: "pull" | "push" | "admin";
};
};
responses: {
/**
* Empty response
*/
"204": never;
"403": unknown;
"422": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint.
*
* If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team.
*/
"teams/remove-repo-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List IdP groups for a team`](https://docs.github.com/rest/reference/teams#list-idp-groups-for-a-team) endpoint.
*
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* List IdP groups connected to a team on GitHub.
*/
"teams/list-idp-groups-for-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["group-mapping"];
};
"403": unknown;
"404": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create or update IdP group connections`](https://docs.github.com/rest/reference/teams#create-or-update-idp-group-connections) endpoint.
*
* Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
*
* Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team.
*/
"teams/create-or-update-idp-group-connections-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
};
requestBody: {
"application/json": {
/**
* The IdP groups you want to connect to a GitHub team. When updating, the new `groups` object will replace the original one. You must include any existing groups that you don't want to remove.
*/
groups: {
/**
* ID of the IdP group.
*/
group_id: string;
/**
* Name of the IdP group.
*/
group_name: string;
/**
* Description of the IdP group.
*/
group_description: string;
id?: string;
name?: string;
description?: string;
}[];
synced_at?: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["group-mapping"];
};
"403": unknown;
"422": unknown;
};
};
/**
* **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint.
*/
"teams/list-child-legacy": {
parameters: {
path: {
team_id: components["parameters"]["team-id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* Response if child teams exist
*/
"200": {
"application/json": components["schemas"]["team"][];
};
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information.
*
* If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information.
*/
"users/get-authenticated": {
parameters: {};
responses: {
/**
* response
*/
"200": {
"application/json":
| components["schemas"]["private-user"]
| components["schemas"]["public-user"];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
/**
* **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API.
*/
"users/update-authenticated": {
parameters: {};
requestBody: {
"application/json": {
/**
* The new name of the user.
*/
name?: string;
/**
* The publicly visible email address of the user.
*/
email?: string;
/**
* The new blog URL of the user.
*/
blog?: string;
/**
* The new Twitter username of the user.
*/
twitter_username?: string | null;
/**
* The new company of the user.
*/
company?: string;
/**
* The new location of the user.
*/
location?: string;
/**
* The new hiring availability of the user.
*/
hireable?: boolean;
/**
* The new short biography of the user.
*/
bio?: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["private-user"];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* List the users you've blocked on your personal account.
*/
"users/list-blocked-by-authenticated": {
parameters: {};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"415": unknown;
};
};
/**
* If the user is blocked:
*
* If the user is not blocked:
*/
"users/check-blocked": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* If the user is blocked:
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
/**
* If the user is not blocked:
*/
"404": {
"application/json": components["schemas"]["basic-error"];
};
};
};
"users/block": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
"users/unblock": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Sets the visibility for your primary email addresses.
*/
"users/set-primary-email-visibility-for-authenticated": {
parameters: {};
requestBody: {
"application/json": {
/**
* An email address associated with the GitHub user account to manage.
*/
email: string;
/**
         * Denotes whether an email is publicly visible.
*/
visibility: "public" | "private";
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["email"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope.
*/
"users/list-emails-for-authenticated": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["email"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* This endpoint is accessible with the `user` scope.
*/
"users/add-email-for-authenticated": {
parameters: {};
requestBody: {
"application/json":
| {
/**
* Adds one or more email addresses to your GitHub account. Must contain at least one email address. **Note:** Alternatively, you can pass a single email address or an `array` of emails addresses directly, but we recommend that you pass an object using the `emails` key.
*/
emails: string[];
}
| string[]
| string;
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["email"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* This endpoint is accessible with the `user` scope.
*/
"users/delete-email-for-authenticated": {
parameters: {};
requestBody: {
"application/json":
| {
/**
* Email addresses associated with the GitHub user account.
*/
emails: string[];
}
| string[]
| string;
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* Lists the people following the authenticated user.
*/
"users/list-followers-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
/**
* Lists the people who the authenticated user follows.
*/
"users/list-followed-by-authenticated": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
"users/check-person-is-followed-by-authenticated": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* Response if the person is followed by the authenticated user
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
/**
* Response if the person is not followed by the authenticated user
*/
"404": {
"application/json": components["schemas"]["basic-error"];
};
};
};
/**
* Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*
* Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope.
*/
"users/follow": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope.
*/
"users/unfollow": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/list-gpg-keys-for-authenticated": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["gpg-key"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/create-gpg-key-for-authenticated": {
parameters: {};
requestBody: {
"application/json": {
/**
* A GPG key in ASCII-armored format.
*/
armored_public_key: string;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["gpg-key"];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/get-gpg-key-for-authenticated": {
parameters: {
path: {
gpg_key_id: components["parameters"]["gpg_key_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["gpg-key"];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/delete-gpg-key-for-authenticated": {
parameters: {
path: {
gpg_key_id: components["parameters"]["gpg_key_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access.
*
* You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint.
*
* The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership.
*
* You can find the permissions for the installation under the `permissions` key.
*/
"apps/list-installations-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* You can find the permissions for the installation under the `permissions` key.
*/
"200": {
"application/json": {
total_count?: number;
installations?: components["schemas"]["installation"][];
};
};
"304": never;
"401": unknown;
"403": unknown;
"415": unknown;
};
};
/**
* List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation.
*
* The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership.
*
* You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint.
*
* The access the user has to each repository is included in the hash under the `permissions` key.
*/
"apps/list-installation-repos-for-authenticated-user": {
parameters: {
path: {
installation_id: components["parameters"]["installation_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* The access the user has to each repository is included in the hash under the `permissions` key.
*/
"200": {
"application/json": {
total_count?: number;
repository_selection?: string;
repositories?: components["schemas"]["repository"][];
};
};
"304": never;
"403": unknown;
"404": unknown;
};
};
/**
* Add a single repository to an installation. The authenticated user must have admin access to the repository.
*
   * You must use a personal access token (which you can create via the [command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations#create-a-new-authorization) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint.
*/
"apps/add-repo-to-installation": {
parameters: {
path: {
installation_id: components["parameters"]["installation_id"];
repository_id: components["parameters"]["repository_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"403": unknown;
"404": unknown;
};
};
/**
* Remove a single repository from an installation. The authenticated user must have admin access to the repository.
*
   * You must use a personal access token (which you can create via the [command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations#create-a-new-authorization) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint.
*/
"apps/remove-repo-from-installation": {
parameters: {
path: {
installation_id: components["parameters"]["installation_id"];
repository_id: components["parameters"]["repository_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"403": unknown;
"404": unknown;
};
};
/**
* Shows which type of GitHub user can interact with your public repositories and when the restriction expires. If there are no restrictions, you will see an empty response.
*/
"interactions/get-restrictions-for-your-public-repos": {
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["interaction-limit-response"];
};
};
};
/**
* Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user.
*/
"interactions/set-restrictions-for-your-public-repos": {
requestBody: {
"application/json": components["schemas"]["interaction-limit"];
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["interaction-limit-response"];
};
"422": unknown;
};
};
/**
* Removes any interaction restrictions from your public repositories.
*/
"interactions/remove-restrictions-for-your-public-repos": {
responses: {
/**
* Empty response
*/
"204": never;
};
};
/**
* List issues across owned and member repositories assigned to the authenticated user.
*
* **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this
* reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by
* the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull
* request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
*/
"issues/list-for-authenticated-user": {
parameters: {
query: {
/**
* Indicates which sorts of issues to return. Can be one of:
* \* `assigned`: Issues assigned to you
* \* `created`: Issues created by you
* \* `mentioned`: Issues mentioning you
* \* `subscribed`: Issues you're subscribed to updates for
* \* `all`: All issues the authenticated user can see, regardless of participation or creation
*/
filter?: "assigned" | "created" | "mentioned" | "subscribed" | "all";
/**
* Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`.
*/
state?: "open" | "closed" | "all";
labels?: components["parameters"]["labels"];
/**
* What to sort results by. Can be either `created`, `updated`, `comments`.
*/
sort?: "created" | "updated" | "comments";
direction?: components["parameters"]["direction"];
since?: components["parameters"]["since"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["issue"][];
};
"304": never;
"404": unknown;
};
};
/**
* Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/list-public-ssh-keys-for-authenticated": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["key"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/create-public-ssh-key-for-authenticated": {
parameters: {};
requestBody: {
"application/json": {
/**
* A descriptive name for the new key.
*/
title?: string;
/**
* The public SSH key to add to your GitHub account.
*/
key: string;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["key"];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/get-public-ssh-key-for-authenticated": {
parameters: {
path: {
key_id: components["parameters"]["key_id"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["key"];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
*/
"users/delete-public-ssh-key-for-authenticated": {
parameters: {
path: {
key_id: components["parameters"]["key_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
   * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/).
*/
"apps/list-subscriptions-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["user-marketplace-purchase"][];
};
"304": never;
"401": unknown;
"404": unknown;
};
};
/**
   * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/).
*/
"apps/list-subscriptions-for-authenticated-user-stubbed": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["user-marketplace-purchase"][];
};
"304": never;
"401": unknown;
};
};
"orgs/list-memberships-for-authenticated-user": {
parameters: {
query: {
/**
* Indicates the state of the memberships to return. Can be either `active` or `pending`. If not specified, the API returns both active and pending memberships.
*/
state?: "active" | "pending";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-membership"][];
};
"304": never;
"401": unknown;
"403": unknown;
"422": unknown;
};
};
"orgs/get-membership-for-authenticated-user": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-membership"];
};
"403": unknown;
"404": unknown;
};
};
"orgs/update-membership-for-authenticated-user": {
parameters: {
path: {
org: components["parameters"]["org"];
};
};
requestBody: {
"application/json": {
/**
* The state that the membership should be in. Only `"active"` will be accepted.
*/
state: "active";
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["org-membership"];
};
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* Lists all migrations a user has started.
*/
"migrations/list-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["migration"][];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
/**
* Initiates the generation of a user migration archive.
*/
"migrations/start-for-authenticated-user": {
parameters: {};
requestBody: {
"application/json": {
/**
* Lock the repositories being migrated at the start of the migration
*/
lock_repositories?: boolean;
/**
* Do not include attachments in the migration
*/
exclude_attachments?: boolean;
/**
* Exclude attributes from the API response to improve performance
*/
exclude?: "repositories"[];
repositories: string[];
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["migration"];
};
"304": never;
"401": unknown;
"403": unknown;
"422": unknown;
};
};
/**
* Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values:
*
* * `pending` - the migration hasn't started yet.
* * `exporting` - the migration is in progress.
* * `exported` - the migration finished successfully.
* * `failed` - the migration failed.
*
* Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive).
*/
"migrations/get-status-for-authenticated-user": {
parameters: {
path: {
migration_id: components["parameters"]["migration_id"];
};
query: {
exclude?: string[];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["migration"];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects:
*
* * attachments
* * bases
* * commit\_comments
* * issue\_comments
* * issue\_events
* * issues
* * milestones
* * organizations
* * projects
* * protected\_branches
* * pull\_request\_reviews
* * pull\_requests
* * releases
* * repositories
* * review\_comments
* * schema
* * users
*
* The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data.
*/
"migrations/get-archive-for-authenticated-user": {
parameters: {
path: {
migration_id: components["parameters"]["migration_id"];
};
};
responses: {
/**
* response
*/
"302": never;
"304": never;
"401": unknown;
"403": unknown;
};
};
/**
* Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted.
*/
"migrations/delete-archive-for-authenticated-user": {
parameters: {
path: {
migration_id: components["parameters"]["migration_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked.
*/
"migrations/unlock-repo-for-authenticated-user": {
parameters: {
path: {
migration_id: components["parameters"]["migration_id"];
repo_name: components["parameters"]["repo_name"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Lists all the repositories for this user migration.
*/
"migrations/list-repos-for-user": {
parameters: {
path: {
migration_id: components["parameters"]["migration_id"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
"404": unknown;
};
};
/**
* List organizations for the authenticated user.
*
* **OAuth scope requirements**
*
* This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response.
*/
"orgs/list-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["organization-simple"][];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
"projects/create-for-authenticated-user": {
parameters: {};
requestBody: {
"application/json": {
/**
* Name of the project
*/
name: string;
/**
* Body of the project
*/
body?: string | null;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["project"];
};
"304": never;
"401": unknown;
"403": unknown;
"415": unknown;
"422": unknown;
};
};
/**
* Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope.
*/
"users/list-public-emails-for-authenticated": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["email"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access.
*
* The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership.
*/
"repos/list-for-authenticated-user": {
parameters: {
query: {
/**
* Can be one of `all`, `public`, or `private`.
*/
visibility?: "all" | "public" | "private";
/**
* Comma-separated list of values. Can include:
* \* `owner`: Repositories that are owned by the authenticated user.
* \* `collaborator`: Repositories that the user has been added to as a collaborator.
* \* `organization_member`: Repositories that the user has access to through being a member of an organization. This includes every repository on every team that the user is on.
*/
affiliation?: string;
/**
* Can be one of `all`, `owner`, `public`, `private`, `member`. Default: `all`
*
       * Will cause a `422` error if used in the same request as **visibility** or **affiliation**.
*/
type?: "all" | "owner" | "public" | "private" | "member";
/**
* Can be one of `created`, `updated`, `pushed`, `full_name`.
*/
sort?: "created" | "updated" | "pushed" | "full_name";
/**
* Can be one of `asc` or `desc`. Default: `asc` when using `full_name`, otherwise `desc`
*/
direction?: "asc" | "desc";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
since?: components["parameters"]["since"];
before?: components["parameters"]["before"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["repository"][];
};
"304": never;
"401": unknown;
"403": unknown;
/**
* Response definition missing
*/
"418": unknown;
"422": unknown;
};
};
/**
* Creates a new repository for the authenticated user.
*
* **OAuth scope requirements**
*
* When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include:
*
* * `public_repo` scope or `repo` scope to create a public repository
* * `repo` scope to create a private repository
*/
"repos/create-for-authenticated-user": {
parameters: {};
requestBody: {
"application/json": {
/**
* The name of the repository.
*/
name: string;
/**
* A short description of the repository.
*/
description?: string;
/**
* A URL with more information about the repository.
*/
homepage?: string;
/**
* Whether the repository is private or public.
*/
private?: boolean;
/**
* Whether issues are enabled.
*/
has_issues?: boolean;
/**
* Whether projects are enabled.
*/
has_projects?: boolean;
/**
* Whether the wiki is enabled.
*/
has_wiki?: boolean;
/**
* The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization.
*/
team_id?: number;
/**
* Whether the repository is initialized with a minimal README.
*/
auto_init?: boolean;
/**
* The desired language or platform to apply to the .gitignore.
*/
gitignore_template?: string;
/**
* The license keyword of the open source license for this repository.
*/
license_template?: string;
/**
* Whether to allow squash merges for pull requests.
*/
allow_squash_merge?: boolean;
/**
* Whether to allow merge commits for pull requests.
*/
allow_merge_commit?: boolean;
/**
* Whether to allow rebase merges for pull requests.
*/
allow_rebase_merge?: boolean;
/**
* Whether to delete head branches when pull requests are merged
*/
delete_branch_on_merge?: boolean;
/**
* Whether downloads are enabled.
*/
has_downloads?: boolean;
/**
* Whether this repository acts as a template that can be used to generate new repositories.
*/
is_template?: boolean;
};
};
responses: {
/**
* response
*/
"201": {
"application/json": components["schemas"]["repository"];
};
"304": never;
"400": unknown;
"401": unknown;
"403": unknown;
"404": unknown;
"422": unknown;
};
};
/**
* When authenticating as a user, this endpoint will list all currently open repository invitations for that user.
*/
"repos/list-invitations-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["repository-invitation"][];
};
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
"repos/accept-invitation": {
parameters: {
path: {
invitation_id: components["parameters"]["invitation_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"403": unknown;
"404": unknown;
"409": unknown;
};
};
"repos/decline-invitation": {
parameters: {
path: {
invitation_id: components["parameters"]["invitation_id"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"403": unknown;
"404": unknown;
"409": unknown;
};
};
/**
* Lists repositories the authenticated user has starred.
*
* You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header:
*/
"activity/list-repos-starred-by-authenticated-user": {
parameters: {
query: {
sort?: components["parameters"]["sort"];
direction?: components["parameters"]["direction"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["repository"][];
"application/vnd.github.v3.star+json": components["schemas"]["starred-repository"][];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
"activity/check-repo-is-starred-by-authenticated-user": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Response if this repository is starred by you
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
/**
* Response if this repository is not starred by you
*/
"404": {
"application/json": components["schemas"]["basic-error"];
};
};
};
/**
* Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
*/
"activity/star-repo-for-authenticated-user": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
"activity/unstar-repo-for-authenticated-user": {
parameters: {
path: {
owner: components["parameters"]["owner"];
repo: components["parameters"]["repo"];
};
};
responses: {
/**
* Empty response
*/
"204": never;
"304": never;
"401": unknown;
"403": unknown;
"404": unknown;
};
};
/**
* Lists repositories the authenticated user is watching.
*/
"activity/list-watched-repos-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
"304": never;
"401": unknown;
"403": unknown;
};
};
/**
* List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/).
*/
"teams/list-for-authenticated-user": {
parameters: {
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["team-full"][];
};
"304": never;
"403": unknown;
"404": unknown;
};
};
/**
* Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts.
*
* Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users.
*/
"users/list": {
parameters: {
query: {
since?: components["parameters"]["since-user"];
per_page?: components["parameters"]["per_page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
"304": never;
};
};
/**
* Provides publicly available information about someone with a GitHub account.
*
   * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below.
*
* The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication).
*
* The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)".
*/
"users/get-by-username": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json":
| components["schemas"]["private-user"]
| components["schemas"]["public-user"];
};
"404": unknown;
};
};
/**
* If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events.
*/
"activity/list-events-for-authenticated-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["event"][];
};
};
};
/**
* This is the user's organization dashboard. You must be authenticated as the user to view this.
*/
"activity/list-org-events-for-authenticated-user": {
parameters: {
path: {
username: components["parameters"]["username"];
org: components["parameters"]["org"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["event"][];
};
};
};
"activity/list-public-events-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["event"][];
};
};
};
/**
* Lists the people following the specified user.
*/
"users/list-followers-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
};
};
/**
* Lists the people who the specified user follows.
*/
"users/list-following-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["simple-user"][];
};
};
};
"users/check-following-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
target_user: string;
};
};
responses: {
/**
* Response if the user follows the target user
*/
"204": never;
/**
* Response if the user does not follow the target user
*/
"404": unknown;
};
};
/**
* Lists public gists for the specified user:
*/
"gists/list-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
since?: components["parameters"]["since"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["base-gist"][];
};
"422": unknown;
};
};
/**
* Lists the GPG keys for a user. This information is accessible by anyone.
*/
"users/list-gpg-keys-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["gpg-key"][];
};
};
};
/**
* Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations.
*
* The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this:
*
* ```shell
* curl -u username:token
* https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192
* ```
*/
"users/get-context-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
/**
* Identifies which additional information you'd like to receive about the person's hovercard. Can be `organization`, `repository`, `issue`, `pull_request`. **Required** when using `subject_id`.
*/
subject_type?: "organization" | "repository" | "issue" | "pull_request";
/**
* Uses the ID for the `subject_type` you specified. **Required** when using `subject_type`.
*/
subject_id?: string;
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["hovercard"];
};
"404": unknown;
"422": unknown;
};
};
/**
* Enables an authenticated GitHub App to find the user’s installation information.
*
* You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
*/
"apps/get-user-installation": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["installation"];
};
};
};
/**
* Lists the _verified_ public SSH keys for a user. This is accessible by anyone.
*/
"users/list-public-keys-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["key-simple"][];
};
};
};
/**
* List [public organization memberships](https://help.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user.
*
* This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead.
*/
"orgs/list-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["organization-simple"][];
};
};
};
"projects/list-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
/**
* Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`.
*/
state?: "open" | "closed" | "all";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["project"][];
};
"415": unknown;
"422": unknown;
};
};
/**
* These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events.
*/
"activity/list-received-events-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["event"][];
};
};
};
"activity/list-received-public-events-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["event"][];
};
};
};
/**
* Lists public repositories for the specified user.
*/
"repos/list-for-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
/**
* Can be one of `all`, `owner`, `member`.
*/
type?: "all" | "owner" | "member";
/**
* Can be one of `created`, `updated`, `pushed`, `full_name`.
*/
sort?: "created" | "updated" | "pushed" | "full_name";
/**
* Can be one of `asc` or `desc`. Default: `asc` when using `full_name`, otherwise `desc`
*/
direction?: "asc" | "desc";
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
};
};
/**
* Gets the summary of the free and paid GitHub Actions minutes used.
*
* Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)".
*
* Access tokens must have the `user` scope.
*/
"billing/get-github-actions-billing-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["actions-billing-usage"];
};
};
};
/**
* Gets the free and paid storage used for GitHub Packages in gigabytes.
*
* Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)."
*
* Access tokens must have the `user` scope.
*/
"billing/get-github-packages-billing-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["packages-billing-usage"];
};
};
};
/**
   * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages.
*
* Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)."
*
* Access tokens must have the `user` scope.
*/
"billing/get-shared-storage-billing-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["combined-billing-usage"];
};
};
};
/**
* Lists repositories a user has starred.
*
* You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header:
*/
"activity/list-repos-starred-by-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
sort?: components["parameters"]["sort"];
direction?: components["parameters"]["direction"];
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["repository"][];
"application/vnd.github.v3.star+json": components["schemas"]["starred-repository"][];
};
};
};
/**
* Lists repositories a user is watching.
*/
"activity/list-repos-watched-by-user": {
parameters: {
path: {
username: components["parameters"]["username"];
};
query: {
per_page?: components["parameters"]["per_page"];
page?: components["parameters"]["page"];
};
};
responses: {
/**
* response
*/
"200": {
"application/json": components["schemas"]["minimal-repository"][];
};
};
};
/**
* Get a random sentence from the Zen of GitHub
*/
"meta/get-zen": {
responses: {
/**
* response
*/
"200": {
"text/plain": string;
};
};
};
}
export interface components {
parameters: {
/**
* Results per page (max 100)
*/
per_page: number;
/**
* Page number of the results to fetch.
*/
page: number;
/**
* Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
since: string;
/**
* installation_id parameter
*/
installation_id: number;
/**
* grant_id parameter
*/
grant_id: number;
"client-id": string;
"access-token": string;
app_slug: string;
/**
* authorization_id parameter
*/
authorization_id: number;
/**
* The slug version of the enterprise name. You can also substitute this value with the enterprise id.
*/
enterprise: string;
/**
* Unique identifier of an organization.
*/
org_id: number;
/**
* Unique identifier of the self-hosted runner group.
*/
runner_group_id: number;
/**
* Unique identifier of the self-hosted runner.
*/
runner_id: number;
/**
* gist_id parameter
*/
gist_id: string;
/**
* comment_id parameter
*/
comment_id: number;
/**
* A list of comma separated label names. Example: `bug,ui,@high`
*/
labels: string;
/**
* One of `asc` (ascending) or `desc` (descending).
*/
direction: "asc" | "desc";
/**
* account_id parameter
*/
account_id: number;
/**
* plan_id parameter
*/
plan_id: number;
/**
* One of `created` (when the repository was starred) or `updated` (when it was last pushed to).
*/
sort: "created" | "updated";
owner: string;
repo: string;
/**
* If `true`, show notifications marked as read.
*/
all: boolean;
/**
* If `true`, only shows notifications in which the user is directly participating or mentioned.
*/
participating: boolean;
/**
* Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
before: string;
/**
* thread_id parameter
*/
thread_id: number;
/**
* An organization ID. Only return organizations with an ID greater than this ID.
*/
"since-org": number;
org: string;
repository_id: number;
/**
* secret_name parameter
*/
secret_name: string;
username: string;
"hook-id": number;
/**
* invitation_id parameter
*/
invitation_id: number;
/**
* migration_id parameter
*/
migration_id: number;
/**
* repo_name parameter
*/
repo_name: string;
/**
* team_slug parameter
*/
team_slug: string;
"discussion-number": number;
"comment-number": number;
"reaction-id": number;
"project-id": number;
/**
* card_id parameter
*/
card_id: number;
/**
* column_id parameter
*/
column_id: number;
/**
* artifact_id parameter
*/
artifact_id: number;
/**
* job_id parameter
*/
job_id: number;
/**
* Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run.
*/
actor: string;
/**
* Returns workflow runs associated with a branch. Use the name of the branch of the `push`.
*/
"workflow-run-branch": string;
/**
* Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)."
*/
event: string;
/**
* Returns workflow runs associated with the check run `status` or `conclusion` you specify. For example, a conclusion can be `success` or a status can be `completed`. For more information, see the `status` and `conclusion` options available in "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)."
*/
"workflow-run-status": "completed" | "status" | "conclusion";
"run-id": number;
/**
* The ID of the workflow. You can also pass the workflow file name as a string.
*/
"workflow-id": number | string;
/**
* branch+ parameter
*/
branch: string;
/**
* check_run_id parameter
*/
check_run_id: number;
/**
* check_suite_id parameter
*/
check_suite_id: number;
/**
* Returns check runs with the specified `name`.
*/
check_name: string;
/**
* Returns check runs with the specified `status`. Can be one of `queued`, `in_progress`, or `completed`.
*/
status: "queued" | "in_progress" | "completed";
/**
* The security alert number, found at the end of the security alert's URL.
*/
alert_number: components["schemas"]["alert-number"];
/**
* commit_sha+ parameter
*/
commit_sha: string;
/**
* deployment_id parameter
*/
deployment_id: number;
/**
* A user ID. Only return users with an ID greater than this ID.
*/
"since-user": number;
/**
* issue_number parameter
*/
issue_number: number;
/**
* key_id parameter
*/
key_id: number;
/**
* milestone_number parameter
*/
milestone_number: number;
"pull-number": number;
/**
* review_id parameter
*/
review_id: number;
/**
* asset_id parameter
*/
asset_id: number;
/**
* release_id parameter
*/
release_id: number;
/**
* Must be one of: `day`, `week`.
*/
per: "day" | "week";
/**
* A repository ID. Only return repositories with an ID greater than this ID.
*/
"since-repo": number;
/**
* Used for pagination: the index of the first result to return.
*/
start_index: number;
/**
* Used for pagination: the number of results to return.
*/
count: number;
/**
* Identifier generated by the GitHub SCIM endpoint.
*/
scim_group_id: string;
/**
* scim_user_id parameter
*/
scim_user_id: string;
/**
* Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`.
*/
order: "desc" | "asc";
"team-id": number;
/**
* gpg_key_id parameter
*/
gpg_key_id: number;
};
schemas: {
/**
* Simple User
*/
"simple-user": {
login: string;
id: number;
node_id: string;
avatar_url: string;
gravatar_id: string | null;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
starred_at?: string;
} | null;
/**
* GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub.
*/
integration: {
/**
* Unique identifier of the GitHub app
*/
id: number;
/**
* The slug name of the GitHub app
*/
slug?: string;
node_id: string;
owner: components["schemas"]["simple-user"] | null;
/**
* The name of the GitHub app
*/
name: string;
description: string | null;
external_url: string;
html_url: string;
created_at: string;
updated_at: string;
/**
* The set of permissions for the GitHub app
*/
permissions: {
issues?: string;
checks?: string;
metadata?: string;
contents?: string;
deployments?: string;
} & { [key: string]: string };
/**
* The list of events for the GitHub app
*/
events: string[];
/**
* The number of installations associated with the GitHub app
*/
installations_count?: number;
client_id?: string;
client_secret?: string;
webhook_secret?: string;
pem?: string;
} & { [key: string]: any };
/**
* Basic Error
*/
"basic-error": { message?: string; documentation_url?: string };
/**
* Validation Error Simple
*/
"validation-error-simple": {
message: string;
documentation_url: string;
errors?: string[];
};
/**
* The URL to which the payloads will be delivered.
*/
"webhook-config-url": string;
/**
* The media type used to serialize the payloads. Supported values include `json` and `form`. The default is `form`.
*/
"webhook-config-content-type": string;
/**
* If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers).
*/
"webhook-config-secret": string;
/**
* Determines whether the SSL certificate of the host for `url` will be verified when delivering payloads. Supported values include `0` (verification is performed) and `1` (verification is not performed). The default is `0`. **We strongly recommend not setting this to `1` as you are subject to man-in-the-middle and other attacks.**
*/
"webhook-config-insecure-ssl": string;
/**
* Configuration object of the webhook
*/
"webhook-config": {
url?: components["schemas"]["webhook-config-url"];
content_type?: components["schemas"]["webhook-config-content-type"];
secret?: components["schemas"]["webhook-config-secret"];
insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
};
/**
* An enterprise account
*/
enterprise: {
/**
* A short description of the enterprise.
*/
description?: string | null;
html_url: string;
/**
* The enterprise's website URL.
*/
website_url?: string | null;
/**
* Unique identifier of the enterprise
*/
id: number;
node_id: string;
/**
* The name of the enterprise.
*/
name: string;
/**
* The slug url identifier for the enterprise.
*/
slug: string;
created_at: string | null;
updated_at: string | null;
avatar_url: string;
};
/**
* Installation
*/
installation: {
/**
* The ID of the installation.
*/
id: number;
account:
| (Partial<components["schemas"]["simple-user"]> &
Partial<components["schemas"]["enterprise"]>)
| null;
/**
* Describe whether all repositories have been selected or there's a selection involved
*/
repository_selection: "all" | "selected";
access_tokens_url: string;
repositories_url: string;
html_url: string;
app_id: number;
/**
* The ID of the user or organization this token is being scoped to.
*/
target_id: number;
target_type: string;
permissions: {
deployments?: string;
checks?: string;
metadata?: string;
contents?: string;
pull_requests?: string;
statuses?: string;
issues?: string;
organization_administration?: string;
};
events: string[];
created_at: string;
updated_at: string;
single_file_name: string | null;
has_multiple_single_files?: boolean;
single_file_paths?: string[];
app_slug: string;
suspended_by?: components["schemas"]["simple-user"] | null;
suspended_at?: string | null;
contact_email?: string | null;
};
/**
* License Simple
*/
"license-simple": {
key: string;
name: string;
url: string | null;
spdx_id: string | null;
node_id: string;
html_url?: string;
};
/**
* A git repository
*/
repository: {
/**
* Unique identifier of the repository
*/
id: number;
node_id: string;
/**
* The name of the repository.
*/
name: string;
full_name: string;
license: components["schemas"]["license-simple"] | null;
forks: number;
permissions?: {
admin: boolean;
pull: boolean;
triage?: boolean;
push: boolean;
maintain?: boolean;
};
owner: components["schemas"]["simple-user"] | null;
/**
* Whether the repository is private or public.
*/
private: boolean;
html_url: string;
description: string | null;
fork: boolean;
url: string;
archive_url: string;
assignees_url: string;
blobs_url: string;
branches_url: string;
collaborators_url: string;
comments_url: string;
commits_url: string;
compare_url: string;
contents_url: string;
contributors_url: string;
deployments_url: string;
downloads_url: string;
events_url: string;
forks_url: string;
git_commits_url: string;
git_refs_url: string;
git_tags_url: string;
git_url: string;
issue_comment_url: string;
issue_events_url: string;
issues_url: string;
keys_url: string;
labels_url: string;
languages_url: string;
merges_url: string;
milestones_url: string;
notifications_url: string;
pulls_url: string;
releases_url: string;
ssh_url: string;
stargazers_url: string;
statuses_url: string;
subscribers_url: string;
subscription_url: string;
tags_url: string;
teams_url: string;
trees_url: string;
clone_url: string;
mirror_url: string | null;
hooks_url: string;
svn_url: string;
homepage: string | null;
language: string | null;
forks_count: number;
stargazers_count: number;
watchers_count: number;
size: number;
/**
* The default branch of the repository.
*/
default_branch: string;
open_issues_count: number;
/**
* Whether this repository acts as a template that can be used to generate new repositories.
*/
is_template?: boolean;
topics?: string[];
/**
* Whether issues are enabled.
*/
has_issues: boolean;
/**
* Whether projects are enabled.
*/
has_projects: boolean;
/**
* Whether the wiki is enabled.
*/
has_wiki: boolean;
has_pages: boolean;
/**
* Whether downloads are enabled.
*/
has_downloads: boolean;
/**
* Whether the repository is archived.
*/
archived: boolean;
/**
     * Returns whether or not this repository is disabled.
*/
disabled: boolean;
/**
* The repository visibility: public, private, or internal.
*/
visibility?: string;
pushed_at: string | null;
created_at: string | null;
updated_at: string | null;
/**
* Whether to allow rebase merges for pull requests.
*/
allow_rebase_merge?: boolean;
template_repository?: {
id?: number;
node_id?: string;
name?: string;
full_name?: string;
owner?: {
login?: string;
id?: number;
node_id?: string;
avatar_url?: string;
gravatar_id?: string;
url?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
repos_url?: string;
events_url?: string;
received_events_url?: string;
type?: string;
site_admin?: boolean;
};
private?: boolean;
html_url?: string;
description?: string;
fork?: boolean;
url?: string;
archive_url?: string;
assignees_url?: string;
blobs_url?: string;
branches_url?: string;
collaborators_url?: string;
comments_url?: string;
commits_url?: string;
compare_url?: string;
contents_url?: string;
contributors_url?: string;
deployments_url?: string;
downloads_url?: string;
events_url?: string;
forks_url?: string;
git_commits_url?: string;
git_refs_url?: string;
git_tags_url?: string;
git_url?: string;
issue_comment_url?: string;
issue_events_url?: string;
issues_url?: string;
keys_url?: string;
labels_url?: string;
languages_url?: string;
merges_url?: string;
milestones_url?: string;
notifications_url?: string;
pulls_url?: string;
releases_url?: string;
ssh_url?: string;
stargazers_url?: string;
statuses_url?: string;
subscribers_url?: string;
subscription_url?: string;
tags_url?: string;
teams_url?: string;
trees_url?: string;
clone_url?: string;
mirror_url?: string;
hooks_url?: string;
svn_url?: string;
homepage?: string;
language?: string;
forks_count?: number;
stargazers_count?: number;
watchers_count?: number;
size?: number;
default_branch?: string;
open_issues_count?: number;
is_template?: boolean;
topics?: string[];
has_issues?: boolean;
has_projects?: boolean;
has_wiki?: boolean;
has_pages?: boolean;
has_downloads?: boolean;
archived?: boolean;
disabled?: boolean;
visibility?: string;
pushed_at?: string;
created_at?: string;
updated_at?: string;
permissions?: { admin?: boolean; push?: boolean; pull?: boolean };
allow_rebase_merge?: boolean;
template_repository?: string;
temp_clone_token?: string;
allow_squash_merge?: boolean;
delete_branch_on_merge?: boolean;
allow_merge_commit?: boolean;
subscribers_count?: number;
network_count?: number;
} | null;
temp_clone_token?: string;
/**
* Whether to allow squash merges for pull requests.
*/
allow_squash_merge?: boolean;
/**
* Whether to delete head branches when pull requests are merged
*/
delete_branch_on_merge?: boolean;
/**
* Whether to allow merge commits for pull requests.
*/
allow_merge_commit?: boolean;
subscribers_count?: number;
network_count?: number;
open_issues: number;
watchers: number;
master_branch?: string;
starred_at?: string;
};
/**
* Authentication token for a GitHub App installed on a user or org.
*/
"installation-token": {
token?: string;
expires_at?: string;
permissions?: {
issues?: string;
contents?: string;
metadata?: string;
single_file?: string;
};
repository_selection?: "all" | "selected";
repositories?: components["schemas"]["repository"][];
single_file?: string;
has_multiple_single_files?: boolean;
single_file_paths?: string[];
};
/**
* Validation Error
*/
"validation-error": {
message: string;
documentation_url: string;
errors?: {
resource?: string;
field?: string;
message?: string;
code: string;
index?: number;
value?: string | number | string[];
}[];
};
/**
* The authorization associated with an OAuth Access.
*/
"application-grant": {
id: number;
url: string;
app: { client_id: string; name: string; url: string };
created_at: string;
updated_at: string;
scopes: string[];
user?: components["schemas"]["simple-user"] | null;
};
"scoped-installation": {
permissions: { [key: string]: any };
/**
* Describe whether all repositories have been selected or there's a selection involved
*/
repository_selection: "all" | "selected";
single_file_name: string | null;
has_multiple_single_files?: boolean;
single_file_paths?: string[];
repositories_url: string;
account: components["schemas"]["simple-user"];
};
/**
* The authorization for an OAuth app, GitHub App, or a Personal Access Token.
*/
authorization: {
id: number;
url: string;
/**
* A list of scopes that this authorization is in.
*/
scopes: string[] | null;
token: string;
token_last_eight: string | null;
hashed_token: string | null;
app: { client_id: string; name: string; url: string };
note: string | null;
note_url: string | null;
updated_at: string;
created_at: string;
fingerprint: string | null;
user?: components["schemas"]["simple-user"] | null;
installation?: components["schemas"]["scoped-installation"] | null;
};
/**
* Code Of Conduct
*/
"code-of-conduct": {
key: string;
name: string;
url: string;
body?: string;
html_url: string | null;
};
/**
* Content Reference attachments allow you to provide context around URLs posted in comments
*/
"content-reference-attachment": {
/**
* The ID of the attachment
*/
id: number;
/**
* The title of the attachment
*/
title: string;
/**
* The body of the attachment
*/
body: string;
/**
* The node_id of the content attachment
*/
node_id?: string;
};
/**
* The policy that controls the organizations in the enterprise that are allowed to run GitHub Actions. Can be one of: `all`, `none`, or `selected`.
*/
"enabled-organizations": "all" | "none" | "selected";
/**
* The permissions policy that controls the actions that are allowed to run. Can be one of: `all`, `local_only`, or `selected`.
*/
"allowed-actions": "all" | "local_only" | "selected";
/**
* The API URL to use to get or set the actions that are allowed to run, when `allowed_actions` is set to `selected`.
*/
"selected-actions-url": string;
"actions-enterprise-permissions": {
enabled_organizations?: components["schemas"]["enabled-organizations"];
/**
* The API URL to use to get or set the selected organizations that are allowed to run GitHub Actions, when `enabled_organizations` is set to `selected`.
*/
selected_organizations_url?: string;
allowed_actions?: components["schemas"]["allowed-actions"];
selected_actions_url?: components["schemas"]["selected-actions-url"];
};
/**
* Organization Simple
*/
"organization-simple": {
login: string;
id: number;
node_id: string;
url: string;
repos_url: string;
events_url: string;
hooks_url: string;
issues_url: string;
members_url: string;
public_members_url: string;
avatar_url: string;
description: string | null;
};
"selected-actions": {
/**
* Whether GitHub-owned actions are allowed. For example, this includes the actions in the `actions` organization.
*/
github_owned_allowed?: boolean;
/**
* Whether actions in GitHub Marketplace from verified creators are allowed. Set to `true` to allow all GitHub Marketplace actions by verified creators.
*/
verified_allowed?: boolean;
/**
* Specifies a list of string-matching patterns to allow specific action(s). Wildcards, tags, and SHAs are allowed. For example, `monalisa/octocat@*`, `monalisa/octocat@v2`, `monalisa/*`."
*/
patterns_allowed?: string[];
};
"runner-groups-enterprise": {
id?: number;
name?: string;
visibility?: string;
default?: boolean;
selected_organizations_url?: string;
runners_url?: string;
};
/**
* A self hosted runner
*/
runner: {
/**
* The id of the runner.
*/
id: number;
/**
* The name of the runner.
*/
name: string;
/**
* The Operating System of the runner.
*/
os: string;
/**
* The status of the runner.
*/
status: string;
busy: boolean;
labels: {
/**
* Unique identifier of the label.
*/
id?: number;
/**
* Name of the label.
*/
name?: string;
/**
* The type of label. Read-only labels are applied automatically when the runner is configured.
*/
type?: "read-only" | "custom";
}[];
};
/**
* Runner Application
*/
"runner-application": {
os?: string;
architecture?: string;
download_url?: string;
filename?: string;
};
/**
* Authentication Token
*/
"authentication-token": {
/**
* The token used for authentication
*/
token: string;
/**
* The time this token expires
*/
expires_at: string;
permissions?: { [key: string]: any };
/**
* The repositories this token has access to
*/
repositories?: components["schemas"]["repository"][];
single_file?: string | null;
/**
* Describe whether all repositories have been selected or there's a selection involved
*/
repository_selection?: "all" | "selected";
};
"actions-billing-usage": {
/**
* The sum of the free and paid GitHub Actions minutes used.
*/
total_minutes_used?: number;
/**
* The total paid GitHub Actions minutes used.
*/
total_paid_minutes_used?: number;
/**
* The amount of free GitHub Actions minutes available.
*/
included_minutes?: number;
minutes_used_breakdown?: {
/**
* Total minutes used on Ubuntu runner machines.
*/
UBUNTU?: number;
/**
* Total minutes used on macOS runner machines.
*/
MACOS?: number;
/**
* Total minutes used on Windows runner machines.
*/
WINDOWS?: number;
};
};
"packages-billing-usage": {
/**
     * Sum of the free and paid storage space (GB) for GitHub Packages.
*/
total_gigabytes_bandwidth_used?: number;
/**
     * Total paid storage space (GB) for GitHub Packages.
*/
total_paid_gigabytes_bandwidth_used?: number;
/**
* Free storage space (GB) for GitHub Packages.
*/
included_gigabytes_bandwidth?: number;
};
"combined-billing-usage": {
/**
* Numbers of days left in billing cycle.
*/
days_left_in_billing_cycle?: number;
/**
* Estimated storage space (GB) used in billing cycle.
*/
estimated_paid_storage_for_month?: number;
/**
* Estimated sum of free and paid storage space (GB) used in billing cycle.
*/
estimated_storage_for_month?: number;
};
/**
* Actor
*/
actor: {
id: number;
login: string;
display_login?: string;
gravatar_id: string | null;
url: string;
avatar_url: string;
};
/**
* A collection of related issues and pull requests.
*/
milestone: {
url: string;
html_url: string;
labels_url: string;
id: number;
node_id: string;
/**
* The number of the milestone.
*/
number: number;
/**
* The state of the milestone.
*/
state: "open" | "closed";
/**
* The title of the milestone.
*/
title: string;
description: string | null;
creator: components["schemas"]["simple-user"] | null;
open_issues: number;
closed_issues: number;
created_at: string;
updated_at: string;
closed_at: string | null;
due_on: string | null;
};
/**
* Issue Simple
*/
"issue-simple": {
id: number;
node_id: string;
url: string;
repository_url: string;
labels_url: string;
comments_url: string;
events_url: string;
html_url: string;
number: number;
state: string;
title: string;
body?: string;
user: components["schemas"]["simple-user"] | null;
labels: {
id?: number;
node_id?: string;
url?: string;
name?: string;
description?: string | null;
color?: string;
default?: boolean;
}[];
assignee: components["schemas"]["simple-user"] | null;
assignees?: components["schemas"]["simple-user"][] | null;
milestone: components["schemas"]["milestone"] | null;
locked: boolean;
active_lock_reason?: string | null;
comments: number;
pull_request?: {
merged_at?: string | null;
diff_url: string | null;
html_url: string | null;
patch_url: string | null;
url: string | null;
};
closed_at: string | null;
created_at: string;
updated_at: string;
author_association: string;
body_html?: string;
body_text?: string;
timeline_url?: string;
repository?: components["schemas"]["repository"];
performed_via_github_app?: components["schemas"]["integration"] | null;
};
"reaction-rollup": {
url: string;
total_count: number;
"+1": number;
"-1": number;
laugh: number;
confused: number;
heart: number;
hooray: number;
eyes: number;
rocket: number;
};
/**
* Comments provide a way for people to collaborate on an issue.
*/
"issue-comment": {
/**
* Unique identifier of the issue comment
*/
id: number;
node_id: string;
/**
* URL for the issue comment
*/
url: string;
/**
* Contents of the issue comment
*/
body?: string;
body_text?: string;
body_html?: string;
html_url: string;
user: components["schemas"]["simple-user"] | null;
created_at: string;
updated_at: string;
issue_url: string;
author_association:
| "collaborator"
| "contributor"
| "first_timer"
| "first_time_contributor"
| "mannequin"
| "member"
| "none"
| "owner";
performed_via_github_app?: components["schemas"]["integration"] | null;
reactions?: components["schemas"]["reaction-rollup"];
};
/**
* Event
*/
event: {
id: string;
type: string | null;
actor: components["schemas"]["actor"];
repo: { id: number; name: string; url: string };
org?: components["schemas"]["actor"];
payload: {
action: string;
issue?: components["schemas"]["issue-simple"];
comment?: components["schemas"]["issue-comment"];
pages?: {
page_name?: string;
title?: string;
summary?: string | null;
action?: string;
sha?: string;
html_url?: string;
}[];
};
public: boolean;
created_at: string | null;
};
/**
* Hypermedia Link with Type
*/
"link-with-type": { href: string; type: string };
/**
* Feed
*/
feed: {
timeline_url: string;
user_url: string;
current_user_public_url?: string;
current_user_url?: string;
current_user_actor_url?: string;
current_user_organization_url?: string;
current_user_organization_urls?: string[];
security_advisories_url?: string;
_links: {
timeline: components["schemas"]["link-with-type"];
user: components["schemas"]["link-with-type"];
security_advisories?: components["schemas"]["link-with-type"];
current_user?: components["schemas"]["link-with-type"];
current_user_public?: components["schemas"]["link-with-type"];
current_user_actor?: components["schemas"]["link-with-type"];
current_user_organization?: components["schemas"]["link-with-type"];
current_user_organizations?: components["schemas"]["link-with-type"][];
};
};
/**
* Base Gist
*/
"base-gist": {
url: string;
forks_url: string;
commits_url: string;
id: string;
node_id: string;
git_pull_url: string;
git_push_url: string;
html_url: string;
files: {
[key: string]: {
filename?: string;
type?: string;
language?: string;
raw_url?: string;
size?: number;
};
};
public: boolean;
created_at: string;
updated_at: string;
description: string | null;
comments: number;
user: components["schemas"]["simple-user"] | null;
comments_url: string;
owner?: components["schemas"]["simple-user"] | null;
truncated?: boolean;
forks?: { [key: string]: any }[];
history?: { [key: string]: any }[];
};
/**
* Gist Simple
*/
"gist-simple": {
url?: string;
forks_url?: string;
commits_url?: string;
id?: string;
node_id?: string;
git_pull_url?: string;
git_push_url?: string;
html_url?: string;
files?: {
[key: string]: {
filename?: string;
type?: string;
language?: string;
raw_url?: string;
size?: number;
truncated?: boolean;
content?: string;
};
};
public?: boolean;
created_at?: string;
updated_at?: string;
description?: string | null;
comments?: number;
user?: string | null;
comments_url?: string;
owner?: components["schemas"]["simple-user"];
truncated?: boolean;
};
/**
* Gist Full
*/
"gist-full": components["schemas"]["gist-simple"] & {
forks?: {
user?: {
login?: string;
id?: number;
node_id?: string;
avatar_url?: string;
gravatar_id?: string;
url?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
repos_url?: string;
events_url?: string;
received_events_url?: string;
type?: string;
site_admin?: boolean;
};
url?: string;
id?: string;
created_at?: string;
updated_at?: string;
}[];
history?: {
url?: string;
version?: string;
user?: {
login?: string;
id?: number;
node_id?: string;
avatar_url?: string;
gravatar_id?: string;
url?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
repos_url?: string;
events_url?: string;
received_events_url?: string;
type?: string;
site_admin?: boolean;
} | null;
change_status?: {
deletions?: number;
additions?: number;
total?: number;
};
committed_at?: string;
}[];
fork_of?: components["schemas"]["gist-simple"] | null;
url?: string;
forks_url?: string;
commits_url?: string;
id?: string;
node_id?: string;
git_pull_url?: string;
git_push_url?: string;
html_url?: string;
created_at?: string;
updated_at?: string;
description?: string | null;
comments?: number;
comments_url?: string;
};
/**
* A comment made to a gist.
*/
"gist-comment": {
id: number;
node_id: string;
url: string;
/**
* The comment text.
*/
body: string;
user: components["schemas"]["simple-user"] | null;
created_at: string;
updated_at: string;
author_association: string;
};
/**
* Gist Commit
*/
"gist-commit": {
url: string;
version: string;
user: components["schemas"]["simple-user"] | null;
change_status: { total?: number; additions?: number; deletions?: number };
committed_at: string;
};
/**
* Gitignore Template
*/
"gitignore-template": { name: string; source: string };
/**
* Issues are a great way to keep track of tasks, enhancements, and bugs for your projects.
*/
issue: {
id: number;
node_id: string;
/**
* URL for the issue
*/
url: string;
repository_url: string;
labels_url: string;
comments_url: string;
events_url: string;
html_url: string;
/**
* Number uniquely identifying the issue within its repository
*/
number: number;
/**
* State of the issue; either 'open' or 'closed'
*/
state: string;
/**
* Title of the issue
*/
title: string;
/**
* Contents of the issue
*/
body?: string;
user: components["schemas"]["simple-user"] | null;
/**
* Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository
*/
labels: (
| string
| {
id?: number;
node_id?: string;
url?: string;
name?: string;
description?: string | null;
color?: string | null;
default?: boolean;
}
)[];
assignee: components["schemas"]["simple-user"] | null;
assignees?: components["schemas"]["simple-user"][] | null;
milestone: components["schemas"]["milestone"] | null;
locked: boolean;
active_lock_reason?: string | null;
comments: number;
pull_request?: {
merged_at?: string | null;
diff_url: string | null;
html_url: string | null;
patch_url: string | null;
url: string | null;
};
closed_at: string | null;
created_at: string;
updated_at: string;
closed_by?: components["schemas"]["simple-user"] | null;
body_html?: string;
body_text?: string;
timeline_url?: string;
repository?: components["schemas"]["repository"];
performed_via_github_app?: components["schemas"]["integration"] | null;
author_association: string;
reactions?: components["schemas"]["reaction-rollup"];
};
/**
* License
*/
license: {
key: string;
name: string;
spdx_id: string | null;
url: string | null;
node_id: string;
html_url: string;
description: string;
implementation: string;
permissions: string[];
conditions: string[];
limitations: string[];
body: string;
featured: boolean;
};
/**
* Marketplace Listing Plan
*/
"marketplace-listing-plan": {
url: string;
accounts_url: string;
id: number;
number: number;
name: string;
description: string;
monthly_price_in_cents: number;
yearly_price_in_cents: number;
price_model: string;
has_free_trial: boolean;
unit_name: string | null;
state: string;
bullets: string[];
};
/**
* Marketplace Purchase
*/
"marketplace-purchase": {
url: string;
type: string;
id: number;
login: string;
organization_billing_email?: string;
marketplace_pending_change?: {
is_installed?: boolean;
effective_date?: string;
unit_count?: number | null;
id?: number;
plan?: components["schemas"]["marketplace-listing-plan"];
} | null;
marketplace_purchase: {
billing_cycle?: string;
next_billing_date?: string | null;
is_installed?: boolean;
unit_count?: number | null;
on_free_trial?: boolean;
free_trial_ends_on?: string | null;
updated_at?: string;
plan?: components["schemas"]["marketplace-listing-plan"];
};
};
/**
* Api Overview
*/
"api-overview": {
verifiable_password_authentication: boolean;
ssh_key_fingerprints?: { SHA256_RSA?: string; SHA256_DSA?: string };
hooks?: string[];
web?: string[];
api?: string[];
git?: string[];
pages?: string[];
importer?: string[];
github_services_sha?: string;
installed_version?: string;
};
/**
* Minimal Repository
*/
"minimal-repository": {
id: number;
node_id: string;
name: string;
full_name: string;
owner: components["schemas"]["simple-user"] | null;
private: boolean;
html_url: string;
description: string | null;
fork: boolean;
url: string;
archive_url: string;
assignees_url: string;
blobs_url: string;
branches_url: string;
collaborators_url: string;
comments_url: string;
commits_url: string;
compare_url: string;
contents_url: string;
contributors_url: string;
deployments_url: string;
downloads_url: string;
events_url: string;
forks_url: string;
git_commits_url: string;
git_refs_url: string;
git_tags_url: string;
git_url?: string;
issue_comment_url: string;
issue_events_url: string;
issues_url: string;
keys_url: string;
labels_url: string;
languages_url: string;
merges_url: string;
milestones_url: string;
notifications_url: string;
pulls_url: string;
releases_url: string;
ssh_url?: string;
stargazers_url: string;
statuses_url: string;
subscribers_url: string;
subscription_url: string;
tags_url: string;
teams_url: string;
trees_url: string;
clone_url?: string;
mirror_url?: string | null;
hooks_url: string;
svn_url?: string;
homepage?: string | null;
language?: string | null;
forks_count?: number;
stargazers_count?: number;
watchers_count?: number;
size?: number;
default_branch?: string;
open_issues_count?: number;
is_template?: boolean;
topics?: string[];
has_issues?: boolean;
has_projects?: boolean;
has_wiki?: boolean;
has_pages?: boolean;
has_downloads?: boolean;
archived?: boolean;
disabled?: boolean;
visibility?: string;
pushed_at?: string | null;
created_at?: string | null;
updated_at?: string | null;
permissions?: { admin?: boolean; push?: boolean; pull?: boolean };
template_repository?: string;
temp_clone_token?: string;
delete_branch_on_merge?: boolean;
subscribers_count?: number;
network_count?: number;
license?: {
key?: string;
name?: string;
spdx_id?: string;
url?: string;
node_id?: string;
} | null;
forks?: number;
open_issues?: number;
watchers?: number;
};
/**
* Thread
*/
thread: {
id?: string;
repository?: components["schemas"]["minimal-repository"];
subject?: {
title?: string;
url?: string;
latest_comment_url?: string;
type?: string;
};
reason?: string;
unread?: boolean;
updated_at?: string;
last_read_at?: string | null;
url?: string;
subscription_url?: string;
};
/**
* Thread Subscription
*/
"thread-subscription": {
subscribed: boolean;
ignored: boolean;
reason: string | null;
created_at: string | null;
url: string;
thread_url?: string;
repository_url?: string;
};
/**
* Organization Full
*/
"organization-full": {
login: string;
id: number;
node_id: string;
url: string;
repos_url: string;
events_url: string;
hooks_url: string;
issues_url: string;
members_url: string;
public_members_url: string;
avatar_url: string;
description: string | null;
name?: string;
company?: string;
blog?: string;
location?: string;
email?: string;
twitter_username?: string | null;
is_verified?: boolean;
has_organization_projects: boolean;
has_repository_projects: boolean;
public_repos: number;
public_gists: number;
followers: number;
following: number;
html_url: string;
created_at: string;
type: string;
total_private_repos?: number;
owned_private_repos?: number;
private_gists?: number | null;
disk_usage?: number | null;
collaborators?: number | null;
billing_email?: string | null;
plan?: {
name: string;
space: number;
private_repos: number;
filled_seats?: number;
seats?: number;
};
default_repository_permission?: string | null;
members_can_create_repositories?: boolean | null;
two_factor_requirement_enabled?: boolean | null;
members_allowed_repository_creation_type?: string;
members_can_create_public_repositories?: boolean;
members_can_create_private_repositories?: boolean;
members_can_create_internal_repositories?: boolean;
members_can_create_pages?: boolean;
updated_at: string;
};
/**
* The policy that controls the repositories in the organization that are allowed to run GitHub Actions. Can be one of: `all`, `none`, or `selected`.
*/
"enabled-repositories": "all" | "none" | "selected";
"actions-organization-permissions": {
enabled_repositories?: components["schemas"]["enabled-repositories"];
/**
* The API URL to use to get or set the selected repositories that are allowed to run GitHub Actions, when `enabled_repositories` is set to `selected`.
*/
selected_repositories_url?: string;
allowed_actions?: components["schemas"]["allowed-actions"];
selected_actions_url?: components["schemas"]["selected-actions-url"];
};
"runner-groups-org": {
id?: number;
name?: string;
visibility?: string;
default?: boolean;
selected_repositories_url?: string;
runners_url?: string;
inherited?: boolean;
};
/**
* Secrets for GitHub Actions for an organization.
*/
"organization-actions-secret": {
/**
* The name of the secret.
*/
name: string;
created_at: string;
updated_at: string;
/**
* Visibility of a secret
*/
visibility: "all" | "private" | "selected";
selected_repositories_url?: string;
};
/**
* The public key used for setting Actions Secrets.
*/
"actions-public-key": {
/**
* The identifier for the key.
*/
key_id: string;
/**
* The Base64 encoded public key.
*/
key: string;
id?: number;
url?: string;
title?: string;
created_at?: string;
};
/**
* Credential Authorization
*/
"credential-authorization": {
/**
* User login that owns the underlying credential.
*/
login: string;
/**
* Unique identifier for the credential.
*/
credential_id: number;
/**
* Human-readable description of the credential type.
*/
credential_type: string;
/**
* Last eight characters of the credential. Only included in responses with credential_type of personal access token.
*/
token_last_eight?: string;
/**
* Date when the credential was authorized for use.
*/
credential_authorized_at: string;
/**
* List of oauth scopes the token has been granted.
*/
scopes?: string[];
/**
* Unique string to distinguish the credential. Only included in responses with credential_type of SSH Key.
*/
fingerprint?: string;
/**
* Date when the credential was last accessed. May be null if it was never accessed
*/
credential_accessed_at?: string | null;
};
/**
* Org Hook
*/
"org-hook": {
id: number;
url: string;
ping_url: string;
name: string;
events: string[];
active: boolean;
config: {
url?: string;
insecure_ssl?: string;
content_type?: string;
secret?: string;
};
updated_at: string;
created_at: string;
type: string;
};
/**
* The type of GitHub user that can comment, open issues, or create pull requests while the interaction limit is in effect. Can be one of: `existing_users`, `contributors_only`, `collaborators_only`.
*/
"interaction-group":
| "existing_users"
| "contributors_only"
| "collaborators_only";
/**
* Interaction limit settings.
*/
"interaction-limit-response": {
limit: components["schemas"]["interaction-group"];
origin: string;
expires_at: string;
};
/**
* The duration of the interaction restriction. Can be one of: `one_day`, `three_days`, `one_week`, `one_month`, `six_months`. Default: `one_day`.
*/
"interaction-expiry":
| "one_day"
| "three_days"
| "one_week"
| "one_month"
| "six_months";
/**
* Limit interactions to a specific type of user for a specified duration
*/
"interaction-limit": {
limit: components["schemas"]["interaction-group"];
expiry?: components["schemas"]["interaction-expiry"];
};
/**
* Organization Invitation
*/
"organization-invitation": {
id?: number;
login?: string | null;
email?: string | null;
role?: string;
created_at?: string;
inviter?: components["schemas"]["simple-user"];
team_count?: number;
invitation_team_url?: string;
node_id?: string;
invitation_teams_url?: string;
};
/**
* Groups of organization members that gives permissions on specified repositories.
*/
"team-simple": {
/**
* Unique identifier of the team
*/
id: number;
node_id: string;
/**
* URL for the team
*/
url: string;
members_url: string;
/**
* Name of the team
*/
name: string;
/**
* Description of the team
*/
description: string | null;
/**
* Permission that the team will have for its repositories
*/
permission: string;
/**
* The level of privacy this team should have
*/
privacy?: string;
html_url: string;
repositories_url: string;
slug: string;
/**
* Distinguished Name (DN) that team maps to within LDAP environment
*/
ldap_dn?: string;
} | null;
/**
* Groups of organization members that gives permissions on specified repositories.
*/
team: {
id: number;
node_id: string;
name: string;
slug: string;
description: string | null;
privacy?: string;
permission: string;
url: string;
html_url: string;
members_url: string;
repositories_url: string;
parent?: components["schemas"]["team-simple"] | null;
};
/**
* Org Membership
*/
"org-membership": {
url: string;
state: string;
role: string;
organization_url: string;
organization: components["schemas"]["organization-simple"];
user: components["schemas"]["simple-user"] | null;
permissions?: { can_create_repository: boolean };
};
/**
* A migration.
*/
migration: {
id: number;
owner: components["schemas"]["simple-user"] | null;
guid: string;
state: string;
lock_repositories: boolean;
exclude_attachments: boolean;
repositories: components["schemas"]["repository"][];
url: string;
created_at: string;
updated_at: string;
node_id: string;
archive_url?: string;
exclude?: { [key: string]: any }[];
};
/**
* Projects are a way to organize columns and cards of work.
*/
project: {
owner_url: string;
url: string;
html_url: string;
columns_url: string;
id: number;
node_id: string;
/**
* Name of the project
*/
name: string;
/**
* Body of the project
*/
body: string | null;
number: number;
/**
* State of the project; either 'open' or 'closed'
*/
state: string;
creator: components["schemas"]["simple-user"] | null;
created_at: string;
updated_at: string;
/**
* The baseline permission that all organization members have on this project
*/
organization_permission?: "read" | "write" | "admin" | "none";
/**
* Whether or not this project can be seen by everyone.
*/
private?: boolean;
cards_url?: string;
permissions?: { read: boolean; write: boolean; admin: boolean };
};
/**
* External Groups to be mapped to a team for membership
*/
"group-mapping": {
/**
* Array of groups to be mapped to this team
*/
groups?: {
/**
* The ID of the group
*/
group_id: string;
/**
* The name of the group
*/
group_name: string;
/**
* A description of the group
*/
group_description: string;
}[];
/**
* The ID of the group
*/
group_id?: string;
/**
* The name of the group
*/
group_name?: string;
/**
* A description of the group
*/
group_description?: string;
/**
* synchronization status for this group mapping
*/
status?: string;
/**
* the time of the last sync for this group-mapping
*/
synced_at?: string;
};
/**
* Groups of organization members that gives permissions on specified repositories.
*/
"team-full": {
/**
* Unique identifier of the team
*/
id: number;
node_id: string;
/**
* URL for the team
*/
url: string;
html_url: string;
/**
* Name of the team
*/
name: string;
slug: string;
description: string | null;
/**
* The level of privacy this team should have
*/
privacy?: "closed" | "secret";
/**
* Permission that the team will have for its repositories
*/
permission: string;
members_url: string;
repositories_url: string;
parent?: components["schemas"]["team-simple"] | null;
members_count: number;
repos_count: number;
created_at: string;
updated_at: string;
organization: components["schemas"]["organization-full"];
/**
* Distinguished Name (DN) that team maps to within LDAP environment
*/
ldap_dn?: string;
};
/**
* A team discussion is a persistent record of a free-form conversation within a team.
*/
"team-discussion": {
author: components["schemas"]["simple-user"] | null;
/**
* The main text of the discussion.
*/
body: string;
body_html: string;
/**
* The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server.
*/
body_version: string;
comments_count: number;
comments_url: string;
created_at: string;
last_edited_at: string | null;
html_url: string;
node_id: string;
/**
* The unique sequence number of a team discussion.
*/
number: number;
/**
* Whether or not this discussion should be pinned for easy retrieval.
*/
pinned: boolean;
/**
* Whether or not this discussion should be restricted to team members and organization administrators.
*/
private: boolean;
team_url: string;
/**
* The title of the discussion.
*/
title: string;
updated_at: string;
url: string;
reactions?: components["schemas"]["reaction-rollup"];
};
/**
* A reply to a discussion within a team.
*/
"team-discussion-comment": {
author: components["schemas"]["simple-user"] | null;
/**
* The main text of the comment.
*/
body: string;
body_html: string;
/**
* The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server.
*/
body_version: string;
created_at: string;
last_edited_at: string | null;
discussion_url: string;
html_url: string;
node_id: string;
/**
* The unique sequence number of a team discussion comment.
*/
number: number;
updated_at: string;
url: string;
reactions?: components["schemas"]["reaction-rollup"];
};
/**
* Reactions to conversations provide a way to help people express their feelings more simply and effectively.
*/
reaction: {
id: number;
node_id: string;
user: components["schemas"]["simple-user"] | null;
/**
* The reaction to use
*/
content:
| "+1"
| "-1"
| "laugh"
| "confused"
| "heart"
| "hooray"
| "rocket"
| "eyes";
created_at: string;
};
/**
* Team Membership
*/
"team-membership": {
url: string;
/**
* The role of the user in the team.
*/
role: "member" | "maintainer";
state: string;
};
/**
* A team's access to a project.
*/
"team-project": {
owner_url?: string;
url?: string;
html_url?: string;
columns_url?: string;
id?: number;
node_id?: string;
name?: string;
body?: string | null;
number?: number;
state?: string;
creator?: components["schemas"]["simple-user"];
created_at?: string;
updated_at?: string;
organization_permission?: string;
private?: boolean;
permissions?: { read?: boolean; write?: boolean; admin?: boolean };
};
/**
* A team's access to a repository.
*/
"team-repository": {
/**
* Unique identifier of the repository
*/
id: number;
node_id: string;
/**
* The name of the repository.
*/
name: string;
full_name: string;
license: components["schemas"]["license-simple"] | null;
forks: number;
permissions?: {
admin: boolean;
pull: boolean;
triage?: boolean;
push: boolean;
maintain?: boolean;
};
owner: components["schemas"]["simple-user"] | null;
/**
* Whether the repository is private or public.
*/
private: boolean;
html_url: string;
description: string | null;
fork: boolean;
url: string;
archive_url: string;
assignees_url: string;
blobs_url: string;
branches_url: string;
collaborators_url: string;
comments_url: string;
commits_url: string;
compare_url: string;
contents_url: string;
contributors_url: string;
deployments_url: string;
downloads_url: string;
events_url: string;
forks_url: string;
git_commits_url: string;
git_refs_url: string;
git_tags_url: string;
git_url: string;
issue_comment_url: string;
issue_events_url: string;
issues_url: string;
keys_url: string;
labels_url: string;
languages_url: string;
merges_url: string;
milestones_url: string;
notifications_url: string;
pulls_url: string;
releases_url: string;
ssh_url: string;
stargazers_url: string;
statuses_url: string;
subscribers_url: string;
subscription_url: string;
tags_url: string;
teams_url: string;
trees_url: string;
clone_url: string;
mirror_url: string | null;
hooks_url: string;
svn_url: string;
homepage: string | null;
language: string | null;
forks_count: number;
stargazers_count: number;
watchers_count: number;
size: number;
/**
* The default branch of the repository.
*/
default_branch: string;
open_issues_count: number;
/**
* Whether this repository acts as a template that can be used to generate new repositories.
*/
is_template?: boolean;
topics?: string[];
/**
* Whether issues are enabled.
*/
has_issues: boolean;
/**
* Whether projects are enabled.
*/
has_projects: boolean;
/**
* Whether the wiki is enabled.
*/
has_wiki: boolean;
has_pages: boolean;
/**
* Whether downloads are enabled.
*/
has_downloads: boolean;
/**
* Whether the repository is archived.
*/
archived: boolean;
/**
* Returns whether or not this repository is disabled.
*/
disabled: boolean;
/**
* The repository visibility: public, private, or internal.
*/
visibility?: string;
pushed_at: string | null;
created_at: string | null;
updated_at: string | null;
/**
* Whether to allow rebase merges for pull requests.
*/
allow_rebase_merge?: boolean;
template_repository?: {
id?: number;
node_id?: string;
name?: string;
full_name?: string;
owner?: {
login?: string;
id?: number;
node_id?: string;
avatar_url?: string;
gravatar_id?: string;
url?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
repos_url?: string;
events_url?: string;
received_events_url?: string;
type?: string;
site_admin?: boolean;
};
private?: boolean;
html_url?: string;
description?: string;
fork?: boolean;
url?: string;
archive_url?: string;
assignees_url?: string;
blobs_url?: string;
branches_url?: string;
collaborators_url?: string;
comments_url?: string;
commits_url?: string;
compare_url?: string;
contents_url?: string;
contributors_url?: string;
deployments_url?: string;
downloads_url?: string;
events_url?: string;
forks_url?: string;
git_commits_url?: string;
git_refs_url?: string;
git_tags_url?: string;
git_url?: string;
issue_comment_url?: string;
issue_events_url?: string;
issues_url?: string;
keys_url?: string;
labels_url?: string;
languages_url?: string;
merges_url?: string;
milestones_url?: string;
notifications_url?: string;
pulls_url?: string;
releases_url?: string;
ssh_url?: string;
stargazers_url?: string;
statuses_url?: string;
subscribers_url?: string;
subscription_url?: string;
tags_url?: string;
teams_url?: string;
trees_url?: string;
clone_url?: string;
mirror_url?: string;
hooks_url?: string;
svn_url?: string;
homepage?: string;
language?: string;
forks_count?: number;
stargazers_count?: number;
watchers_count?: number;
size?: number;
default_branch?: string;
open_issues_count?: number;
is_template?: boolean;
topics?: string[];
has_issues?: boolean;
has_projects?: boolean;
has_wiki?: boolean;
has_pages?: boolean;
has_downloads?: boolean;
archived?: boolean;
disabled?: boolean;
visibility?: string;
pushed_at?: string;
created_at?: string;
updated_at?: string;
permissions?: { admin?: boolean; push?: boolean; pull?: boolean };
allow_rebase_merge?: boolean;
template_repository?: string;
temp_clone_token?: string;
allow_squash_merge?: boolean;
delete_branch_on_merge?: boolean;
allow_merge_commit?: boolean;
subscribers_count?: number;
network_count?: number;
} | null;
temp_clone_token?: string;
/**
* Whether to allow squash merges for pull requests.
*/
allow_squash_merge?: boolean;
/**
* Whether to delete head branches when pull requests are merged
*/
delete_branch_on_merge?: boolean;
/**
* Whether to allow merge commits for pull requests.
*/
allow_merge_commit?: boolean;
subscribers_count?: number;
network_count?: number;
open_issues: number;
watchers: number;
master_branch?: string;
};
/**
* Project cards represent a scope of work.
*/
"project-card": {
url: string;
/**
* The project card's ID
*/
id: number;
node_id: string;
note: string | null;
creator: components["schemas"]["simple-user"] | null;
created_at: string;
updated_at: string;
/**
* Whether or not the card is archived
*/
archived?: boolean;
column_url: string;
content_url?: string;
project_url: string;
};
/**
* Project columns contain cards of work.
*/
"project-column": {
url: string;
project_url: string;
cards_url: string;
/**
* The unique identifier of the project column
*/
id: number;
node_id: string;
/**
* Name of the project column
*/
name: string;
created_at: string;
updated_at: string;
};
/**
* Repository Collaborator Permission
*/
"repository-collaborator-permission": {
permission: string;
user: components["schemas"]["simple-user"] | null;
};
"rate-limit": { limit: number; remaining: number; reset: number };
/**
* Rate Limit Overview
*/
"rate-limit-overview": {
resources: {
core: components["schemas"]["rate-limit"];
graphql?: components["schemas"]["rate-limit"];
search: components["schemas"]["rate-limit"];
source_import?: components["schemas"]["rate-limit"];
integration_manifest?: components["schemas"]["rate-limit"];
code_scanning_upload?: components["schemas"]["rate-limit"];
};
rate: components["schemas"]["rate-limit"];
};
/**
* Full Repository
*/
"full-repository": {
id: number;
node_id: string;
name: string;
full_name: string;
owner: components["schemas"]["simple-user"] | null;
private: boolean;
html_url: string;
description: string | null;
fork: boolean;
url: string;
archive_url: string;
assignees_url: string;
blobs_url: string;
branches_url: string;
collaborators_url: string;
comments_url: string;
commits_url: string;
compare_url: string;
contents_url: string;
contributors_url: string;
deployments_url: string;
downloads_url: string;
events_url: string;
forks_url: string;
git_commits_url: string;
git_refs_url: string;
git_tags_url: string;
git_url: string;
issue_comment_url: string;
issue_events_url: string;
issues_url: string;
keys_url: string;
labels_url: string;
languages_url: string;
merges_url: string;
milestones_url: string;
notifications_url: string;
pulls_url: string;
releases_url: string;
ssh_url: string;
stargazers_url: string;
statuses_url: string;
subscribers_url: string;
subscription_url: string;
tags_url: string;
teams_url: string;
trees_url: string;
clone_url: string;
mirror_url: string | null;
hooks_url: string;
svn_url: string;
homepage: string | null;
language: string | null;
forks_count: number;
stargazers_count: number;
watchers_count: number;
size: number;
default_branch: string;
open_issues_count: number;
is_template?: boolean;
topics?: string[];
has_issues: boolean;
has_projects: boolean;
has_wiki: boolean;
has_pages: boolean;
has_downloads: boolean;
archived: boolean;
/**
* Returns whether or not this repository is disabled.
*/
disabled: boolean;
/**
* The repository visibility: public, private, or internal.
*/
visibility?: string;
pushed_at: string;
created_at: string;
updated_at: string;
permissions?: { admin: boolean; pull: boolean; push: boolean };
allow_rebase_merge?: boolean;
template_repository?: components["schemas"]["repository"] | null;
temp_clone_token?: string | null;
allow_squash_merge?: boolean;
delete_branch_on_merge?: boolean;
allow_merge_commit?: boolean;
subscribers_count: number;
network_count: number;
license: components["schemas"]["license-simple"] | null;
organization?: components["schemas"]["simple-user"] | null;
parent?: components["schemas"]["repository"];
source?: components["schemas"]["repository"];
forks: number;
master_branch?: string;
open_issues: number;
watchers: number;
/**
* Whether anonymous git access is allowed.
*/
anonymous_access_enabled?: boolean;
};
/**
* An artifact
*/
artifact: {
id: number;
node_id: string;
/**
* The name of the artifact.
*/
name: string;
/**
* The size in bytes of the artifact.
*/
size_in_bytes: number;
url: string;
archive_download_url: string;
/**
* Whether or not the artifact has expired.
*/
expired: boolean;
created_at: string | null;
expires_at: string;
updated_at: string | null;
};
/**
* Information of a job execution in a workflow run
*/
job: {
/**
* The id of the job.
*/
id: number;
/**
* The id of the associated workflow run.
*/
run_id: number;
run_url: string;
node_id: string;
/**
* The SHA of the commit that is being run.
*/
head_sha: string;
url: string;
html_url: string | null;
/**
* The phase of the lifecycle that the job is currently in.
*/
status: "queued" | "in_progress" | "completed";
/**
* The outcome of the job.
*/
conclusion: string | null;
/**
* The time that the job started, in ISO 8601 format.
*/
started_at: string;
/**
* The time that the job finished, in ISO 8601 format.
*/
completed_at: string | null;
/**
* The name of the job.
*/
name: string;
/**
* Steps in this job.
*/
steps?: {
/**
* The phase of the lifecycle that the job is currently in.
*/
status: "queued" | "in_progress" | "completed";
/**
* The outcome of the job.
*/
conclusion: string | null;
/**
* The name of the job.
*/
name: string;
number: number;
/**
* The time that the step started, in ISO 8601 format.
*/
started_at?: string | null;
/**
* The time that the step finished, in ISO 8601 format.
*/
completed_at?: string | null;
}[];
check_run_url: string;
};
/**
* Whether GitHub Actions is enabled on the repository.
*/
"actions-enabled": boolean;
"actions-repository-permissions": {
enabled?: components["schemas"]["actions-enabled"];
allowed_actions?: components["schemas"]["allowed-actions"];
selected_actions_url?: components["schemas"]["selected-actions-url"];
};
"pull-request-minimal": {
id: number;
number: number;
url: string;
head: {
ref: string;
sha: string;
repo: { id: number; url: string; name: string };
};
base: {
ref: string;
sha: string;
repo: { id: number; url: string; name: string };
};
};
/**
* Simple Commit
*/
"simple-commit": {
id: string;
tree_id: string;
message: string;
timestamp: string;
author: { name: string; email: string } | null;
committer: { name: string; email: string } | null;
};
/**
* An invocation of a workflow
*/
"workflow-run": {
/**
* The ID of the workflow run.
*/
id: number;
/**
* The name of the workflow run.
*/
name?: string;
node_id: string;
head_branch: string | null;
/**
* The SHA of the head commit that points to the version of the workflow being run.
*/
head_sha: string;
/**
* The auto incrementing run number for the workflow run.
*/
run_number: number;
event: string;
status: string | null;
conclusion: string | null;
/**
* The ID of the parent workflow.
*/
workflow_id: number;
/**
* The URL to the workflow run.
*/
url: string;
html_url: string;
pull_requests: components["schemas"]["pull-request-minimal"][] | null;
created_at: string | null;
updated_at: string | null;
/**
* The URL to the jobs for the workflow run.
*/
jobs_url: string;
/**
* The URL to download the logs for the workflow run.
*/
logs_url: string;
/**
* The URL to the associated check suite.
*/
check_suite_url: string;
/**
* The URL to the artifacts for the workflow run.
*/
artifacts_url: string;
/**
* The URL to cancel the workflow run.
*/
cancel_url: string;
/**
* The URL to rerun the workflow run.
*/
rerun_url: string;
/**
* The URL to the workflow.
*/
workflow_url: string;
head_commit: components["schemas"]["simple-commit"];
repository: components["schemas"]["minimal-repository"];
head_repository: components["schemas"]["minimal-repository"];
head_repository_id?: number;
};
/**
* Workflow Run Usage
*/
"workflow-run-usage": {
billable?: {
UBUNTU?: { total_ms?: number; jobs?: number };
MACOS?: { total_ms?: number; jobs?: number };
WINDOWS?: { total_ms?: number; jobs?: number };
};
run_duration_ms?: number;
};
/**
* Set secrets for GitHub Actions.
*/
"actions-secret": {
/**
* The name of the secret.
*/
name: string;
created_at: string;
updated_at: string;
};
/**
* A GitHub Actions workflow
*/
workflow: {
id: number;
node_id: string;
name: string;
path: string;
state: "active" | "deleted";
created_at: string;
updated_at: string;
url: string;
html_url: string;
badge_url: string;
deleted_at?: string;
};
/**
* Workflow Usage
*/
"workflow-usage": {
billable?: {
UBUNTU?: { total_ms?: number };
MACOS?: { total_ms?: number };
WINDOWS?: { total_ms?: number };
};
};
/**
* Protected Branch Admin Enforced
*/
"protected-branch-admin-enforced": { url: string; enabled: boolean };
/**
* Protected Branch Pull Request Review
*/
"protected-branch-pull-request-review": {
url?: string;
dismissal_restrictions?: {
/**
* The list of users with review dismissal access.
*/
users?: components["schemas"]["simple-user"][];
/**
* The list of teams with review dismissal access.
*/
teams?: components["schemas"]["team"][];
url?: string;
users_url?: string;
teams_url?: string;
};
dismiss_stale_reviews: boolean;
require_code_owner_reviews: boolean;
required_approving_review_count?: number;
};
/**
* Branch Restriction Policy
*/
"branch-restriction-policy": {
url: string;
users_url: string;
teams_url: string;
apps_url: string;
users: {
login?: string;
id?: number;
node_id?: string;
avatar_url?: string;
gravatar_id?: string;
url?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
repos_url?: string;
events_url?: string;
received_events_url?: string;
type?: string;
site_admin?: boolean;
}[];
teams: {
id?: number;
node_id?: string;
url?: string;
html_url?: string;
name?: string;
slug?: string;
description?: string | null;
privacy?: string;
permission?: string;
members_url?: string;
repositories_url?: string;
parent?: string | null;
}[];
apps: {
id?: number;
slug?: string;
node_id?: string;
owner?: {
login?: string;
id?: number;
node_id?: string;
url?: string;
repos_url?: string;
events_url?: string;
hooks_url?: string;
issues_url?: string;
members_url?: string;
public_members_url?: string;
avatar_url?: string;
description?: string;
gravatar_id?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
received_events_url?: string;
type?: string;
};
name?: string;
description?: string;
external_url?: string;
html_url?: string;
created_at?: string;
updated_at?: string;
permissions?: {
metadata?: string;
contents?: string;
issues?: string;
single_file?: string;
};
events?: string[];
}[];
};
/**
* Branch Protection
*/
"branch-protection": {
url?: string;
required_status_checks: {
url?: string;
enforcement_level: string;
contexts: string[];
contexts_url?: string;
};
enforce_admins?: components["schemas"]["protected-branch-admin-enforced"];
required_pull_request_reviews?: components["schemas"]["protected-branch-pull-request-review"];
restrictions?: components["schemas"]["branch-restriction-policy"];
required_linear_history?: { enabled?: boolean };
allow_force_pushes?: { enabled?: boolean };
allow_deletions?: { enabled?: boolean };
enabled: boolean;
name?: string;
protection_url?: string;
};
/**
* Short Branch
*/
"short-branch": {
name: string;
commit: { sha: string; url: string };
protected: boolean;
protection?: components["schemas"]["branch-protection"];
protection_url?: string;
};
/**
* Metaproperties for Git author/committer information.
*/
"git-user": { name?: string; email?: string; date?: string };
verification: {
verified: boolean;
reason: string;
payload: string | null;
signature: string | null;
};
/**
* Commit
*/
commit: {
url: string | null;
sha: string | null;
node_id: string;
html_url: string;
comments_url: string;
commit: {
url: string;
author: components["schemas"]["git-user"] | null;
committer: components["schemas"]["git-user"] | null;
message: string;
comment_count: number;
tree: { sha: string; url: string };
verification?: components["schemas"]["verification"];
};
author: components["schemas"]["simple-user"] | null;
committer: components["schemas"]["simple-user"] | null;
parents: { sha: string; url: string; html_url?: string }[];
stats?: { additions?: number; deletions?: number; total?: number };
files?: {
filename?: string;
additions?: number;
deletions?: number;
changes?: number;
status?: string;
raw_url?: string;
blob_url?: string;
patch?: string;
sha?: string;
contents_url?: string;
previous_filename?: string;
}[];
};
/**
* Branch With Protection
*/
"branch-with-protection": {
name: string;
commit: components["schemas"]["commit"];
_links: { html: string; self: string };
protected: boolean;
protection: components["schemas"]["branch-protection"];
protection_url: string;
pattern?: string;
required_approving_review_count?: number;
};
/**
* Status Check Policy
*/
"status-check-policy": {
url: string;
strict: boolean;
contexts: string[];
contexts_url: string;
};
/**
* Branch protections protect branches
*/
"protected-branch": {
url: string;
required_status_checks?: components["schemas"]["status-check-policy"];
required_pull_request_reviews?: {
url: string;
dismiss_stale_reviews?: boolean;
require_code_owner_reviews?: boolean;
required_approving_review_count?: number;
dismissal_restrictions?: {
url: string;
users_url: string;
teams_url: string;
users: components["schemas"]["simple-user"][];
teams: components["schemas"]["team"][];
};
};
required_signatures?: { url: string; enabled: boolean };
enforce_admins?: { url: string; enabled: boolean };
required_linear_history?: { enabled: boolean };
allow_force_pushes?: { enabled: boolean };
allow_deletions?: { enabled: boolean };
restrictions?: components["schemas"]["branch-restriction-policy"];
};
/**
* A check performed on the code of a given code change
*/
"check-run": {
/**
* The id of the check.
*/
id: number;
/**
* The SHA of the commit that is being checked.
*/
head_sha: string;
node_id: string;
external_id: string | null;
url: string;
html_url: string | null;
details_url: string | null;
/**
* The phase of the lifecycle that the check is currently in.
*/
status: "queued" | "in_progress" | "completed";
conclusion: string | null;
started_at: string | null;
completed_at: string | null;
output: {
title: string | null;
summary: string | null;
text: string | null;
annotations_count: number;
annotations_url: string;
};
/**
* The name of the check.
*/
name: string;
check_suite: { id: number } | null;
app: components["schemas"]["integration"] | null;
pull_requests: components["schemas"]["pull-request-minimal"][];
};
/**
* Check Annotation
*/
"check-annotation": {
path: string;
start_line: number;
end_line: number;
start_column: number | null;
end_column: number | null;
annotation_level: string | null;
title: string | null;
message: string | null;
raw_details: string | null;
blob_href: string;
};
/**
* A suite of checks performed on the code of a given code change
*/
"check-suite": {
id: number;
node_id: string;
head_branch: string | null;
/**
* The SHA of the head commit that is being checked.
*/
head_sha: string;
status: string | null;
conclusion: string | null;
url: string | null;
before: string | null;
after: string | null;
pull_requests: components["schemas"]["pull-request-minimal"][] | null;
app: components["schemas"]["integration"] | null;
repository: components["schemas"]["minimal-repository"];
created_at: string | null;
updated_at: string | null;
head_commit: components["schemas"]["simple-commit"];
latest_check_runs_count: number;
check_runs_url: string;
};
/**
* Check suite configuration preferences for a repository.
*/
"check-suite-preference": {
preferences?: {
auto_trigger_checks?: { app_id: number; setting: boolean }[];
};
repository?: components["schemas"]["repository"];
};
/**
* State of a code scanning alert.
*/
"code-scanning-alert-state": "open" | "dismissed" | "fixed";
/**
* The full Git reference, formatted as `refs/heads/<branch name>`.
*/
"code-scanning-alert-ref": string;
/**
* The security alert number.
*/
"alert-number": number;
/**
* The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
"alert-created-at": string;
/**
* The REST API URL of the alert resource.
*/
"alert-url": string;
/**
* The GitHub URL of the alert resource.
*/
"alert-html-url": string;
/**
* The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
"code-scanning-alert-dismissed-at": string | null;
/**
* **Required when the state is dismissed.** The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`.
*/
"code-scanning-alert-dismissed-reason": string | null;
"code-scanning-alert-rule": {
/**
* A unique identifier for the rule used to detect the alert.
*/
id?: string | null;
/**
* The severity of the alert.
*/
severity?: ("none" | "note" | "warning" | "error") | null;
/**
* A short description of the rule used to detect the alert.
*/
description?: string;
};
/**
* The name of the tool used to generate the code scanning analysis alert.
*/
"code-scanning-analysis-tool-name": string;
"code-scanning-analysis-tool": {
name?: components["schemas"]["code-scanning-analysis-tool-name"];
/**
* The version of the tool used to detect the alert.
*/
version?: string | null;
};
"code-scanning-alert-code-scanning-alert-items": {
number?: components["schemas"]["alert-number"];
created_at?: components["schemas"]["alert-created-at"];
url?: components["schemas"]["alert-url"];
html_url?: components["schemas"]["alert-html-url"];
state?: components["schemas"]["code-scanning-alert-state"];
dismissed_by?: components["schemas"]["simple-user"];
dismissed_at?: components["schemas"]["code-scanning-alert-dismissed-at"];
dismissed_reason?: components["schemas"]["code-scanning-alert-dismissed-reason"];
rule?: components["schemas"]["code-scanning-alert-rule"];
tool?: components["schemas"]["code-scanning-analysis-tool"];
};
/**
* Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name.
*/
"code-scanning-analysis-analysis-key": string;
/**
* Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed.
*/
"code-scanning-alert-environment": string;
"code-scanning-alert-instances":
| {
ref?: components["schemas"]["code-scanning-alert-ref"];
analysis_key?: components["schemas"]["code-scanning-analysis-analysis-key"];
environment?: components["schemas"]["code-scanning-alert-environment"];
matrix_vars?: string | null;
state?: components["schemas"]["code-scanning-alert-state"];
}[]
| null;
"code-scanning-alert-code-scanning-alert": {
number?: components["schemas"]["alert-number"];
created_at?: components["schemas"]["alert-created-at"];
url?: components["schemas"]["alert-url"];
html_url?: components["schemas"]["alert-html-url"];
instances?: components["schemas"]["code-scanning-alert-instances"];
state?: components["schemas"]["code-scanning-alert-state"];
dismissed_by?: components["schemas"]["simple-user"];
dismissed_at?: components["schemas"]["code-scanning-alert-dismissed-at"];
dismissed_reason?: components["schemas"]["code-scanning-alert-dismissed-reason"];
rule?: components["schemas"]["code-scanning-alert-rule"];
tool?: components["schemas"]["code-scanning-analysis-tool"];
};
/**
* Sets the state of the code scanning alert. Can be one of `open` or `dismissed`. You must provide `dismissed_reason` when you set the state to `dismissed`.
*/
"code-scanning-alert-set-state": "open" | "dismissed";
/**
* The full Git reference of the code scanning analysis file, formatted as `refs/heads/<branch name>`.
*/
"code-scanning-analysis-ref": string;
/**
* The commit SHA of the code scanning analysis file.
*/
"code-scanning-analysis-commit-sha": string;
/**
* The time that the analysis was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
"code-scanning-analysis-created-at": string;
/**
* Identifies the variable values associated with the environment in which this analysis was performed.
*/
"code-scanning-analysis-environment": string;
"code-scanning-analysis-code-scanning-analysis": {
commit_sha?: components["schemas"]["code-scanning-analysis-commit-sha"];
ref?: components["schemas"]["code-scanning-analysis-ref"];
analysis_key?: components["schemas"]["code-scanning-analysis-analysis-key"];
created_at?: components["schemas"]["code-scanning-analysis-created-at"];
tool_name?: components["schemas"]["code-scanning-analysis-tool-name"];
error?: string;
environment?: components["schemas"]["code-scanning-analysis-environment"];
};
/**
* A Base64 string representing the SARIF file to upload. You must first compress your SARIF file using [`gzip`](http://www.gnu.org/software/gzip/manual/gzip.html) and then translate the contents of the file into a Base64 encoding string.
*/
"code-scanning-analysis-sarif-file": string;
/**
* Collaborator
*/
collaborator: {
login: string;
id: number;
node_id: string;
avatar_url: string;
gravatar_id: string | null;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
permissions?: { pull: boolean; push: boolean; admin: boolean };
};
/**
* Repository invitations let you manage who you collaborate with.
*/
"repository-invitation": {
/**
* Unique identifier of the repository invitation.
*/
id: number;
repository: components["schemas"]["minimal-repository"];
invitee: components["schemas"]["simple-user"] | null;
inviter: components["schemas"]["simple-user"] | null;
/**
* The permission associated with the invitation.
*/
permissions: "read" | "write" | "admin";
created_at: string;
/**
* URL for the repository invitation
*/
url: string;
html_url: string;
node_id: string;
};
/**
* Commit Comment
*/
"commit-comment": {
html_url: string;
url: string;
id: number;
node_id: string;
body: string;
path: string | null;
position: number | null;
line: number | null;
commit_id: string;
user: components["schemas"]["simple-user"] | null;
created_at: string;
updated_at: string;
author_association: string;
reactions?: components["schemas"]["reaction-rollup"];
};
/**
* Scim Error
*/
"scim-error": {
message?: string | null;
documentation_url?: string | null;
detail?: string | null;
status?: number;
scimType?: string | null;
schemas?: string[];
};
/**
* Branch Short
*/
"branch-short": {
name?: string;
commit?: { sha?: string; url?: string };
protected?: boolean;
};
/**
* Hypermedia Link
*/
link: { href: string };
/**
* Pull Request Simple
*/
"pull-request-simple": {
url: string;
id: number;
node_id: string;
html_url: string;
diff_url: string;
patch_url: string;
issue_url: string;
commits_url: string;
review_comments_url: string;
review_comment_url: string;
comments_url: string;
statuses_url: string;
number: number;
state: string;
locked: boolean;
title: string;
user: components["schemas"]["simple-user"] | null;
body: string | null;
labels: {
id?: number;
node_id?: string;
url?: string;
name?: string;
description?: string;
color?: string;
default?: boolean;
}[];
milestone: components["schemas"]["milestone"] | null;
active_lock_reason?: string | null;
created_at: string;
updated_at: string;
closed_at: string | null;
merged_at: string | null;
merge_commit_sha: string | null;
assignee: components["schemas"]["simple-user"] | null;
assignees?: components["schemas"]["simple-user"][] | null;
requested_reviewers?: components["schemas"]["simple-user"][] | null;
requested_teams?: components["schemas"]["team-simple"][] | null;
head: {
label: string;
ref: string;
repo: components["schemas"]["repository"];
sha: string;
user: components["schemas"]["simple-user"] | null;
};
base: {
label: string;
ref: string;
repo: components["schemas"]["repository"];
sha: string;
user: components["schemas"]["simple-user"] | null;
};
_links: {
comments: components["schemas"]["link"];
commits: components["schemas"]["link"];
statuses: components["schemas"]["link"];
html: components["schemas"]["link"];
issue: components["schemas"]["link"];
review_comments: components["schemas"]["link"];
review_comment: components["schemas"]["link"];
self: components["schemas"]["link"];
};
author_association: string;
/**
* Indicates whether or not the pull request is a draft.
*/
draft?: boolean;
};
"simple-commit-status": {
description: string | null;
id: number;
node_id: string;
state: string;
context: string;
target_url: string;
required?: boolean | null;
avatar_url: string | null;
url: string;
created_at: string;
updated_at: string;
};
/**
* Combined Commit Status
*/
"combined-commit-status": {
state: string;
statuses: components["schemas"]["simple-commit-status"][];
sha: string;
total_count: number;
repository: components["schemas"]["minimal-repository"];
commit_url: string;
url: string;
};
/**
* The status of a commit.
*/
status: {
url?: string;
avatar_url?: string | null;
id?: number;
node_id?: string;
state?: string;
description?: string;
target_url?: string;
context?: string;
created_at?: string;
updated_at?: string;
creator?: components["schemas"]["simple-user"];
};
/**
* Code of Conduct Simple
*/
"code-of-conduct-simple": {
url: string;
key: string;
name: string;
html_url: string | null;
};
"community-health-file": { url: string; html_url: string };
/**
* Community Profile
*/
"community-profile": {
health_percentage: number;
description: string | null;
documentation: string | null;
files: {
code_of_conduct: components["schemas"]["code-of-conduct-simple"] | null;
license: components["schemas"]["license-simple"] | null;
contributing: components["schemas"]["community-health-file"] | null;
readme: components["schemas"]["community-health-file"] | null;
issue_template: components["schemas"]["community-health-file"] | null;
pull_request_template:
| components["schemas"]["community-health-file"]
| null;
};
updated_at: string | null;
content_reports_enabled?: boolean;
};
/**
* Diff Entry
*/
"diff-entry": {
sha: string;
filename: string;
status: string;
additions: number;
deletions: number;
changes: number;
blob_url: string;
raw_url: string;
contents_url: string;
patch?: string;
previous_filename?: string;
};
/**
* Commit Comparison
*/
"commit-comparison": {
url: string;
html_url: string;
permalink_url: string;
diff_url: string;
patch_url: string;
base_commit: components["schemas"]["commit"];
merge_base_commit: components["schemas"]["commit"];
status: "diverged" | "ahead" | "behind" | "identical";
ahead_by: number;
behind_by: number;
total_commits: number;
commits: components["schemas"]["commit"][];
files: components["schemas"]["diff-entry"][];
};
/**
* Content Tree
*/
"content-tree": {
type: string;
size: number;
name: string;
path: string;
sha: string;
url: string;
git_url: string | null;
html_url: string | null;
download_url: string | null;
entries?: {
type: string;
size: number;
name: string;
path: string;
content?: string;
sha: string;
url: string;
git_url: string | null;
html_url: string | null;
download_url: string | null;
_links: { git: string | null; html: string | null; self: string };
}[];
_links: { git: string | null; html: string | null; self: string };
};
/**
* A list of directory items
*/
"content-directory": {
type: string;
size: number;
name: string;
path: string;
content?: string;
sha: string;
url: string;
git_url: string | null;
html_url: string | null;
download_url: string | null;
_links: { git: string | null; html: string | null; self: string };
}[];
/**
* Content File
*/
"content-file": {
type: string;
encoding: string;
size: number;
name: string;
path: string;
content: string;
sha: string;
url: string;
git_url: string | null;
html_url: string | null;
download_url: string | null;
_links: { git: string | null; html: string | null; self: string };
target?: string;
submodule_git_url?: string;
};
/**
* An object describing a symlink
*/
"content-symlink": {
type: string;
target: string;
size: number;
name: string;
path: string;
sha: string;
url: string;
git_url: string | null;
html_url: string | null;
download_url: string | null;
_links: { git: string | null; html: string | null; self: string };
};
/**
 * An object describing a submodule
 */
"content-submodule": {
type: string;
submodule_git_url: string;
size: number;
name: string;
path: string;
sha: string;
url: string;
git_url: string | null;
html_url: string | null;
download_url: string | null;
_links: { git: string | null; html: string | null; self: string };
};
/**
* File Commit
*/
"file-commit": {
content?: {
name?: string;
path?: string;
sha?: string;
size?: number;
url?: string;
html_url?: string;
git_url?: string;
download_url?: string;
type?: string;
_links?: { self?: string; git?: string; html?: string };
} | null;
commit?: {
sha?: string;
node_id?: string;
url?: string;
html_url?: string;
author?: { date?: string; name?: string; email?: string };
committer?: { date?: string; name?: string; email?: string };
message?: string;
tree?: { url?: string; sha?: string };
parents?: { url?: string; html_url?: string; sha?: string }[];
verification?: {
verified?: boolean;
reason?: string;
signature?: string | null;
payload?: string | null;
};
};
};
/**
* Contributor
*/
contributor: {
login?: string;
id?: number;
node_id?: string;
avatar_url?: string;
gravatar_id?: string | null;
url?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
repos_url?: string;
events_url?: string;
received_events_url?: string;
type: string;
site_admin?: boolean;
contributions: number;
email?: string;
name?: string;
};
/**
 * A request for a specific ref (branch, SHA, or tag) to be deployed
 */
deployment: {
url: string;
/**
* Unique identifier of the deployment
*/
id: number;
node_id: string;
sha: string;
/**
* The ref to deploy. This can be a branch, tag, or sha.
*/
ref: string;
/**
* Parameter to specify a task to execute
*/
task: string;
payload: { [key: string]: any };
original_environment?: string;
/**
* Name for the target deployment environment.
*/
environment: string;
description: string | null;
creator: components["schemas"]["simple-user"] | null;
created_at: string;
updated_at: string;
statuses_url: string;
repository_url: string;
/**
 * Specifies if the given environment will no longer exist at some point in the future. Default: false.
 */
transient_environment?: boolean;
/**
* Specifies if the given environment is one that end-users directly interact with. Default: false.
*/
production_environment?: boolean;
performed_via_github_app?: components["schemas"]["integration"] | null;
};
/**
* The status of a deployment.
*/
"deployment-status": {
url: string;
id: number;
node_id: string;
/**
* The state of the status.
*/
state:
| "error"
| "failure"
| "inactive"
| "pending"
| "success"
| "queued"
| "in_progress";
creator: components["schemas"]["simple-user"] | null;
/**
* A short description of the status.
*/
description: string;
/**
* The environment of the deployment that the status is for.
*/
environment?: string;
/**
* Deprecated: the URL to associate with this status.
*/
target_url: string;
created_at: string;
updated_at: string;
deployment_url: string;
repository_url: string;
/**
* The URL for accessing your environment.
*/
environment_url?: string;
/**
* The URL to associate with this status.
*/
log_url?: string;
performed_via_github_app?: components["schemas"]["integration"] | null;
};
/**
* Short Blob
*/
"short-blob": { url?: string; sha?: string };
/**
* Blob
*/
blob: {
content: string;
encoding: string;
url: string;
sha: string;
size: number | null;
node_id: string;
highlighted_content?: string;
};
/**
* Low-level Git commit operations within a repository
*/
"git-commit": {
/**
* SHA for the commit
*/
sha?: string;
node_id?: string;
url?: string;
/**
* Identifying information for the git-user
*/
author?: {
/**
* Timestamp of the commit
*/
date?: string;
/**
* Git email address of the user
*/
email: string;
/**
* Name of the git user
*/
name: string;
};
/**
* Identifying information for the git-user
*/
committer?: {
/**
* Timestamp of the commit
*/
date?: string;
/**
* Git email address of the user
*/
email: string;
/**
* Name of the git user
*/
name: string;
};
/**
* Message describing the purpose of the commit
*/
message?: string;
tree?: {
/**
* SHA for the commit
*/
sha?: string;
url?: string;
};
parents?: {
/**
* SHA for the commit
*/
sha?: string;
url?: string;
html_url?: string;
}[];
verification?: {
verified?: boolean;
reason?: string;
signature?: string | null;
payload?: string | null;
};
html_url?: string;
};
/**
* Git references within a repository
*/
"git-ref": {
ref?: string;
node_id?: string;
url?: string;
object?: {
type?: string;
/**
* SHA for the reference
*/
sha?: string;
url?: string;
};
};
/**
* Metadata for a Git tag
*/
"git-tag": {
node_id: string;
/**
* Name of the tag
*/
tag: string;
sha: string;
/**
* URL for the tag
*/
url: string;
/**
* Message describing the purpose of the tag
*/
message: string;
tagger: { date: string; email: string; name: string };
object: { sha: string; type: string; url: string };
verification?: components["schemas"]["verification"];
};
/**
* The hierarchy between files in a Git repository.
*/
"git-tree": {
sha: string;
url: string;
truncated: boolean;
/**
* Objects specifying a tree structure
*/
tree: {
path?: string;
mode?: string;
type?: string;
sha?: string;
size?: number;
url?: string;
}[];
};
"hook-response": {
code: number | null;
status: string | null;
message: string | null;
};
/**
* Webhooks for repositories.
*/
hook: {
type: string;
/**
* Unique identifier of the webhook.
*/
id: number;
/**
* The name of a valid service, use 'web' for a webhook.
*/
name: string;
/**
* Determines whether the hook is actually triggered on pushes.
*/
active: boolean;
/**
* Determines what events the hook is triggered for. Default: ['push'].
*/
events: string[];
config: {
email?: string;
password?: string;
room?: string;
subdomain?: string;
url?: components["schemas"]["webhook-config-url"];
insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"];
content_type?: components["schemas"]["webhook-config-content-type"];
digest?: string;
secret?: components["schemas"]["webhook-config-secret"];
token?: string;
};
updated_at: string;
created_at: string;
url: string;
test_url: string;
ping_url: string;
last_response: components["schemas"]["hook-response"];
};
/**
* A repository import from an external source.
*/
import: {
vcs: string | null;
use_lfs?: string;
/**
* The URL of the originating repository.
*/
vcs_url: string;
svc_root?: string;
tfvc_project?: string;
status:
| "auth"
| "error"
| "none"
| "detecting"
| "choose"
| "auth_failed"
| "importing"
| "mapping"
| "waiting_to_push"
| "pushing"
| "complete"
| "setup"
| "unknown"
| "detection_found_multiple"
| "detection_found_nothing"
| "detection_needs_auth";
status_text?: string | null;
failed_step?: string | null;
error_message?: string | null;
import_percent?: number | null;
commit_count?: number | null;
push_percent?: number | null;
has_large_files?: boolean;
large_files_size?: number;
large_files_count?: number;
project_choices?: {
vcs?: string;
tfvc_project?: string;
human_name?: string;
}[];
message?: string;
authors_count?: number | null;
url: string;
html_url: string;
authors_url: string;
repository_url: string;
svn_root?: string;
};
/**
* Porter Author
*/
"porter-author": {
id: number;
remote_id: string;
remote_name: string;
email: string;
name: string;
url: string;
import_url: string;
};
/**
* Porter Large File
*/
"porter-large-file": {
ref_name: string;
path: string;
oid: string;
size: number;
};
/**
* Issue Event Label
*/
"issue-event-label": { name: string | null; color: string | null };
"issue-event-dismissed-review": {
state: string;
review_id: number;
dismissal_message: string | null;
dismissal_commit_id?: string | null;
};
/**
* Issue Event Milestone
*/
"issue-event-milestone": { title: string };
/**
* Issue Event Project Card
*/
"issue-event-project-card": {
url: string;
id: number;
project_url: string;
project_id: number;
column_name: string;
previous_column_name?: string;
};
/**
* Issue Event Rename
*/
"issue-event-rename": { from: string; to: string };
/**
* Issue Event
*/
"issue-event": {
id: number;
node_id: string;
url: string;
actor: components["schemas"]["simple-user"] | null;
event: string;
commit_id: string | null;
commit_url: string | null;
created_at: string;
issue?: components["schemas"]["issue-simple"];
label?: components["schemas"]["issue-event-label"];
assignee?: components["schemas"]["simple-user"] | null;
assigner?: components["schemas"]["simple-user"] | null;
review_requester?: components["schemas"]["simple-user"] | null;
requested_reviewer?: components["schemas"]["simple-user"] | null;
requested_team?: components["schemas"]["team"];
dismissed_review?: components["schemas"]["issue-event-dismissed-review"];
milestone?: components["schemas"]["issue-event-milestone"];
project_card?: components["schemas"]["issue-event-project-card"];
rename?: components["schemas"]["issue-event-rename"];
author_association?: string;
lock_reason?: string | null;
};
/**
* Issue Event for Issue
*/
"issue-event-for-issue": {
id?: number;
node_id?: string;
url?: string;
actor?: components["schemas"]["simple-user"];
event?: string;
commit_id?: string | null;
commit_url?: string | null;
created_at?: string;
sha?: string;
html_url?: string;
message?: string;
issue_url?: string;
updated_at?: string;
author_association?: string;
body?: string;
lock_reason?: string;
submitted_at?: string;
state?: string;
pull_request_url?: string;
body_html?: string;
body_text?: string;
};
/**
* Color-coded labels help you categorize and filter your issues (just like labels in Gmail).
*/
label: {
id: number;
node_id: string;
/**
* URL for the label
*/
url: string;
/**
* The name of the label.
*/
name: string;
description?: string | null;
/**
* 6-character hex code, without the leading #, identifying the color
*/
color: string;
default: boolean;
};
/**
* An SSH key granting access to a single repository.
*/
"deploy-key": {
id?: number;
key?: string;
url?: string;
title?: string;
verified?: boolean;
created_at?: string;
read_only?: boolean;
};
/**
* Language
*/
language: { [key: string]: number };
/**
* License Content
*/
"license-content": {
name: string;
path: string;
sha: string;
size: number;
url: string;
html_url: string | null;
git_url: string | null;
download_url: string | null;
type: string;
content: string;
encoding: string;
_links: { git: string | null; html: string | null; self: string };
license: components["schemas"]["license-simple"] | null;
};
"pages-source-hash": { branch: string; path: string };
/**
* The configuration for GitHub Pages for a repository.
*/
page: {
/**
* The API address for accessing this Page resource.
*/
url: string;
/**
* The status of the most recent build of the Page.
*/
status: ("built" | "building" | "errored") | null;
/**
* The Pages site's custom domain
*/
cname: string | null;
/**
* Whether the Page has a custom 404 page.
*/
custom_404: boolean;
/**
* The web address the Page can be accessed from.
*/
html_url?: string;
source?: components["schemas"]["pages-source-hash"];
};
/**
* Page Build
*/
"page-build": {
url: string;
status: string;
error: { message: string | null };
pusher: components["schemas"]["simple-user"] | null;
commit: string;
duration: number;
created_at: string;
updated_at: string;
};
/**
* Page Build Status
*/
"page-build-status": { url: string; status: string };
/**
* Pull requests let you tell others about changes you've pushed to a repository on GitHub. Once a pull request is sent, interested parties can review the set of changes, discuss potential modifications, and even push follow-up commits if necessary.
*/
"pull-request": {
url: string;
id: number;
node_id: string;
html_url: string;
diff_url: string;
patch_url: string;
issue_url: string;
commits_url: string;
review_comments_url: string;
review_comment_url: string;
comments_url: string;
statuses_url: string;
/**
* Number uniquely identifying the pull request within its repository.
*/
number: number;
/**
* State of this Pull Request. Either `open` or `closed`.
*/
state: "open" | "closed";
locked: boolean;
/**
* The title of the pull request.
*/
title: string;
user: components["schemas"]["simple-user"] | null;
body: string | null;
labels: {
id?: number;
node_id?: string;
url?: string;
name?: string;
description?: string | null;
color?: string;
default?: boolean;
}[];
milestone: components["schemas"]["milestone"] | null;
active_lock_reason?: string | null;
created_at: string;
updated_at: string;
closed_at: string | null;
merged_at: string | null;
merge_commit_sha: string | null;
assignee: components["schemas"]["simple-user"] | null;
assignees?: components["schemas"]["simple-user"][] | null;
requested_reviewers?: components["schemas"]["simple-user"][] | null;
requested_teams?: components["schemas"]["team-simple"][] | null;
head: {
label: string;
ref: string;
repo: {
archive_url: string;
assignees_url: string;
blobs_url: string;
branches_url: string;
collaborators_url: string;
comments_url: string;
commits_url: string;
compare_url: string;
contents_url: string;
contributors_url: string;
deployments_url: string;
description: string | null;
downloads_url: string;
events_url: string;
fork: boolean;
forks_url: string;
full_name: string;
git_commits_url: string;
git_refs_url: string;
git_tags_url: string;
hooks_url: string;
html_url: string;
id: number;
node_id: string;
issue_comment_url: string;
issue_events_url: string;
issues_url: string;
keys_url: string;
labels_url: string;
languages_url: string;
merges_url: string;
milestones_url: string;
name: string;
notifications_url: string;
owner: {
avatar_url: string;
events_url: string;
followers_url: string;
following_url: string;
gists_url: string;
gravatar_id: string | null;
html_url: string;
id: number;
node_id: string;
login: string;
organizations_url: string;
received_events_url: string;
repos_url: string;
site_admin: boolean;
starred_url: string;
subscriptions_url: string;
type: string;
url: string;
};
private: boolean;
pulls_url: string;
releases_url: string;
stargazers_url: string;
statuses_url: string;
subscribers_url: string;
subscription_url: string;
tags_url: string;
teams_url: string;
trees_url: string;
url: string;
clone_url: string;
default_branch: string;
forks: number;
forks_count: number;
git_url: string;
has_downloads: boolean;
has_issues: boolean;
has_projects: boolean;
has_wiki: boolean;
has_pages: boolean;
homepage: string | null;
language: string | null;
master_branch?: string;
archived: boolean;
disabled: boolean;
mirror_url: string | null;
open_issues: number;
open_issues_count: number;
permissions?: { admin: boolean; pull: boolean; push: boolean };
temp_clone_token?: string;
allow_merge_commit?: boolean;
allow_squash_merge?: boolean;
allow_rebase_merge?: boolean;
license: {
key: string;
name: string;
url: string | null;
spdx_id: string | null;
node_id: string;
} | null;
pushed_at: string;
size: number;
ssh_url: string;
stargazers_count: number;
svn_url: string;
topics?: string[];
watchers: number;
watchers_count: number;
created_at: string;
updated_at: string;
};
sha: string;
user: {
avatar_url: string;
events_url: string;
followers_url: string;
following_url: string;
gists_url: string;
gravatar_id: string | null;
html_url: string;
id: number;
node_id: string;
login: string;
organizations_url: string;
received_events_url: string;
repos_url: string;
site_admin: boolean;
starred_url: string;
subscriptions_url: string;
type: string;
url: string;
};
};
base: {
label: string;
ref: string;
repo: {
archive_url: string;
assignees_url: string;
blobs_url: string;
branches_url: string;
collaborators_url: string;
comments_url: string;
commits_url: string;
compare_url: string;
contents_url: string;
contributors_url: string;
deployments_url: string;
description: string | null;
downloads_url: string;
events_url: string;
fork: boolean;
forks_url: string;
full_name: string;
git_commits_url: string;
git_refs_url: string;
git_tags_url: string;
hooks_url: string;
html_url: string;
id: number;
node_id: string;
issue_comment_url: string;
issue_events_url: string;
issues_url: string;
keys_url: string;
labels_url: string;
languages_url: string;
merges_url: string;
milestones_url: string;
name: string;
notifications_url: string;
owner: {
avatar_url: string;
events_url: string;
followers_url: string;
following_url: string;
gists_url: string;
gravatar_id: string | null;
html_url: string;
id: number;
node_id: string;
login: string;
organizations_url: string;
received_events_url: string;
repos_url: string;
site_admin: boolean;
starred_url: string;
subscriptions_url: string;
type: string;
url: string;
};
private: boolean;
pulls_url: string;
releases_url: string;
stargazers_url: string;
statuses_url: string;
subscribers_url: string;
subscription_url: string;
tags_url: string;
teams_url: string;
trees_url: string;
url: string;
clone_url: string;
default_branch: string;
forks: number;
forks_count: number;
git_url: string;
has_downloads: boolean;
has_issues: boolean;
has_projects: boolean;
has_wiki: boolean;
has_pages: boolean;
homepage: string | null;
language: string | null;
master_branch?: string;
archived: boolean;
disabled: boolean;
mirror_url: string | null;
open_issues: number;
open_issues_count: number;
permissions?: { admin: boolean; pull: boolean; push: boolean };
temp_clone_token?: string;
allow_merge_commit?: boolean;
allow_squash_merge?: boolean;
allow_rebase_merge?: boolean;
license: components["schemas"]["license-simple"] | null;
pushed_at: string;
size: number;
ssh_url: string;
stargazers_count: number;
svn_url: string;
topics?: string[];
watchers: number;
watchers_count: number;
created_at: string;
updated_at: string;
};
sha: string;
user: {
avatar_url: string;
events_url: string;
followers_url: string;
following_url: string;
gists_url: string;
gravatar_id: string | null;
html_url: string;
id: number;
node_id: string;
login: string;
organizations_url: string;
received_events_url: string;
repos_url: string;
site_admin: boolean;
starred_url: string;
subscriptions_url: string;
type: string;
url: string;
};
};
_links: {
comments: components["schemas"]["link"];
commits: components["schemas"]["link"];
statuses: components["schemas"]["link"];
html: components["schemas"]["link"];
issue: components["schemas"]["link"];
review_comments: components["schemas"]["link"];
review_comment: components["schemas"]["link"];
self: components["schemas"]["link"];
};
author_association: string;
/**
* Indicates whether or not the pull request is a draft.
*/
draft?: boolean;
merged: boolean;
mergeable: boolean | null;
rebaseable?: boolean | null;
mergeable_state: string;
merged_by: components["schemas"]["simple-user"] | null;
comments: number;
review_comments: number;
/**
* Indicates whether maintainers can modify the pull request.
*/
maintainer_can_modify: boolean;
commits: number;
additions: number;
deletions: number;
changed_files: number;
};
/**
* Pull Request Review Comments are comments on a portion of the Pull Request's diff.
*/
"pull-request-review-comment": {
/**
* URL for the pull request review comment
*/
url: string;
/**
* The ID of the pull request review to which the comment belongs.
*/
pull_request_review_id: number | null;
/**
* The ID of the pull request review comment.
*/
id: number;
/**
* The node ID of the pull request review comment.
*/
node_id: string;
/**
* The diff of the line that the comment refers to.
*/
diff_hunk: string;
/**
* The relative path of the file to which the comment applies.
*/
path: string;
/**
* The line index in the diff to which the comment applies.
*/
position: number;
/**
* The index of the original line in the diff to which the comment applies.
*/
original_position: number;
/**
* The SHA of the commit to which the comment applies.
*/
commit_id: string;
/**
* The SHA of the original commit to which the comment applies.
*/
original_commit_id: string;
/**
* The comment ID to reply to.
*/
in_reply_to_id?: number;
user: components["schemas"]["simple-user"];
/**
* The text of the comment.
*/
body: string;
created_at: string;
updated_at: string;
/**
* HTML URL for the pull request review comment.
*/
html_url: string;
/**
* URL for the pull request that the review comment belongs to.
*/
pull_request_url: string;
/**
* How the author of the comment is associated with the pull request.
*/
author_association: string;
_links: {
self: { href: string };
html: { href: string };
pull_request: { href: string };
};
/**
* The first line of the range for a multi-line comment.
*/
start_line?: number | null;
/**
 * The original first line of the range for a multi-line comment.
 */
original_start_line?: number | null;
/**
* The side of the first line of the range for a multi-line comment.
*/
start_side?: ("LEFT" | "RIGHT") | null;
/**
* The line of the blob to which the comment applies. The last line of the range for a multi-line comment
*/
line?: number;
/**
 * The original line of the blob to which the comment applies. The last line of the range for a multi-line comment
 */
original_line?: number;
/**
* The side of the diff to which the comment applies. The side of the last line of the range for a multi-line comment
*/
side?: "LEFT" | "RIGHT";
reactions?: components["schemas"]["reaction-rollup"];
body_html?: string;
body_text?: string;
};
/**
* Pull Request Merge Result
*/
"pull-request-merge-result": {
sha: string;
merged: boolean;
message: string;
};
/**
* Pull Request Review Request
*/
"pull-request-review-request": {
users?: {
login?: string;
id?: number;
node_id?: string;
avatar_url?: string;
gravatar_id?: string;
url?: string;
html_url?: string;
followers_url?: string;
following_url?: string;
gists_url?: string;
starred_url?: string;
subscriptions_url?: string;
organizations_url?: string;
repos_url?: string;
events_url?: string;
received_events_url?: string;
type?: string;
site_admin?: boolean;
}[];
teams?: {
id?: number;
node_id?: string;
url?: string;
html_url?: string;
name?: string;
slug?: string;
description?: string | null;
privacy?: string;
permission?: string;
members_url?: string;
repositories_url?: string;
parent?: string | null;
}[];
};
/**
* Pull Request Reviews are reviews on pull requests.
*/
"pull-request-review": {
/**
* Unique identifier of the review
*/
id: number;
node_id: string;
user: components["schemas"]["simple-user"] | null;
/**
* The text of the review.
*/
body: string;
state: string;
html_url: string;
pull_request_url: string;
_links: { html: { href: string }; pull_request: { href: string } };
submitted_at?: string;
/**
* A commit SHA for the review.
*/
commit_id: string;
body_html?: string;
body_text?: string;
author_association: string;
};
/**
* Legacy Review Comment
*/
"review-comment": {
url: string;
pull_request_review_id: number | null;
id: number;
node_id: string;
diff_hunk: string;
path: string;
position: number | null;
original_position: number;
commit_id: string;
original_commit_id: string;
in_reply_to_id?: number;
user: components["schemas"]["simple-user"] | null;
body: string;
created_at: string;
updated_at: string;
html_url: string;
pull_request_url: string;
author_association: string;
_links: {
self: components["schemas"]["link"];
html: components["schemas"]["link"];
pull_request: components["schemas"]["link"];
};
body_text?: string;
body_html?: string;
/**
 * The side of the diff to which the comment applies. The side of the last line of the range for a multi-line comment
 */
side?: "LEFT" | "RIGHT";
/**
* The side of the first line of the range for a multi-line comment.
*/
start_side?: ("LEFT" | "RIGHT") | null;
/**
* The line of the blob to which the comment applies. The last line of the range for a multi-line comment
*/
line?: number;
/**
* The original line of the blob to which the comment applies. The last line of the range for a multi-line comment
*/
original_line?: number;
/**
* The first line of the range for a multi-line comment.
*/
start_line?: number | null;
/**
* The original first line of the range for a multi-line comment.
*/
original_start_line?: number | null;
};
/**
* Data related to a release.
*/
"release-asset": {
url: string;
browser_download_url: string;
id: number;
node_id: string;
/**
* The file name of the asset.
*/
name: string;
label: string | null;
/**
* State of the release asset.
*/
state: "uploaded";
content_type: string;
size: number;
download_count: number;
created_at: string;
updated_at: string;
uploader: components["schemas"]["simple-user"] | null;
};
/**
* A release.
*/
release: {
url: string;
html_url: string;
assets_url: string;
upload_url: string;
tarball_url: string | null;
zipball_url: string | null;
id: number;
node_id: string;
/**
* The name of the tag.
*/
tag_name: string;
/**
* Specifies the commitish value that determines where the Git tag is created from.
*/
target_commitish: string;
name: string | null;
body?: string | null;
/**
* true to create a draft (unpublished) release, false to create a published one.
*/
draft: boolean;
/**
* Whether to identify the release as a prerelease or a full release.
*/
prerelease: boolean;
created_at: string;
published_at: string | null;
author: components["schemas"]["simple-user"];
assets: components["schemas"]["release-asset"][];
body_html?: string;
body_text?: string;
};
/**
* Sets the state of the secret scanning alert. Can be either `open` or `resolved`. You must provide `resolution` when you set the state to `resolved`.
*/
"secret-scanning-alert-state": "open" | "resolved";
/**
* **Required when the `state` is `resolved`.** The reason for resolving the alert. Can be one of `false_positive`, `wont_fix`, `revoked`, or `used_in_tests`.
*/
"secret-scanning-alert-resolution": string | null;
"secret-scanning-alert": {
number?: components["schemas"]["alert-number"];
created_at?: components["schemas"]["alert-created-at"];
url?: components["schemas"]["alert-url"];
html_url?: components["schemas"]["alert-html-url"];
state?: components["schemas"]["secret-scanning-alert-state"];
resolution?: components["schemas"]["secret-scanning-alert-resolution"];
/**
* The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`.
*/
resolved_at?: string | null;
resolved_by?: components["schemas"]["simple-user"];
/**
* The type of secret that secret scanning detected.
*/
secret_type?: string;
/**
* The secret that was detected.
*/
secret?: string;
};
/**
* Stargazer
*/
stargazer: {
starred_at: string;
user: components["schemas"]["simple-user"] | null;
};
/**
* Code Frequency Stat
*/
"code-frequency-stat": number[];
/**
* Commit Activity
*/
"commit-activity": { days: number[]; total: number; week: number };
/**
* Contributor Activity
*/
"contributor-activity": {
author: components["schemas"]["simple-user"] | null;
total: number;
weeks: { w?: string; a?: number; d?: number; c?: number }[];
};
"participation-stats": { all?: number[]; owner?: number[] };
/**
* Repository invitations let you manage who you collaborate with.
*/
"repository-subscription": {
/**
* Determines if notifications should be received from this repository.
*/
subscribed: boolean;
/**
* Determines if all notifications should be blocked from this repository.
*/
ignored: boolean;
reason: string | null;
created_at: string;
url: string;
repository_url: string;
};
/**
* Tag
*/
tag: {
name: string;
commit: { sha: string; url: string };
zipball_url: string;
tarball_url: string;
node_id: string;
};
/**
* A topic aggregates entities that are related to a subject.
*/
topic: { names?: string[] };
traffic: { timestamp: string; uniques: number; count: number };
/**
* Clone Traffic
*/
"clone-traffic": {
count: number;
uniques: number;
clones: components["schemas"]["traffic"][];
};
/**
* Content Traffic
*/
"content-traffic": {
path: string;
title: string;
count: number;
uniques: number;
};
/**
* Referrer Traffic
*/
"referrer-traffic": { referrer: string; count: number; uniques: number };
/**
* View Traffic
*/
"view-traffic": {
count: number;
uniques: number;
views: components["schemas"]["traffic"][];
};
"scim-group-list-enterprise": {
schemas?: string[];
totalResults?: number;
itemsPerPage?: number;
startIndex?: number;
Resources?: {
schemas?: string[];
id?: string;
externalId?: string | null;
displayName?: string;
members?: { value?: string; $ref?: string; display?: string }[];
meta?: {
resourceType?: string;
created?: string;
lastModified?: string;
location?: string;
};
}[];
};
"scim-enterprise-group": {
schemas?: string[];
id?: string;
externalId?: string | null;
displayName?: string;
members?: { value?: string; $ref?: string; display?: string }[];
meta?: {
resourceType?: string;
created?: string;
lastModified?: string;
location?: string;
};
};
"scim-user-list-enterprise": {
schemas?: string[];
totalResults?: number;
itemsPerPage?: number;
startIndex?: number;
Resources?: {
schemas?: string[];
id?: string;
externalId?: string;
userName?: string;
name?: { givenName?: string; familyName?: string };
emails?: { value?: string; primary?: boolean; type?: string }[];
groups?: { value?: string }[];
active?: boolean;
meta?: {
resourceType?: string;
created?: string;
lastModified?: string;
location?: string;
};
}[];
};
"scim-enterprise-user": {
schemas?: string[];
id?: string;
externalId?: string;
userName?: string;
name?: { givenName?: string; familyName?: string };
emails?: { value?: string; type?: string; primary?: boolean }[];
groups?: { value?: string }[];
active?: boolean;
meta?: {
resourceType?: string;
created?: string;
lastModified?: string;
location?: string;
};
};
/**
* SCIM /Users provisioning endpoints
*/
"scim-user": {
/**
* SCIM schema used.
*/
schemas: string[];
/**
* Unique identifier of an external identity
*/
id: string;
/**
* The ID of the User.
*/
externalId: string | null;
/**
* Configured by the admin. Could be an email, login, or username
*/
userName: string | null;
/**
* The name of the user, suitable for display to end-users
*/
displayName?: string | null;
name: {
givenName: string | null;
familyName: string | null;
formatted?: string | null;
};
/**
* user emails
*/
emails: { value: string; primary?: boolean }[];
/**
* The active status of the User.
*/
active: boolean;
meta: {
resourceType?: string;
created?: string;
lastModified?: string;
location?: string;
};
/**
* The ID of the organization.
*/
organization_id?: number;
/**
* Set of operations to be performed
*/
operations?: {
op: "add" | "remove" | "replace";
path?: string;
value?: string | { [key: string]: any } | { [key: string]: any }[];
}[];
/**
* associated groups
*/
groups?: { value?: string; display?: string }[];
};
/**
* SCIM User List
*/
"scim-user-list": {
/**
* SCIM schema used.
*/
schemas: string[];
totalResults: number;
itemsPerPage: number;
startIndex: number;
Resources: components["schemas"]["scim-user"][];
};
"search-result-text-matches": {
object_url?: string;
object_type?: string | null;
property?: string;
fragment?: string;
matches?: { text?: string; indices?: number[] }[];
}[];
/**
* Code Search Result Item
*/
"code-search-result-item": {
name: string;
path: string;
sha: string;
url: string;
git_url: string;
html_url: string;
repository: components["schemas"]["minimal-repository"];
score: number;
file_size?: number;
language?: string | null;
last_modified_at?: string;
line_numbers?: string[];
text_matches?: components["schemas"]["search-result-text-matches"];
};
/**
* Commit Search Result Item
*/
"commit-search-result-item": {
url: string;
sha: string;
html_url: string;
comments_url: string;
commit: {
author: { name: string; email: string; date: string };
committer: components["schemas"]["git-user"] | null;
comment_count: number;
message: string;
tree: { sha: string; url: string };
url: string;
verification?: components["schemas"]["verification"];
};
author: components["schemas"]["simple-user"] | null;
committer: components["schemas"]["git-user"] | null;
parents: { url?: string; html_url?: string; sha?: string }[];
repository: components["schemas"]["minimal-repository"];
score: number;
node_id: string;
text_matches?: components["schemas"]["search-result-text-matches"];
};
/**
* Issue Search Result Item
*/
"issue-search-result-item": {
url: string;
repository_url: string;
labels_url: string;
comments_url: string;
events_url: string;
html_url: string;
id: number;
node_id: string;
number: number;
title: string;
locked: boolean;
active_lock_reason?: string | null;
assignees?: components["schemas"]["simple-user"][] | null;
user: components["schemas"]["simple-user"] | null;
labels: {
id?: number;
node_id?: string;
url?: string;
name?: string;
color?: string;
default?: boolean;
description?: string | null;
}[];
state: string;
assignee: components["schemas"]["simple-user"] | null;
milestone: components["schemas"]["milestone"] | null;
comments: number;
created_at: string;
updated_at: string;
closed_at: string | null;
text_matches?: components["schemas"]["search-result-text-matches"];
pull_request?: {
merged_at?: string | null;
diff_url: string | null;
html_url: string | null;
patch_url: string | null;
url: string | null;
};
body?: string;
score: number;
author_association: string;
draft?: boolean;
repository?: components["schemas"]["repository"];
body_html?: string;
body_text?: string;
timeline_url?: string;
performed_via_github_app?: components["schemas"]["integration"] | null;
};
/**
* Label Search Result Item
*/
"label-search-result-item": {
id: number;
node_id: string;
url: string;
name: string;
color: string;
default: boolean;
description: string | null;
score: number;
text_matches?: components["schemas"]["search-result-text-matches"];
};
/**
* Repo Search Result Item
*/
"repo-search-result-item": {
id: number;
node_id: string;
name: string;
full_name: string;
owner: components["schemas"]["simple-user"] | null;
private: boolean;
html_url: string;
description: string | null;
fork: boolean;
url: string;
created_at: string;
updated_at: string;
pushed_at: string;
homepage: string | null;
size: number;
stargazers_count: number;
watchers_count: number;
language: string | null;
forks_count: number;
open_issues_count: number;
master_branch?: string;
default_branch: string;
score: number;
forks_url: string;
keys_url: string;
collaborators_url: string;
teams_url: string;
hooks_url: string;
issue_events_url: string;
events_url: string;
assignees_url: string;
branches_url: string;
tags_url: string;
blobs_url: string;
git_tags_url: string;
git_refs_url: string;
trees_url: string;
statuses_url: string;
languages_url: string;
stargazers_url: string;
contributors_url: string;
subscribers_url: string;
subscription_url: string;
commits_url: string;
git_commits_url: string;
comments_url: string;
issue_comment_url: string;
contents_url: string;
compare_url: string;
merges_url: string;
archive_url: string;
downloads_url: string;
issues_url: string;
pulls_url: string;
milestones_url: string;
notifications_url: string;
labels_url: string;
releases_url: string;
deployments_url: string;
git_url: string;
ssh_url: string;
clone_url: string;
svn_url: string;
forks: number;
open_issues: number;
watchers: number;
topics?: string[];
mirror_url: string | null;
has_issues: boolean;
has_projects: boolean;
has_pages: boolean;
has_wiki: boolean;
has_downloads: boolean;
archived: boolean;
/**
* Returns whether or not this repository disabled.
*/
disabled: boolean;
license: components["schemas"]["license-simple"] | null;
permissions?: { admin: boolean; pull: boolean; push: boolean };
text_matches?: components["schemas"]["search-result-text-matches"];
temp_clone_token?: string;
allow_merge_commit?: boolean;
allow_squash_merge?: boolean;
allow_rebase_merge?: boolean;
delete_branch_on_merge?: boolean;
};
/**
* Topic Search Result Item
*/
"topic-search-result-item": {
name: string;
display_name: string | null;
short_description: string | null;
description: string | null;
created_by: string | null;
released: string | null;
created_at: string;
updated_at: string;
featured: boolean;
curated: boolean;
score: number;
repository_count?: number | null;
logo_url?: string | null;
text_matches?: components["schemas"]["search-result-text-matches"];
related?:
| {
topic_relation?: {
id?: number;
name?: string;
topic_id?: number;
relation_type?: string;
};
}[]
| null;
aliases?:
| {
topic_relation?: {
id?: number;
name?: string;
topic_id?: number;
relation_type?: string;
};
}[]
| null;
};
/**
* User Search Result Item
*/
"user-search-result-item": {
login: string;
id: number;
node_id: string;
avatar_url: string;
gravatar_id: string | null;
url: string;
html_url: string;
followers_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
received_events_url: string;
type: string;
score: number;
following_url: string;
gists_url: string;
starred_url: string;
events_url: string;
public_repos?: number;
public_gists?: number;
followers?: number;
following?: number;
created_at?: string;
updated_at?: string;
name?: string | null;
bio?: string | null;
email?: string | null;
location?: string | null;
site_admin: boolean;
hireable?: boolean | null;
text_matches?: components["schemas"]["search-result-text-matches"];
blog?: string | null;
company?: string | null;
suspended_at?: string | null;
};
/**
* Private User
*/
"private-user": {
login: string;
id: number;
node_id: string;
avatar_url: string;
gravatar_id: string | null;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
name: string | null;
company: string | null;
blog: string | null;
location: string | null;
email: string | null;
hireable: boolean | null;
bio: string | null;
twitter_username?: string | null;
public_repos: number;
public_gists: number;
followers: number;
following: number;
created_at: string;
updated_at: string;
private_gists: number;
total_private_repos: number;
owned_private_repos: number;
disk_usage: number;
collaborators: number;
two_factor_authentication: boolean;
plan?: {
collaborators: number;
name: string;
space: number;
private_repos: number;
};
suspended_at?: string | null;
business_plus?: boolean;
ldap_dn?: string;
};
/**
* Public User
*/
"public-user": {
login: string;
id: number;
node_id: string;
avatar_url: string;
gravatar_id: string | null;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
name: string | null;
company: string | null;
blog: string | null;
location: string | null;
email: string | null;
hireable: boolean | null;
bio: string | null;
twitter_username?: string | null;
public_repos: number;
public_gists: number;
followers: number;
following: number;
created_at: string;
updated_at: string;
plan?: {
collaborators: number;
name: string;
space: number;
private_repos: number;
};
suspended_at?: string | null;
private_gists?: number;
total_private_repos?: number;
owned_private_repos?: number;
disk_usage?: number;
collaborators?: number;
};
/**
* Email
*/
email:
| {
email: string;
primary: boolean;
verified: boolean;
visibility: string | null;
}
| string;
/**
* A unique encryption key
*/
"gpg-key": {
id: number;
primary_key_id: number | null;
key_id: string;
public_key: string;
emails: { email?: string; verified?: boolean }[];
subkeys: {
id?: number;
primary_key_id?: number;
key_id?: string;
public_key?: string;
emails?: { [key: string]: any }[];
subkeys?: { [key: string]: any }[];
can_sign?: boolean;
can_encrypt_comms?: boolean;
can_encrypt_storage?: boolean;
can_certify?: boolean;
created_at?: string;
expires_at?: string | null;
raw_key?: string | null;
}[];
can_sign: boolean;
can_encrypt_comms: boolean;
can_encrypt_storage: boolean;
can_certify: boolean;
created_at: string;
expires_at: string | null;
raw_key: string | null;
};
/**
* Key
*/
key: {
key_id?: string;
key?: string;
id?: number;
url?: string;
title?: string;
created_at?: string;
verified?: boolean;
read_only?: boolean;
};
"marketplace-account": {
url: string;
id: number;
type: string;
node_id?: string;
login: string;
email?: string | null;
organization_billing_email?: string | null;
};
/**
* User Marketplace Purchase
*/
"user-marketplace-purchase": {
billing_cycle: string;
next_billing_date: string | null;
unit_count: number | null;
on_free_trial: boolean;
free_trial_ends_on: string | null;
updated_at: string | null;
account: components["schemas"]["marketplace-account"];
plan: components["schemas"]["marketplace-listing-plan"];
};
/**
* Starred Repository
*/
"starred-repository": {
starred_at: string;
repo: components["schemas"]["repository"];
};
/**
* Hovercard
*/
hovercard: { contexts: { message: string; octicon: string }[] };
/**
* Key Simple
*/
"key-simple": { id: number; key: string };
};
responses: {
/**
* Resource Not Found
*/
not_found: { [key: string]: any };
/**
* Validation Failed
*/
validation_failed_simple: { [key: string]: any };
/**
* Preview Header Missing
*/
preview_header_missing: { [key: string]: any };
/**
* Forbidden
*/
forbidden: { [key: string]: any };
/**
* Requires Authentication
*/
requires_authentication: { [key: string]: any };
/**
* Validation Failed
*/
validation_failed: { [key: string]: any };
/**
* Not Modified
*/
not_modified: { [key: string]: any };
/**
* Gone
*/
gone: { [key: string]: any };
/**
* Service Unavailable
*/
service_unavailable: { [key: string]: any };
/**
* Forbidden Gist
*/
forbidden_gist: { [key: string]: any };
/**
* Moved Permanently
*/
moved_permanently: { [key: string]: any };
/**
* Conflict
*/
conflict: { [key: string]: any };
/**
* Internal Error
*/
internal_error: { [key: string]: any };
/**
* Bad Request
*/
bad_request: { [key: string]: any };
/**
* Found
*/
found: { [key: string]: any };
/**
* Resource Not Found
*/
scim_not_found: { [key: string]: any };
/**
* Forbidden
*/
scim_forbidden: { [key: string]: any };
/**
* Bad Request
*/
scim_bad_request: { [key: string]: any };
/**
* Internal Error
*/
scim_internal_error: { [key: string]: any };
/**
* Conflict
*/
scim_conflict: { [key: string]: any };
};
} | the_stack |
import * as Bluebird from 'bluebird';
import { NextFunction, Response, Router } from 'express';
import * as _ from 'lodash';
import * as deviceState from '../device-state';
import * as apiBinder from '../api-binder';
import * as applicationManager from '../compose/application-manager';
import {
CompositionStepAction,
generateStep,
} from '../compose/composition-steps';
import { getApp } from '../device-state/db-format';
import { Service } from '../compose/service';
import Volume from '../compose/volume';
import * as commitStore from '../compose/commit';
import * as config from '../config';
import * as db from '../db';
import * as deviceConfig from '../device-config';
import * as logger from '../logger';
import * as images from '../compose/images';
import * as volumeManager from '../compose/volume-manager';
import * as serviceManager from '../compose/service-manager';
import { spawnJournalctl } from '../lib/journald';
import {
appNotFoundMessage,
serviceNotFoundMessage,
v2ServiceEndpointInputErrorMessage,
} from '../lib/messages';
import log from '../lib/supervisor-console';
import supervisorVersion = require('../lib/supervisor-version');
import { checkInt, checkTruthy } from '../lib/validation';
import { isVPNActive } from '../network';
import { doPurge, doRestart, safeStateClone } from './common';
import { AuthorizedRequest } from '../lib/api-keys';
export function createV2Api(router: Router) {
const handleServiceAction = (
req: AuthorizedRequest,
res: Response,
next: NextFunction,
action: CompositionStepAction,
): Resolvable<void> => {
const { imageId, serviceName, force } = req.body;
const appId = checkInt(req.params.appId);
if (!appId) {
res.status(400).json({
status: 'failed',
message: 'Missing app id',
});
return;
}
// handle the case where the appId is out of scope
if (!req.auth.isScoped({ apps: [appId] })) {
res.status(401).json({
status: 'failed',
message: 'Application is not available',
});
return;
}
return Promise.all([applicationManager.getCurrentApps(), getApp(appId)])
.then(([apps, targetApp]) => {
const app = apps[appId];
if (app == null) {
res.status(404).send(appNotFoundMessage);
return;
}
// Work if we have a service name or an image id
if (imageId == null && serviceName == null) {
throw new Error(v2ServiceEndpointInputErrorMessage);
}
let service: Service | undefined;
let targetService: Service | undefined;
if (imageId != null) {
service = _.find(app.services, { imageId });
targetService = _.find(targetApp.services, { imageId });
} else {
service = _.find(app.services, { serviceName });
targetService = _.find(targetApp.services, { serviceName });
}
if (service == null) {
res.status(404).send(serviceNotFoundMessage);
return;
}
applicationManager.setTargetVolatileForService(service.imageId!, {
running: action !== 'stop',
});
return applicationManager
.executeStep(
generateStep(action, {
current: service,
target: targetService,
wait: true,
}),
{
force,
},
)
.then(() => {
res.status(200).send('OK');
});
})
.catch(next);
};
const createServiceActionHandler = (action: string) =>
_.partial(handleServiceAction, _, _, _, action);
router.post(
'/v2/applications/:appId/purge',
(req: AuthorizedRequest, res: Response, next: NextFunction) => {
const { force } = req.body;
const appId = checkInt(req.params.appId);
if (!appId) {
return res.status(400).json({
status: 'failed',
message: 'Missing app id',
});
}
// handle the case where the application is out of scope
if (!req.auth.isScoped({ apps: [appId] })) {
res.status(401).json({
status: 'failed',
message: 'Application is not available',
});
return;
}
return doPurge(appId, force)
.then(() => {
res.status(200).send('OK');
})
.catch(next);
},
);
router.post(
'/v2/applications/:appId/restart-service',
createServiceActionHandler('restart'),
);
router.post(
'/v2/applications/:appId/stop-service',
createServiceActionHandler('stop'),
);
router.post(
'/v2/applications/:appId/start-service',
createServiceActionHandler('start'),
);
router.post(
'/v2/applications/:appId/restart',
(req: AuthorizedRequest, res: Response, next: NextFunction) => {
const { force } = req.body;
const appId = checkInt(req.params.appId);
if (!appId) {
return res.status(400).json({
status: 'failed',
message: 'Missing app id',
});
}
// handle the case where the appId is out of scope
if (!req.auth.isScoped({ apps: [appId] })) {
res.status(401).json({
status: 'failed',
message: 'Application is not available',
});
return;
}
return doRestart(appId, force)
.then(() => {
res.status(200).send('OK');
})
.catch(next);
},
);
// TODO: Support dependent applications when this feature is complete
router.get(
'/v2/applications/state',
async (req: AuthorizedRequest, res: Response, next: NextFunction) => {
// It's kinda hacky to access the services and db via the application manager
// maybe refactor this code
Bluebird.join(
serviceManager.getStatus(),
images.getStatus(),
db.models('app').select(['appId', 'commit', 'name']),
(
services,
imgs,
apps: Array<{ appId: string; commit: string; name: string }>,
) => {
// Create an object which is keyed my application name
const response: {
[appName: string]: {
appId: number;
commit: string;
services: {
[serviceName: string]: {
status?: string;
releaseId: number;
downloadProgress: number | null;
};
};
};
} = {};
const appNameById: { [id: number]: string } = {};
// only access scoped apps
apps
.filter((app) =>
req.auth.isScoped({ apps: [parseInt(app.appId, 10)] }),
)
.forEach((app) => {
const appId = parseInt(app.appId, 10);
response[app.name] = {
appId,
commit: app.commit,
services: {},
};
appNameById[appId] = app.name;
});
// only access scoped images
imgs
.filter((img) => req.auth.isScoped({ apps: [img.appId] }))
.forEach((img) => {
const appName = appNameById[img.appId];
if (appName == null) {
log.warn(
`Image found for unknown application!\nImage: ${JSON.stringify(
img,
)}`,
);
return;
}
const svc = _.find(services, (service: Service) => {
return service.imageId === img.imageId;
});
let status: string | undefined;
if (svc == null) {
status = img.status;
} else {
status = svc.status || img.status;
}
response[appName].services[img.serviceName] = {
status,
releaseId: img.releaseId,
downloadProgress: img.downloadProgress || null,
};
});
res.status(200).json(response);
},
).catch(next);
},
);
router.get(
'/v2/applications/:appId/state',
async (req: AuthorizedRequest, res: Response) => {
// Check application ID provided is valid
const appId = checkInt(req.params.appId);
if (!appId) {
return res.status(400).json({
status: 'failed',
message: `Invalid application ID: ${req.params.appId}`,
});
}
// Query device for all applications
let apps: any;
try {
apps = await applicationManager.getStatus();
} catch (e) {
log.error(e.message);
return res.status(500).json({
status: 'failed',
message: `Unable to retrieve state for application ID: ${appId}`,
});
}
// Check if the application exists
if (!(appId in apps.local) || !req.auth.isScoped({ apps: [appId] })) {
return res.status(409).json({
status: 'failed',
message: `Application ID does not exist: ${appId}`,
});
}
// handle the case where the appId is out of scope
if (!req.auth.isScoped({ apps: [appId] })) {
res.status(401).json({
status: 'failed',
message: 'Application is not available',
});
return;
}
// Filter applications we do not want
for (const app in apps.local) {
if (app !== appId.toString()) {
delete apps.local[app];
}
}
const commit = await commitStore.getCommitForApp(appId);
// Return filtered applications
return res.status(200).json({ commit, ...apps });
},
);
router.get('/v2/local/target-state', async (_req, res) => {
const targetState = await deviceState.getTarget();
const target = safeStateClone(targetState);
res.status(200).json({
status: 'success',
state: target,
});
});
router.post('/v2/local/target-state', async (req, res) => {
// let's first ensure that we're in local mode, otherwise
// this function should not do anything
const localMode = await config.get('localMode');
if (!localMode) {
return res.status(400).json({
status: 'failed',
message: 'Target state can only set when device is in local mode',
});
}
// Now attempt to set the state
const force = req.body.force;
const targetState = req.body;
try {
await deviceState.setTarget(targetState, true);
await deviceState.triggerApplyTarget({ force });
res.status(200).json({
status: 'success',
message: 'OK',
});
} catch (e) {
res.status(400).json({
status: 'failed',
message: e.message,
});
}
});
router.get('/v2/local/device-info', async (_req, res) => {
try {
const { deviceType, deviceArch } = await config.getMany([
'deviceType',
'deviceArch',
]);
return res.status(200).json({
status: 'success',
info: {
arch: deviceArch,
deviceType,
},
});
} catch (e) {
res.status(500).json({
status: 'failed',
message: e.message,
});
}
});
router.get('/v2/local/logs', async (_req, res) => {
const serviceNameCache: { [sId: number]: string } = {};
const backend = logger.getLocalBackend();
// Cache the service names to IDs per call to the endpoint
backend.assignServiceNameResolver(async (id: number) => {
if (id in serviceNameCache) {
return serviceNameCache[id];
} else {
const name = await applicationManager.serviceNameFromId(id);
serviceNameCache[id] = name;
return name;
}
});
// Get the stream, and stream it into res
const listenStream = backend.attachListener();
// The http connection doesn't correctly intialise until some data is sent,
// which means any callers waiting on the data being returned will hang
// until the first logs comes through. To avoid this we send an initial
// message
res.write(
`${JSON.stringify({ message: 'Streaming logs', isSystem: true })}\n`,
);
listenStream.pipe(res);
});
router.get('/v2/version', (_req, res) => {
res.status(200).json({
status: 'success',
version: supervisorVersion,
});
});
router.get('/v2/containerId', async (req: AuthorizedRequest, res) => {
const services = (await serviceManager.getAll()).filter((service) =>
req.auth.isScoped({ apps: [service.appId] }),
);
if (req.query.serviceName != null || req.query.service != null) {
const serviceName = req.query.serviceName || req.query.service;
const service = _.find(
services,
(svc) => svc.serviceName === serviceName,
);
if (service != null) {
res.status(200).json({
status: 'success',
containerId: service.containerId,
});
} else {
res.status(503).json({
status: 'failed',
message: 'Could not find service with that name',
});
}
} else {
res.status(200).json({
status: 'success',
services: _(services)
.keyBy('serviceName')
.mapValues('containerId')
.value(),
});
}
});
router.get('/v2/state/status', async (req: AuthorizedRequest, res) => {
const appIds: number[] = [];
const pending = deviceState.isApplyInProgress();
const containerStates = (await serviceManager.getAll())
.filter((service) => req.auth.isScoped({ apps: [service.appId] }))
.map((svc) => {
appIds.push(svc.appId);
return _.pick(
svc,
'status',
'serviceName',
'appId',
'imageId',
'serviceId',
'containerId',
'createdAt',
);
});
let downloadProgressTotal = 0;
let downloads = 0;
const imagesStates = (await images.getStatus())
.filter((img) => req.auth.isScoped({ apps: [img.appId] }))
.map((img) => {
appIds.push(img.appId);
if (img.downloadProgress != null) {
downloadProgressTotal += img.downloadProgress;
downloads += 1;
}
return _.pick(
img,
'name',
'appId',
'serviceName',
'imageId',
'dockerImageId',
'status',
'downloadProgress',
);
});
let overallDownloadProgress: number | null = null;
if (downloads > 0) {
overallDownloadProgress = downloadProgressTotal / downloads;
}
// This endpoint does not support multi-app but the device might be running multiple apps
// We must return information for only 1 application so use the first one in the list
const appId = appIds[0];
// Get the commit for this application
const commit = await commitStore.getCommitForApp(appId);
// Filter containers by this application
const appContainers = containerStates.filter((c) => c.appId === appId);
// Filter images by this application
const appImages = imagesStates.filter((i) => i.appId === appId);
return res.status(200).send({
status: 'success',
appState: pending ? 'applying' : 'applied',
overallDownloadProgress,
containers: appContainers,
images: appImages,
release: commit,
});
});
router.get('/v2/device/name', async (_req, res) => {
const deviceName = await config.get('name');
res.json({
status: 'success',
deviceName,
});
});
router.get('/v2/device/tags', async (_req, res) => {
try {
const tags = await apiBinder.fetchDeviceTags();
return res.json({
status: 'success',
tags,
});
} catch (e) {
log.error(e);
res.status(500).json({
status: 'failed',
message: e.message,
});
}
});
router.get('/v2/device/vpn', async (_req, res) => {
const conf = await deviceConfig.getCurrent();
// Build VPNInfo
const info = {
enabled: conf.SUPERVISOR_VPN_CONTROL === 'true',
connected: await isVPNActive(),
};
// Return payload
return res.json({
status: 'success',
vpn: info,
});
});
router.get('/v2/cleanup-volumes', async (req: AuthorizedRequest, res) => {
const targetState = await applicationManager.getTargetApps();
const referencedVolumes: string[] = [];
_.each(targetState, (app, appId) => {
// if this app isn't in scope of the request, do not cleanup it's volumes
if (!req.auth.isScoped({ apps: [parseInt(appId, 10)] })) {
return;
}
_.each(app.volumes, (_volume, volumeName) => {
referencedVolumes.push(
Volume.generateDockerName(parseInt(appId, 10), volumeName),
);
});
});
await volumeManager.removeOrphanedVolumes(referencedVolumes);
res.json({
status: 'success',
});
});
// Streams systemd journal entries into the HTTP response. The spawned
// journalctl process lives for the duration of the request (indefinitely
// with `follow`), so it must be reaped when either side finishes.
router.post('/v2/journal-logs', (req, res) => {
	// All options are optional; `count` must be a positive integer or is dropped.
	const all = checkTruthy(req.body.all);
	const follow = checkTruthy(req.body.follow);
	const count = checkInt(req.body.count, { positive: true }) || undefined;
	const unit = req.body.unit;
	const format = req.body.format || 'short';
	const containerId = req.body.containerId;
	const journald = spawnJournalctl({
		all,
		follow,
		count,
		unit,
		format,
		containerId,
	});
	res.status(200);
	// We know stdout will be present
	journald.stdout!.pipe(res);
	// Client hung up: kill journalctl so a `follow` stream doesn't linger.
	res.on('close', () => {
		journald.kill('SIGKILL');
	});
	// journalctl finished (non-follow mode, or killed above): end the response.
	journald.on('exit', () => {
		journald.stdout!.unpipe();
		res.end();
	});
});
} | the_stack |
import * as Jimp from 'jimp'
import * as path from 'path'
import javascriptBarcodeReader from '../src/index'
import { combineAllPossible } from '../src/utilities/combineAllPossible'
import { getImageDataFromSource } from '../src/utilities/getImageDataFromSource'
import { getLines } from '../src/utilities/getLines'
import { isUrl } from '../src/utilities/isUrl'
/**
 * Loads an image from a URL, resolving with the element once it has
 * finished loading and rejecting on load error.
 */
async function loadImage(src: string): Promise<HTMLImageElement> {
  return new Promise((resolve, reject) => {
    const element = new Image()
    element.onload = (): void => resolve(element)
    element.onerror = reject
    element.src = src
  })
}
/**
 * Draws an image onto a fresh canvas sized to the image's natural dimensions.
 *
 * Throws when a 2d rendering context cannot be obtained — the original
 * optional-chained the draw call, silently returning a blank canvas fixture.
 */
function loadCanvas(img: HTMLImageElement): HTMLCanvasElement {
  const canvas = document.createElement('canvas')
  canvas.width = img.naturalWidth
  canvas.height = img.naturalHeight
  const ctx = canvas.getContext('2d')
  if (!ctx) {
    throw new Error('2d canvas context is unavailable')
  }
  ctx.drawImage(img, 0, 0)
  return canvas
}
// Shared DOM fixtures: a loaded <img> plus a <canvas> copy of it, both
// attached to the document under well-known ids used by the tests below.
beforeAll(async () => {
  const img = await loadImage('https://upload.wikimedia.org/wikipedia/en/a/a9/Code_93_wikipedia.png')
  img.id = 'Code_93_wikipedia_image'
  const canvas = loadCanvas(img)
  canvas.id = 'Code_93_wikipedia_canvas'
  document.body.appendChild(img)
  document.body.appendChild(canvas)
})
describe('Count lines in an image', () => {
  const rowsToScan = 3
  /**
   * Reads `rowsToScan` consecutive pixel rows starting at the vertical
   * middle of the given image file, returning them with the image width.
   */
  const middleRows = async (file: string): Promise<{ rows: Uint8ClampedArray; width: number }> => {
    const image = await Jimp.read(file)
    const { data, width, height } = image.bitmap
    const channels = data.length / (width * height)
    const startIndex = channels * width * Math.floor(height / 2)
    const endIndex = startIndex + rowsToScan * channels * width
    return { rows: Uint8ClampedArray.from(data.slice(startIndex, endIndex)), width }
  }
  test('should detect lines in barcode image', async () => {
    const { rows, width } = await middleRows('./test/sample-images/small-padding.png')
    expect(getLines(rows, width, rowsToScan).length).toBe(27)
  })
  test('should detect lines in barcode image without padding', async () => {
    const { rows, width } = await middleRows('./test/sample-images/small.png')
    expect(getLines(rows, width, rowsToScan).length).toBe(27)
  })
  test('should return zero lines with empty image', async () => {
    const { rows, width } = await middleRows('./test/sample-images/empty.jpg')
    expect(getLines(rows, width, rowsToScan).length).toBe(0)
  })
})
describe('get imageData from source', () => {
  /** Common shape assertions for any decoded image-data object. */
  const expectImageData = (dataSource: { data: unknown; width: unknown; height: unknown }): void => {
    expect(typeof dataSource.data).toBe('object')
    expect(typeof dataSource.width).toBe('number')
    expect(typeof dataSource.height).toBe('number')
  }
  test('should get imageData from url', async () => {
    const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Code_93_wikipedia.png'
    expectImageData(await getImageDataFromSource(url))
  })
  test('should get imageData from file path', async () => {
    expectImageData(await getImageDataFromSource(path.resolve('./test/sample-images/codabar.jpg')))
  })
  test('should get imageData from HTMLImageElement id', async () => {
    expectImageData(await getImageDataFromSource('#Code_93_wikipedia_image'))
  })
  test('should get imageData from HTMLCanvasElement id', async () => {
    expectImageData(await getImageDataFromSource('#Code_93_wikipedia_canvas'))
  })
  test('should get imageData from HTMLImageElement', async () => {
    const imageElement = document.getElementById('Code_93_wikipedia_image')
    // Fail loudly if the fixture is missing — the original returned early,
    // making the test pass without exercising anything.
    if (!(imageElement instanceof HTMLImageElement)) {
      throw new Error('image fixture missing from document')
    }
    expectImageData(await getImageDataFromSource(imageElement))
  })
  test('should get imageData from HTMLCanvasElement', async () => {
    const canvasElement = document.getElementById('Code_93_wikipedia_canvas')
    if (!(canvasElement instanceof HTMLCanvasElement)) {
      throw new Error('canvas fixture missing from document')
    }
    expectImageData(await getImageDataFromSource(canvasElement))
  })
  test('should throw with invalid source', async () => {
    // The original attached .catch() without returning/awaiting the promise,
    // so the test could complete before the rejection was ever observed.
    await expect(getImageDataFromSource('Olalalala')).rejects.toBeDefined()
  })
})
describe('isUrl', () => {
test('check if string is URL', () => {
const url = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Code_93_wikipedia.png'
expect(isUrl(url)).toBeTruthy()
expect(isUrl('#someString')).toBeFalsy()
})
})
describe('combineAllPossible', () => {
  test('should be able to combine multiple results into one complete', () => {
    // Each partial decode fills the gaps ('?') of the other.
    expect(combineAllPossible('?123456', '012345?')).toBe('0123456')
    expect(combineAllPossible('', '')).toBe('')
  })
})
// End-to-end decodes against the checked-in sample images: each test feeds a
// file (or raw bitmap) to the reader and pins the exact expected string.
describe('extract barcode from local files', () => {
  test('should detect barcode codabar', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/codabar.jpg'),
      barcode: 'codabar',
    })
    expect(result).toBe('A40156C')
  })
  // Renamed from a duplicate of the previous test's name: duplicated names
  // make one of the two results indistinguishable in reporters and
  // --testNamePattern filters.
  test('should detect barcode codabar with single pass', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/codabar.jpg'),
      barcode: 'codabar',
      options: {
        singlePass: true,
      },
    })
    expect(result).toBe('A40156C')
  })
  test('should detect barcode 2 of 5 standard', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-2of5.jpg'),
      barcode: 'code-2of5',
    })
    expect(result).toBe('12345670')
  })
  test('should detect barcode 2 of 5 interleaved', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-2of5-interleaved.jpg'),
      barcode: 'code-2of5',
      barcodeType: 'interleaved',
    })
    expect(result).toBe('12345670')
  })
  test('should detect barcode 39', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-39.jpg'),
      barcode: 'code-39',
    })
    expect(result).toBe('10023')
  })
  test('should detect barcode 93', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-93.jpg'),
      barcode: 'code-93',
    })
    expect(result).toBe('123ABC')
  })
  test('should detect barcode 128: ABC-abc-1234', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-128.jpg'),
      barcode: 'code-128',
    })
    expect(result).toBe('ABC-abc-1234')
  })
  // test('should detect barcode 128: eeb00f0c-0c7e-a937-1794-25685779ba0c', async () => {
  //   const result = await javascriptBarcodeReader({
  //     image: path.resolve('./test/sample-images/code-128-eeb00f0c-0c7e-a937-1794-25685779ba0c.png'),
  //     barcode: 'code-128',
  //   })
  //   expect(result).toBe('eeb00f0c-0c7e-a937-1794-25685779ba0c')
  // })
  // test('should detect barcode 128: 3107cde3-d1ff-0f93-a215-4109753c0c9e', async () => {
  //   const result = await javascriptBarcodeReader({
  //     image: path.resolve('./test/sample-images/code-128-3107cde3-d1ff-0f93-a215-4109753c0c9e.png'),
  //     barcode: 'code-128',
  //   })
  //   expect(result).toBe('3107cde3-d1ff-0f93-a215-4109753c0c9e')
  // })
  test('should detect barcode EAN-8', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/ean-8.jpg'),
      barcode: 'ean-8',
      options: {
        useAdaptiveThreshold: true,
      },
    })
    expect(result).toBe('73127727')
  })
  test('should detect barcode EAN-13 small', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/ean-13-5901234123457.png'),
      barcode: 'ean-13',
    })
    expect(result).toBe('901234123457')
  })
  test('should detect barcode EAN-13 large', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/ean-13.jpg'),
      barcode: 'ean-13',
    })
    expect(result).toBe('901234123457')
  })
  test('should detect barcode 128 without padding white bars', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-128-no-padding.jpg'),
      barcode: 'code-128',
    })
    expect(result).toBe('12ab#!')
  })
  test('should detect barcode 128 with multiple zeros', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-128-000.jpg'),
      barcode: 'code-128',
    })
    expect(result).toBe('79619647103200000134407005')
  })
  test('should detect barcode 128 with default start Code B', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/L89HE1806005080432.gif'),
      barcode: 'code-128',
    })
    expect(result).toBe('L89HE1806005080432')
  })
  test('should detect barcode 93 without padding white bars', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-93-no-padding.jpg'),
      barcode: 'code-93',
    })
    expect(result).toBe('WIKIPEDIA')
  })
  // Feeds raw bitmap data (decoded via Jimp) instead of a file path.
  test('should detect barcode 93 with bitmap data', async () => {
    const image = await Jimp.read('./test/sample-images/code-93-no-padding.jpg')
    const { data, width, height } = image.bitmap
    const result = await javascriptBarcodeReader({
      image: {
        data: Uint8ClampedArray.from(data),
        width,
        height,
      },
      barcode: 'code-93',
    })
    expect(result).toBe('WIKIPEDIA')
  })
})
// Re-runs a subset of the decoders with the `useAdaptiveThreshold` option
// enabled. (Typo "threhsold" in the original suite name fixed.)
describe('extract barcode after applying adaptive threshold', () => {
  test('should detect barcode codabar', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/codabar.jpg'),
      barcode: 'codabar',
      options: {
        useAdaptiveThreshold: true,
      },
    })
    expect(result).toBe('A40156C')
  })
  test('should detect barcode 2 of 5', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-2of5.jpg'),
      barcode: 'code-2of5',
      options: {
        useAdaptiveThreshold: true,
      },
    })
    expect(result).toBe('12345670')
  })
  test('should detect barcode 2 of 5 interleaved', async () => {
    const result = await javascriptBarcodeReader({
      image: path.resolve('./test/sample-images/code-2of5-interleaved.jpg'),
      barcode: 'code-2of5',
      barcodeType: 'interleaved',
      options: {
        useAdaptiveThreshold: true,
      },
    })
    expect(result).toBe('12345670')
  })
})
describe('extract barcode from remote URL', () => {
  test('should detect barcode 93 from remote url', async () => {
    const imageUrl = 'https://upload.wikimedia.org/wikipedia/en/a/a9/Code_93_wikipedia.png'
    expect(
      await javascriptBarcodeReader({ image: imageUrl, barcode: 'code-93' }),
    ).toBe('WIKIPEDIA')
  })
})
describe('Fails', () => {
test('throws when no barcode specified', async () => {
try {
await javascriptBarcodeReader({
image: 'https://upload.wikimedia.org/wikipedia/en/a/a9/Code_93_wikipedia.png',
barcode: 'oallal',
})
} catch (err) {
expect(err).toBeDefined()
}
})
test('throws when invalid barcode specified', async () => {
try {
await javascriptBarcodeReader({
image: './test/sample-images/empty.jpg',
barcode: 'none',
})
} catch (err) {
expect(err).toBeDefined()
}
})
test('throws when no barcode found', async () => {
try {
await javascriptBarcodeReader({
image: './test/sample-images/empty.jpg',
barcode: 'code-93',
})
} catch (err) {
expect(err).toBeDefined()
}
})
}) | the_stack |
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";
/**
* An isolated set of Cloud Spanner resources on which databases can be
* hosted.
*
* To get more information about Instance, see:
*
* * [API documentation](https://cloud.google.com/spanner/docs/reference/rest/v1/projects.instances)
* * How-to Guides
* * [Official Documentation](https://cloud.google.com/spanner/)
*
* ## Example Usage
* ### Spanner Instance Basic
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const example = new gcp.spanner.Instance("example", {
* config: "regional-us-central1",
* displayName: "Test Spanner Instance",
* labels: {
* foo: "bar",
* },
* numNodes: 2,
* });
* ```
* ### Spanner Instance Processing Units
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const example = new gcp.spanner.Instance("example", {
* config: "regional-us-central1",
* displayName: "Test Spanner Instance",
* labels: {
* foo: "bar",
* },
* processingUnits: 200,
* });
* ```
* ### Spanner Instance Multi Regional
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const example = new gcp.spanner.Instance("example", {
* config: "nam-eur-asia1",
* displayName: "Multi Regional Instance",
* labels: {
* foo: "bar",
* },
* numNodes: 2,
* });
* ```
*
* ## Import
*
* Instance can be imported using any of these accepted formats
*
* ```sh
* $ pulumi import gcp:spanner/instance:Instance default projects/{{project}}/instances/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:spanner/instance:Instance default {{project}}/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:spanner/instance:Instance default {{name}}
* ```
*/
export class Instance extends pulumi.CustomResource {
    /**
     * Get an existing Instance resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: InstanceState, opts?: pulumi.CustomResourceOptions): Instance {
        return new Instance(name, state as any, { ...opts, id: id });
    }

    /** @internal */
    public static readonly __pulumiType = 'gcp:spanner/instance:Instance';

    /**
     * Returns true if the given object is an instance of Instance. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is Instance {
        if (obj == null) {
            return false;
        }
        return obj['__pulumiType'] === Instance.__pulumiType;
    }

    /**
     * The name of the instance's configuration (similar but not quite the
     * same as a region), which defines the geographic placement and
     * replication of the databases in this instance, i.e. where the data is
     * stored. Values are typically of the form `regional-europe-west1`,
     * `us-central` etc. For the valid values consult the
     * [Configuration section of the docs](https://cloud.google.com/spanner/docs/instances).
     */
    public readonly config!: pulumi.Output<string>;
    /**
     * The descriptive name for this instance as it appears in UIs. Must be
     * unique per project and between 4 and 30 characters in length.
     */
    public readonly displayName!: pulumi.Output<string>;
    /**
     * When deleting a spanner instance, this boolean option will delete all backups of this instance.
     * This must be set to true if you created a backup manually in the console.
     */
    public readonly forceDestroy!: pulumi.Output<boolean | undefined>;
    /**
     * An object containing a list of "key": value pairs.
     * Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
     */
    public readonly labels!: pulumi.Output<{[key: string]: string} | undefined>;
    /**
     * A unique identifier for the instance, which cannot be changed after
     * the instance is created. The name must be between 6 and 30 characters
     * in length.
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * The number of nodes allocated to this instance. Exactly one of
     * `numNodes` or `processingUnits` must be set.
     */
    public readonly numNodes!: pulumi.Output<number>;
    /**
     * The number of processing units allocated to this instance. Exactly one
     * of `processingUnits` or `numNodes` must be set.
     */
    public readonly processingUnits!: pulumi.Output<number>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    public readonly project!: pulumi.Output<string>;
    /**
     * Instance status: 'CREATING' or 'READY'.
     */
    public /*out*/ readonly state!: pulumi.Output<string>;

    /**
     * Create an Instance resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: InstanceArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: InstanceArgs | InstanceState, opts?: pulumi.CustomResourceOptions) {
        opts = opts || {};
        const inputs: pulumi.Inputs = {};
        if (opts.id) {
            // Looking up an existing resource: argsOrState carries previously-read state.
            const state = argsOrState as InstanceState | undefined;
            inputs["config"] = state?.config;
            inputs["displayName"] = state?.displayName;
            inputs["forceDestroy"] = state?.forceDestroy;
            inputs["labels"] = state?.labels;
            inputs["name"] = state?.name;
            inputs["numNodes"] = state?.numNodes;
            inputs["processingUnits"] = state?.processingUnits;
            inputs["project"] = state?.project;
            inputs["state"] = state?.state;
        } else {
            // Creating a new resource: validate the required arguments first.
            const args = argsOrState as InstanceArgs | undefined;
            if (args?.config === undefined && !opts.urn) {
                throw new Error("Missing required property 'config'");
            }
            if (args?.displayName === undefined && !opts.urn) {
                throw new Error("Missing required property 'displayName'");
            }
            inputs["config"] = args?.config;
            inputs["displayName"] = args?.displayName;
            inputs["forceDestroy"] = args?.forceDestroy;
            inputs["labels"] = args?.labels;
            inputs["name"] = args?.name;
            inputs["numNodes"] = args?.numNodes;
            inputs["processingUnits"] = args?.processingUnits;
            inputs["project"] = args?.project;
            inputs["state"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion() });
        }
        super(Instance.__pulumiType, name, inputs, opts);
    }
}
/**
* Input properties used for looking up and filtering Instance resources.
*/
export interface InstanceState {
    /**
     * The name of the instance's configuration (similar but not quite the
     * same as a region), which defines the geographic placement and
     * replication of the databases in this instance, i.e. where the data is
     * stored. Values are typically of the form `regional-europe-west1`,
     * `us-central` etc. For the valid values consult the
     * [Configuration section of the docs](https://cloud.google.com/spanner/docs/instances).
     */
    config?: pulumi.Input<string>;
    /**
     * The descriptive name for this instance as it appears in UIs. Must be
     * unique per project and between 4 and 30 characters in length.
     */
    displayName?: pulumi.Input<string>;
    /**
     * When deleting a spanner instance, this boolean option will delete all backups of this instance.
     * This must be set to true if you created a backup manually in the console.
     */
    forceDestroy?: pulumi.Input<boolean>;
    /**
     * An object containing a list of "key": value pairs.
     * Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
     */
    labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * A unique identifier for the instance, which cannot be changed after
     * the instance is created. The name must be between 6 and 30 characters
     * in length.
     */
    name?: pulumi.Input<string>;
    /**
     * The number of nodes allocated to this instance. Exactly one of
     * `numNodes` or `processingUnits` must be set.
     */
    numNodes?: pulumi.Input<number>;
    /**
     * The number of processing units allocated to this instance. Exactly one
     * of `processingUnits` or `numNodes` must be set.
     */
    processingUnits?: pulumi.Input<number>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * Instance status: 'CREATING' or 'READY'.
     */
    state?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing an Instance resource.
 */
export interface InstanceArgs {
    /**
     * Required. The name of the instance's configuration (similar but not
     * quite the same as a region), which defines the geographic placement and
     * replication of the databases in this instance, i.e. where the data is
     * stored. Values are typically of the form `regional-europe-west1`,
     * `us-central` etc. For the valid values consult the
     * [Configuration section of the docs](https://cloud.google.com/spanner/docs/instances).
     */
    config: pulumi.Input<string>;
    /**
     * Required. The descriptive name for this instance as it appears in UIs.
     * Must be unique per project and between 4 and 30 characters in length.
     */
    displayName: pulumi.Input<string>;
    /**
     * When deleting a spanner instance, this boolean option will delete all backups of this instance.
     * This must be set to true if you created a backup manually in the console.
     */
    forceDestroy?: pulumi.Input<boolean>;
    /**
     * An object containing a list of "key": value pairs.
     * Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
     */
    labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * A unique identifier for the instance, which cannot be changed after
     * the instance is created. The name must be between 6 and 30 characters
     * in length.
     */
    name?: pulumi.Input<string>;
    /**
     * The number of nodes allocated to this instance. Exactly one of
     * `numNodes` or `processingUnits` must be set.
     */
    numNodes?: pulumi.Input<number>;
    /**
     * The number of processing units allocated to this instance. Exactly one
     * of `processingUnits` or `numNodes` must be set.
     */
    processingUnits?: pulumi.Input<number>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
}
import * as vscode from "vscode";
import { FountainStructureProperties } from "./extension";
import * as parser from "./afterwriting-parser";
import * as path from "path";
import * as telemetry from "./telemetry";
import * as sceneNumbering from './scenenumbering';
import * as fs from "fs";
//var syllable = require('syllable');
/**
 * Strips a trailing character extension — e.g. the parentheses part of
 * `JOE (on the radio)` — together with an optional dual-dialogue caret.
 */
export const trimCharacterExtension = (name: string): string => name.replace(/[ \t]*(\(.*\))[ \t]*([ \t]*\^)?$/, "");
/**
 * Strips the leading `@` force symbol (and any whitespace before it) that is
 * necessary on character names containing lower-case letters, i.e. `@McCONNOR`.
 */
export const trimCharacterForceSymbol = (name: string): string => name.replace(/^[ \t]*@/, "");
/**
 * Prefixes a character name with the `@` force symbol when it contains
 * lower-case letters (required by the fountain syntax for such names).
 */
export const addForceSymbolToCharacter = (characterName: string): string => {
    const hasLowerCase = /[\p{Ll}]/u.test(characterName);
    return hasLowerCase ? `@${characterName}` : characterName;
}
/**
 * Collects the names of characters who spoke before the cursor within the
 * current scene. The most recent speaker is appended last; earlier speakers
 * appear once each, in most-recent-first order.
 */
export const getCharactersWhoSpokeBeforeLast = (parsedDocument: any, position: vscode.Position) => {
    // Start scanning backwards from the token on the line above the cursor.
    let searchIndex = parsedDocument.tokenLines[position.line - 1] || 0;
    const previousCharacters: string[] = [];
    let lastCharacter: string | undefined;
    let stopSearch = false;
    while (searchIndex > 0 && !stopSearch) {
        const token = parsedDocument.tokens[searchIndex - 1];
        if (token.type === "character") {
            const name = trimCharacterForceSymbol(trimCharacterExtension(token.text)).trim();
            if (lastCharacter === undefined) {
                lastCharacter = name;
            } else if (name !== lastCharacter && !previousCharacters.includes(name)) {
                previousCharacters.push(name);
            }
        } else if (token.type === "scene_heading") {
            // Never look past the start of the current scene.
            stopSearch = true;
        }
        searchIndex--;
    }
    // Append the most recent speaker last so callers can treat it specially.
    if (lastCharacter !== undefined) {
        previousCharacters.push(lastCharacter);
    }
    return previousCharacters;
}
/**
 * Scans upwards from the cursor and returns the second-most-recent character
 * name (as registered in `fountainDocProps.characters`) that appears on its
 * own line. Returns "" when fewer than two known speakers precede the cursor.
 */
export const findCharacterThatSpokeBeforeTheLast = (
    document: vscode.TextDocument,
    position: vscode.Position,
    fountainDocProps: FountainStructureProperties,
): string => {
    // A line counts as a character line when, stripped of its extension
    // ("(V.O.)") and force symbol ("@"), it matches a known character name.
    const isAlreadyMentionedCharacter = (text: string): boolean => fountainDocProps.characters.has(text);
    let characterBeforeLast = "";
    let lineToInspect = 1;
    let foundLastCharacter = false;
    do {
        const beginningOfLineToInspect = new vscode.Position(position.line - lineToInspect, 0);
        const endOfLineToInspect = new vscode.Position(position.line - (lineToInspect - 1), 0);
        let potentialCharacterLine = document.getText(new vscode.Range(beginningOfLineToInspect, endOfLineToInspect)).trimRight();
        potentialCharacterLine = trimCharacterExtension(potentialCharacterLine);
        potentialCharacterLine = trimCharacterForceSymbol(potentialCharacterLine);
        if (isAlreadyMentionedCharacter(potentialCharacterLine)) {
            if (foundLastCharacter) {
                characterBeforeLast = potentialCharacterLine;
            } else {
                foundLastCharacter = true;
            }
        }
        lineToInspect++;
        // Bounded at the top of the document: the original looped with no
        // bound and would construct a negative vscode.Position when fewer
        // than two known speakers precede the cursor.
    } while (!characterBeforeLast && position.line - lineToInspect >= 0);
    return characterBeforeLast;
}
/**
 * Calculates an approximation (in seconds) of how long a line of dialogue
 * takes to say.
 */
export const calculateDialogueDuration = (dialogue: string): number => {
    // According to this paper: http://www.office.usp.ac.jp/~klinger.w/2010-An-Analysis-of-Articulation-Rates-in-Movies.pdf
    // the average amount of syllables per second in the 14 movies analysed is
    // 5.13994 (0.1945548 s/syllable). The syllable count is approximated as
    // one third of the alphanumeric characters.
    const sanitized = dialogue.replace(/[^\w]/gi, '');
    let duration = (sanitized.length / 3) * 0.1945548;
    // Pauses: sentence-ending punctuation and mid-sentence commas add time.
    // NOTE(review): the original matched both patterns with one /g regex and
    // then indexed the result as if it held capture groups, so only the first
    // two matches (of either kind) were counted — and weighted by the match
    // string's length. Count every occurrence of each kind instead.
    const sentenceEnds = dialogue.match(/[.?!:] /g);
    if (sentenceEnds) {
        duration += 0.75 * sentenceEnds.length;
    }
    const commas = dialogue.match(/, /g);
    if (commas) {
        duration += 0.3 * commas.length;
    }
    return duration;
}
/** A dialogue lasting more than 30 seconds counts as a monologue. */
export const isMonologue = (seconds: number): boolean => seconds > 30;
/**
 * Left-pads a single-digit value with a zero (returns a string).
 * Values of 10 or more are returned unchanged, keeping their original type.
 */
function padZero(i: any) {
    return i < 10 ? "0" + i : i;
}
/**
 * Formats a duration in seconds as "hh:mm:ss".
 * A Date is used purely as an h/m/s calculator: setSeconds() rolls the
 * overflow into minutes and hours.
 */
export function secondsToString(seconds: number): string {
    // new Date(0) (epoch) replaces the original new Date(null), which is a
    // type error under modern TypeScript but coerced to 0 at runtime.
    const time = new Date(0);
    time.setHours(0);
    time.setMinutes(0);
    time.setSeconds(seconds);
    const pad = (n: number): string => String(n).padStart(2, "0");
    return `${pad(time.getHours())}:${pad(time.getMinutes())}:${pad(time.getSeconds())}`;
}
/**
 * Formats a duration in seconds as "hh:mm:ss" for durations of an hour or
 * more, and as total-minutes "mm:ss" otherwise.
 * NOTE(review): declared to return string but yields undefined for
 * sub-second durations; kept as-is for caller compatibility.
 */
export function secondsToMinutesString(seconds: number): string {
    if (seconds < 1) return undefined;
    // new Date(0) replaces the original new Date(null) (a type error under
    // modern TypeScript, coerced to 0 at runtime); setSeconds() rolls the
    // overflow into minutes/hours.
    const time = new Date(0);
    time.setHours(0);
    time.setMinutes(0);
    time.setSeconds(seconds);
    const pad = (n: number): string => String(n).padStart(2, "0");
    if (seconds >= 3600) {
        return `${pad(time.getHours())}:${pad(time.getMinutes())}:${pad(time.getSeconds())}`;
    }
    return `${pad(time.getHours() * 60 + time.getMinutes())}:${pad(time.getSeconds())}`;
}
/** Discards all existing scene numbers and renumbers from scratch. */
export const overwriteSceneNumbers = () => {
    telemetry.reportTelemetry("command:fountain.overwriteSceneNumbers");
    // Strip every existing number first, then renumber. writeSceneNumbers()
    // performs the single editor edit: calling
    // vscode.window.activeTextEditor.edit() more than once per callback is
    // unpredictable.
    const documentText = vscode.window.activeTextEditor.document.getText();
    writeSceneNumbers(clearSceneNumbers(documentText));
}
/** Renumbers scenes in place, keeping parseable existing numbers intact. */
export const updateSceneNumbers = () => {
    telemetry.reportTelemetry("command:fountain.updateSceneNumbers");
    writeSceneNumbers(vscode.window.activeTextEditor.document.getText());
}
/** Removes the trailing " #…#" number from every scene heading. */
const clearSceneNumbers = (fullText: string): string => {
    const sceneHeadings = new RegExp(parser.regex.scene_heading.source, "igm");
    return fullText.replace(sceneHeadings, (heading: string) => heading.replace(/ #.*#$/, ""));
}
// rewrites/updates Scene Numbers using the configured Numbering Schema (currently only 'Standard', not yet configurable)
const writeSceneNumbers = (fullText: string) => {
    // collect existing numbers (they mostly shouldn't change)
    const oldNumbers: string[] = [];
    const regexSceneHeadings = new RegExp(parser.regex.scene_heading.source, "igm");
    const numberingSchema = sceneNumbering.makeSceneNumberingSchema(sceneNumbering.SceneNumberingSchemas.Standard);
    var m;
    // First pass: scan every scene heading and record its current number
    // (null when a heading has no number yet).
    while (m = regexSceneHeadings.exec(fullText)) {
        const matchExisting = m[0].match(/#(.+)#$/);
        if (!matchExisting) oldNumbers.push(null) /* no match = no number = new number required in this slot */
        else if (numberingSchema.canParse(matchExisting[1])) oldNumbers.push(matchExisting[1]); /* existing scene number */
        /* ELSE: didn't parse - custom scene numbers are skipped */
    }
    // work out what they should actually be, according to the schema
    const newNumbers = sceneNumbering.generateSceneNumbers(oldNumbers);
    if (newNumbers) {
        // Second pass: rewrite each heading, consuming newNumbers in order.
        // (Reusing the /g regex is safe here: its lastIndex was reset to 0
        // when exec() returned null and ended the loop above.)
        const newText = fullText.replace(regexSceneHeadings, (heading) => {
            const matchExisting = heading.match(/#(.+)#$/);
            if (matchExisting && !numberingSchema.canParse(matchExisting[1]))
                return heading; /* skip re-writing custom scene numbers */
            const noPrevHeadingNumbers = heading.replace(/ #.+#$/, "")
            const newHeading = `${noPrevHeadingNumbers} #${newNumbers.shift()}#`
            return newHeading
        })
        // Apply everything as one whole-document edit; multiple edit() calls
        // per command callback are unpredictable.
        vscode.window.activeTextEditor.edit(editBuilder => editBuilder.replace(
            new vscode.Range(new vscode.Position(0, 0), new vscode.Position(vscode.window.activeTextEditor.document.lineCount, 0)),
            newText
        ))
    }
}
/**
 * Shifts scene/s at the selected text up or down.
 * @param editor the editor whose selection marks the scene(s) to move
 * @param parsed parse output for the document, used to locate scene/section boundaries
 * @param direction -1 to move the scene(s) up, 1 to move them down
 */
export const shiftScenes = (editor: vscode.TextEditor, parsed: parser.parseoutput, direction: number) => {
    var numNewlinesAtEndRequired = 0;
    const selectSceneAt = (sel: vscode.Selection): vscode.Selection => {
        // returns range that contains whole scenes that overlap with the selection
        const headingsBefore = parsed.tokens
            .filter(token => (token.is("scene_heading") || token.is("section"))
                && token.line <= sel.active.line
                && token.line <= sel.anchor.line)
            .sort((a, b) => b.line - a.line);
        const headingsAfter = parsed.tokens
            .filter(token => (token.is("scene_heading") || token.is("section"))
                && token.line > sel.active.line
                && token.line > sel.anchor.line)
            .sort((a, b) => a.line - b.line);
        if (headingsBefore.length == 0) return null;
        const selStart = +headingsBefore[0].line;
        if (headingsAfter.length) {
            const selEnd = +headingsAfter[0].line;
            return new vscode.Selection(selStart, 0, selEnd, 0);
        }
        else {
            // +2 is where the next scene would start if there was one. done to make it look consistent.
            const selEnd = last(parsed.tokens.filter(token => token.line)).line + 2;
            if (selEnd >= editor.document.lineCount) numNewlinesAtEndRequired = selEnd - editor.document.lineCount + 1;
            return new vscode.Selection(selStart, 0, selEnd, 0);
        }
    }
    // get range of scene/s that are shifting
    var moveSelection = selectSceneAt(editor.selection);
    if (moveSelection == null) return; // edge case: using command before the first scene
    var moveText = editor.document.getText(moveSelection) + (new Array(numNewlinesAtEndRequired + 1).join("\n"));
    numNewlinesAtEndRequired = 0;
    // get range of scene being swapped with selected scene/s
    var aboveSelection = (direction == -1) && selectSceneAt(new vscode.Selection(moveSelection.anchor.line - 1, 0, moveSelection.anchor.line - 1, 0));
    var belowSelection = (direction == 1) && selectSceneAt(new vscode.Selection(moveSelection.active.line + 1, 0, moveSelection.active.line + 1, 0));
    // edge cases: no scenes above or below to swap with
    if (!belowSelection && !aboveSelection) return;
    // NOTE(review): guard against the "below" range overlapping the moved
    // range (presumably possible at the end of the document) — confirm.
    if (belowSelection && belowSelection.anchor.line < moveSelection.active.line) return;
    var reselectDelta = 0;
    const newLinePos = editor.document.lineAt(editor.document.lineCount - 1).range.end;
    editor.edit(editBuilder => {
        // going bottom-up to avoid re-aligning line numbers
        // might need empty lines at the bottom so the cut-paste behaves the same as if there were more scenes
        while (numNewlinesAtEndRequired) {
            // vscode makes this \r\n when appropriate
            editBuilder.insert(newLinePos, "\n");
            numNewlinesAtEndRequired--;
        }
        // paste below?
        if (belowSelection) {
            editBuilder.insert(new vscode.Position(belowSelection.active.line, 0), moveText);
            reselectDelta = belowSelection.active.line - belowSelection.anchor.line;
        }
        // delete original
        editBuilder.delete(moveSelection)
        // paste above?
        if (aboveSelection) {
            editBuilder.insert(new vscode.Position(aboveSelection.anchor.line, 0), moveText);
            reselectDelta = aboveSelection.anchor.line - moveSelection.anchor.line;
        }
    });
    // reselect any text that was originally selected / cursor position
    editor.selection = new vscode.Selection(
        editor.selection.anchor.translate(reselectDelta),
        editor.selection.active.translate(reselectDelta));
    editor.revealRange(editor.selection);
};
/**
 * Returns the final element of `array`.
 *
 * Generic signature replaces the untyped `any[] -> any` form so callers keep
 * their element type; accepting `readonly T[]` stays compatible with every
 * existing call site. Note: returns `undefined` at runtime for an empty array.
 */
export const last = function <T>(array: readonly T[]): T {
    return array[array.length - 1];
}
/** Reads the file at `fspath` and returns its raw bytes as a base64 string. */
export function fileToBase64(fspath:string){
    const contents = fs.readFileSync(fspath);
    return contents.toString('base64');
}
/**
 * Opens `p` with the platform's default application.
 * On win32 the command is empty: cmd.exe launches a bare quoted path with
 * its associated program.
 */
export function openFile(p:string){
    let cmd: string;
    switch (process.platform) {
        case 'darwin': cmd = 'open'; break;
        case 'win32': cmd = ''; break;
        default: cmd = 'xdg-open';
    }
    // NOTE(review): p is interpolated into a shell string — a path containing
    // a double quote could inject commands; confirm callers pass trusted paths.
    const exec = require('child_process').exec;
    exec(`${cmd} "${p}"`);
}
/**
 * Reveals the file at `p` in the platform's file manager.
 *
 * Fixes vs. the previous version:
 * - uses execFile (argument vector) instead of exec (shell string), so paths
 *   with spaces or shell metacharacters can neither break the command nor
 *   inject extra ones;
 * - macOS: `open -R` (reveal in Finder) — the lowercase `-r` is not a valid
 *   open(1) flag;
 * - Linux: `xdg-open` on the parent directory — macOS's `open` does not exist
 *   there (the old default branch used it).
 */
export function revealFile(p:string){
    const execFile = require('child_process').execFile;
    if(process.platform == "win32"){
        // explorer.exe opens the containing folder with the file pre-selected;
        // the /select, switch must be fused with the path in one argument.
        execFile('explorer.exe', [`/select,${p}`]);
    }
    else if(process.platform == "darwin"){
        execFile('open', ['-R', p]);
    }
    else{
        // No portable "reveal" on Linux: open the parent directory instead.
        execFile('xdg-open', [path.parse(p).dir]);
    }
}
/**
 * Absolute path of the directory the compiled extension code runs from;
 * bundled assets are emitted alongside it at build time.
 */
export function assetsPath(): string{
    return __dirname;
}
/** Subset of the extension's package.json surfaced by getPackageInfo(). */
interface IPackageInfo {
    name: string;
    version: string;
    // Application Insights instrumentation key declared in package.json.
    aiKey: string;
}
/**
 * Reads this extension's own manifest from the VS Code extension registry.
 * Returns null when the extension (or its packageJSON) is unavailable.
 */
export function getPackageInfo(): IPackageInfo | null {
    const packageJSON = vscode.extensions.getExtension('piersdeseilligny.betterfountain')?.packageJSON;
    if (!packageJSON) {
        return null;
    }
    const { name, version, aiKey } = packageJSON;
    return { name, version, aiKey };
}
//Simple n-bit hash
// Pearson-style hash over the UTF-16 code units of `message`, seeded with the
// message length. The substitution table is the identity permutation over
// 0..2^n-1 (a real Pearson hash would shuffle it), and indices are reduced
// modulo (2^n - 1), so the result lies in 0..2^n-2.
function nPearsonHash(message: string, n = 8): number {
    const tableSize = 2 ** n;
    const table: number[] = [];
    for (let i = 0; i < tableSize; i++) {
        table.push(i);
    }
    const mod = table.length - 1;
    let hash = message.length % mod;
    for (let i = 0; i < message.length; i++) {
        hash = table[(hash + message.charCodeAt(i)) % mod];
    }
    return hash;
}
/**
 * Converts an HSV triple (h, s, v each nominally in [0,1]) to 8-bit RGB.
 * The hue circle is split into six sectors; each sector is a fixed
 * arrangement of the value/intermediate channels. Out-of-range sectors
 * (e.g. negative h) fall back to black, matching the original switch.
 */
function HSVToRGB(h: number, s: number, v: number): Array<number> {
    const sector = Math.floor(h * 6);
    const f = h * 6 - sector;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    const sectorChannels: number[][] = [
        [v, t, p], [q, v, p], [p, v, t],
        [p, q, v], [t, p, v], [v, p, q],
    ];
    const [r, g, b] = sectorChannels[sector % 6] || [0, 0, 0];
    return [Math.round(r * 255), Math.round(g * 255), Math.round(b * 255)];
}
//We are using colors with same value and saturation as highlighters
/** Maps a word deterministically to an RGB color via a coarse 5-bit hue hash. */
export function wordToColor(word: string, s:number = 0.5, v:number = 1): Array<number> {
    // 5 hash bits keep hues on a coarse grid so that colors are spread apart.
    const bits = 5;
    const hue = nPearsonHash(word, bits) / 2 ** (8 - bits);
    return HSVToRGB(hue, s, v);
}
/** Formats one 0–255 color channel as a two-digit lowercase hex string. */
function componentToHex(c:number) {
    return c.toString(16).padStart(2, "0");
}
/** Formats the first three entries of `rgb` as a "#rrggbb" hex color. */
export function rgbToHex(rgb:number[]):string {
    return [rgb[0], rgb[1], rgb[2]].reduce((acc, channel) => acc + componentToHex(channel), "#");
}
export function median (values:number[]):number {
if(values.length == 0) return 0;
values.sort( function(a,b) {return a - b;} );
var half = Math.floor(values.length/2);
if(values.length % 2)
return values[half];
else
return (values[half-1] + values[half]) / 2.0;
} | the_stack |
import { type ComputedRef, computed } from 'vue'
import { isNil, isString, isSymbol } from 'lodash-es'
import { type VKey, callEmit, useControlledProp } from '@idux/cdk/utils'
import { type TableLocale } from '@idux/components/i18n'
import { type MenuClickOptions, type MenuData } from '@idux/components/menu'
import { type TableProps } from '../types'
import { type TableColumnMerged, type TableColumnMergedSelectable } from './useColumns'
import { type DataSourceContext, type MergedData } from './useDataSource'
/**
 * Composable implementing row selection for the table's `selectable` column:
 * controlled `selectedRowKeys`, parent/child cascade for tree data, header
 * checkbox state, and the header dropdown menu with its click handler.
 */
export function useSelectable(
  props: TableProps,
  locale: ComputedRef<TableLocale>,
  flattedColumns: ComputedRef<TableColumnMerged[]>,
  { mergedMap, paginatedMap }: DataSourceContext,
): SelectableContext {
  // The (at most one) column of type 'selectable'; undefined disables selection.
  const selectable = computed(() =>
    flattedColumns.value.find(column => 'type' in column && column.type === 'selectable'),
  ) as ComputedRef<TableColumnMergedSelectable | undefined>
  // Controlled prop: mirrors props.selectedRowKeys, defaulting to [].
  const [selectedRowKeys, setSelectedRowKeys] = useControlledProp(props, 'selectedRowKeys', () => [])
  // Keys of rows on the current page, split by the column's `disabled` predicate.
  const currentPageRowKeys = computed(() => {
    const { disabled } = selectable.value || {}
    const enabledRowKeys: VKey[] = []
    const disabledRowKeys: VKey[] = []
    paginatedMap.value.forEach((currData, key) => {
      if (disabled?.(currData.record)) {
        disabledRowKeys.push(key)
      } else {
        enabledRowKeys.push(key)
      }
    })
    return { enabledRowKeys, disabledRowKeys }
  })
  // Ancestors of selected rows that are not themselves selected render an
  // indeterminate (partially checked) checkbox; disabled ancestors are skipped.
  const indeterminateRowKeys = computed(() => {
    const indeterminateKeySet = new Set<VKey>()
    const selectedKeys = selectedRowKeys.value
    const { disabledRowKeys } = currentPageRowKeys.value
    const dataMap = mergedMap.value
    selectedKeys.forEach(key => {
      const { parentKey } = dataMap.get(key) || {}
      if (!isNil(parentKey)) {
        let parent = dataMap.get(parentKey)
        // Only walk upwards when the direct parent isn't fully selected itself.
        if (parent && !selectedKeys.includes(parent.rowKey)) {
          while (parent && !isNil(parent?.rowKey)) {
            if (!disabledRowKeys.includes(parent.rowKey)) {
              indeterminateKeySet.add(parent.rowKey)
            }
            parent = !isNil(parent.parentKey) ? dataMap.get(parent.parentKey) : undefined
          }
        }
      }
    })
    return [...indeterminateKeySet]
  })
  // Count of selected rows on the current page (disabled rows excluded).
  const countCurrentPageSelected = computed(() => {
    const selectedKeys = selectedRowKeys.value
    const { disabledRowKeys } = currentPageRowKeys.value
    let total = 0
    paginatedMap.value.forEach((_, key) => {
      if (!disabledRowKeys.includes(key) && selectedKeys.includes(key)) {
        total++
      }
    }, 0)
    return total
  })
  // Whether every selectable row on the current page is selected.
  const currentPageAllSelected = computed(() => {
    const dataCount = paginatedMap.value.size
    const disabledCount = currentPageRowKeys.value.disabledRowKeys.length
    // An empty page, or one where every row is disabled, never counts as "all".
    if (dataCount === 0 || dataCount === disabledCount) {
      return false
    }
    return dataCount === disabledCount + countCurrentPageSelected.value
  })
  // Whether the current page is only partially selected.
  const currentPageSomeSelected = computed(() => !currentPageAllSelected.value && countCurrentPageSelected.value > 0)
  // Commits a new key list and notifies the column's onChange with the
  // matching records (keys absent from the data map contribute no record).
  const emitChange = (tempRowKeys: VKey[]) => {
    setSelectedRowKeys(tempRowKeys)
    const dataMap = mergedMap.value
    const { onChange } = selectable.value || {}
    if (onChange) {
      const selectedRecords: unknown[] = []
      tempRowKeys.forEach(key => {
        const currData = dataMap.get(key)
        currData && selectedRecords.push(currData.record)
      })
      callEmit(onChange, tempRowKeys, selectedRecords)
    }
  }
  // Toggles a single row. In multiple mode the toggle cascades: selecting adds
  // enabled descendants and may complete ancestors; deselecting removes
  // descendants and ancestors. In single mode the key simply replaces the set.
  const handleSelectChange = (key: VKey, record: unknown) => {
    const dataMap = mergedMap.value
    const { disabledRowKeys } = currentPageRowKeys.value
    const { multiple, onSelect } = selectable.value || {}
    let tempRowKeys = [...selectedRowKeys.value]
    const index = tempRowKeys.indexOf(key)
    const selected = index >= 0
    if (multiple) {
      const currData = dataMap.get(key)
      const childrenKeys = getChildrenKeys(currData, disabledRowKeys)
      if (selected) {
        tempRowKeys.splice(index, 1)
        const parentKeys = getParentKeys(dataMap, currData, disabledRowKeys)
        tempRowKeys = tempRowKeys.filter(key => !parentKeys.includes(key) && !childrenKeys.includes(key))
      } else {
        tempRowKeys.push(key)
        tempRowKeys.push(...childrenKeys)
      }
      // Re-derive ancestor checked state from the updated key list.
      setParentSelected(dataMap, currData, tempRowKeys, disabledRowKeys)
    } else {
      tempRowKeys = selected ? [] : [key]
    }
    callEmit(onSelect, !selected, record)
    emitChange(tempRowKeys)
  }
  // Header checkbox: select or clear every enabled row of the current page,
  // leaving selections on other pages untouched.
  const handleHeadSelectChange = () => {
    const { enabledRowKeys } = currentPageRowKeys.value
    const tempRowKeySet = new Set(selectedRowKeys.value)
    if (currentPageAllSelected.value) {
      enabledRowKeys.forEach(key => tempRowKeySet.delete(key))
    } else {
      enabledRowKeys.forEach(key => tempRowKeySet.add(key))
    }
    emitChange([...tempRowKeySet])
  }
  const mergedSelectableMenus = useMergedMenus(selectable, locale)
  const handleSelectableMenuClick = useMenuClickHandle(selectable, mergedMap, paginatedMap, selectedRowKeys, emitChange)
  return {
    selectable,
    selectedRowKeys,
    indeterminateRowKeys,
    currentPageRowKeys,
    currentPageAllSelected,
    currentPageSomeSelected,
    handleSelectChange,
    handleHeadSelectChange,
    mergedSelectableMenus,
    handleSelectableMenuClick,
  }
}
/** Everything useSelectable exposes to the table's rendering layer. */
export interface SelectableContext {
  // The resolved 'selectable' column, if any.
  selectable: ComputedRef<TableColumnMergedSelectable | undefined>
  // Currently selected keys (controlled via props.selectedRowKeys).
  selectedRowKeys: ComputedRef<VKey[]>
  // Ancestors to render with an indeterminate checkbox.
  indeterminateRowKeys: ComputedRef<VKey[]>
  // Current-page keys split by the column's disabled predicate.
  currentPageRowKeys: ComputedRef<{
    enabledRowKeys: VKey[]
    disabledRowKeys: VKey[]
  }>
  currentPageAllSelected: ComputedRef<boolean>
  currentPageSomeSelected: ComputedRef<boolean>
  // Row checkbox toggle handler.
  handleSelectChange: (key: VKey, record: unknown) => void
  // Header checkbox toggle handler.
  handleHeadSelectChange: () => void
  // Items for the header dropdown (built-ins resolved to localized entries).
  mergedSelectableMenus: ComputedRef<MenuData[]>
  handleSelectableMenuClick: (options: MenuClickOptions) => void
}
/**
 * Depth-first collection of descendant row keys. A disabled row's own key is
 * skipped, but its subtree is still traversed.
 */
function getChildrenKeys(currData: MergedData | undefined, disabledRowKeys: VKey[]) {
  const keys: VKey[] = []
  const children = currData?.children ?? []
  for (const child of children) {
    if (!disabledRowKeys.includes(child.rowKey)) {
      keys.push(child.rowKey)
    }
    keys.push(...getChildrenKeys(child, disabledRowKeys))
  }
  return keys
}
/**
 * Collects the keys of every enabled ancestor of `currData`, walking up via
 * parentKey until the chain ends.
 *
 * Fix: the previous loop condition `while (currData?.parentKey)` tested the
 * key's truthiness, so a falsy-but-valid key (0 or '') ended the walk early —
 * inconsistent with the isNil-based checks used by setParentSelected. A
 * null/undefined check preserves those keys.
 */
function getParentKeys(dataMap: Map<VKey, MergedData>, currData: MergedData | undefined, disabledRowKeys: VKey[]) {
  const keys: VKey[] = []
  while (currData && currData.parentKey != null) {
    const { parentKey } = currData
    if (!disabledRowKeys.includes(parentKey)) {
      keys.push(parentKey)
    }
    currData = dataMap.get(parentKey)
  }
  return keys
}
/**
 * Mutates `tempRowKeys` in place so that each ancestor of `currData` is
 * selected exactly when all of its non-disabled children are selected.
 * The upward walk stops early once an ancestor is no longer fully selected,
 * since higher ancestors cannot become complete either.
 */
function setParentSelected(
  dataMap: Map<VKey, MergedData>,
  currData: MergedData | undefined,
  tempRowKeys: VKey[],
  disabledRowKeys: VKey[],
) {
  let parentSelected = true
  while (parentSelected && currData && !isNil(currData.parentKey)) {
    const parent = dataMap.get(currData.parentKey)
    // Disabled parents are left untouched (their checked state never changes).
    if (parent && !disabledRowKeys.includes(currData.parentKey)) {
      // A parent counts as selected when every child is selected or disabled.
      parentSelected = parent.children!.every(
        item => disabledRowKeys.includes(item.rowKey) || tempRowKeys.includes(item.rowKey),
      )
      const parentKeyIdx = tempRowKeys.findIndex(key => key === currData!.parentKey)
      if (parentSelected) {
        parentKeyIdx < 0 && tempRowKeys.push(currData.parentKey)
      } else {
        parentKeyIdx > -1 && tempRowKeys.splice(parentKeyIdx, 1)
      }
    }
    currData = parent
  }
}
// Sentinel keys for the built-in selection dropdown items; symbols cannot
// collide with user-provided menu keys.
const allMenuItemKey = Symbol('IDUX_TABLE_KEY_selectable-all')
const invertMenuItemKey = Symbol('IDUX_TABLE_KEY_selectable-invert')
const noneMenuItemKey = Symbol('IDUX_TABLE_KEY_selectable-none')
const pageInvertMenuItemKey = Symbol('IDUX_TABLE_KEY_selectable-pageInvert')
/**
 * Resolves the column's `menus` config into renderable MenuData: built-in
 * shorthand strings ('all' | 'invert' | 'none' | 'pageInvert') become
 * localized items keyed by module-private symbols, any other string becomes
 * a plain item, and object entries pass through untouched.
 */
function useMergedMenus(
  selectable: ComputedRef<TableColumnMergedSelectable | undefined>,
  locale: ComputedRef<TableLocale>,
) {
  return computed<MenuData[]>(() => {
    const menus = selectable.value?.menus
    if (!menus || menus.length === 0) {
      return []
    }
    const { selectAll, selectInvert, selectNone, selectPageInvert } = locale.value
    // A Map lookup avoids accidental hits on Object.prototype keys.
    const presets = new Map<string, MenuData>([
      ['all', { type: 'item', key: allMenuItemKey, label: selectAll }],
      ['invert', { type: 'item', key: invertMenuItemKey, label: selectInvert }],
      ['none', { type: 'item', key: noneMenuItemKey, label: selectNone }],
      ['pageInvert', { type: 'item', key: pageInvertMenuItemKey, label: selectPageInvert }],
    ])
    return menus.map(item => {
      if (!isString(item)) {
        return item
      }
      return presets.get(item) ?? { type: 'item', key: item, label: item }
    })
  })
}
/**
 * Builds the click handler for the header selection dropdown. Built-in items
 * (identified by the module-private symbol keys) run their preset action;
 * anything else is forwarded to the column's onMenuClick together with the
 * current page's enabled row keys.
 */
function useMenuClickHandle(
  selectable: ComputedRef<TableColumnMergedSelectable | undefined>,
  mergedMap: ComputedRef<Map<VKey, MergedData>>,
  paginatedMap: ComputedRef<Map<VKey, MergedData>>,
  selectedRowKeys: ComputedRef<VKey[]>,
  emitChange: (tempRowKeys: VKey[]) => void,
) {
  // Select every enabled row across ALL pages (mergedMap, not paginatedMap).
  const handleSelectAll = () => {
    const { disabled, onSelectAll } = selectable.value || {}
    const tempRowKeys: VKey[] = []
    mergedMap.value.forEach((currData, key) => {
      if (!disabled?.(currData.record)) {
        tempRowKeys.push(key)
      }
    })
    callEmit(onSelectAll, tempRowKeys)
    emitChange(tempRowKeys)
  }
  // Invert the selection across all pages; disabled rows keep their state.
  const handleSelectInvert = () => {
    const { disabled, onSelectInvert } = selectable.value || {}
    const tempRowKeys = [...selectedRowKeys.value]
    mergedMap.value.forEach((currData, key) => {
      if (disabled?.(currData.record)) {
        return
      }
      const index = tempRowKeys.indexOf(key)
      if (index >= 0) {
        tempRowKeys.splice(index, 1)
      } else {
        tempRowKeys.push(key)
      }
    })
    emitChange(tempRowKeys)
    callEmit(onSelectInvert, tempRowKeys)
  }
  // Clear the selection entirely.
  const handleSelectNone = () => {
    const { onSelectNone } = selectable.value || {}
    callEmit(onSelectNone)
    emitChange([])
  }
  // Invert only the current page; note the new selection REPLACES the old one
  // (keys from other pages are dropped, matching the emitted tempRowKeys).
  const handleSelectPageInvert = () => {
    const { disabled, onSelectPageInvert } = selectable.value || {}
    const tempRowKeys: VKey[] = []
    const currSelectedRowKeys = selectedRowKeys.value
    paginatedMap.value.forEach((currData, key) => {
      if (disabled?.(currData.record) || currSelectedRowKeys.includes(key)) {
        return
      }
      tempRowKeys.push(key)
    })
    callEmit(onSelectPageInvert, tempRowKeys)
    emitChange(tempRowKeys)
  }
  // Dispatch table from built-in symbol key to its preset action.
  const menuClickHandles = new Map<symbol, () => void>([
    [allMenuItemKey, handleSelectAll],
    [invertMenuItemKey, handleSelectInvert],
    [noneMenuItemKey, handleSelectNone],
    [pageInvertMenuItemKey, handleSelectPageInvert],
  ])
  return (options: MenuClickOptions) => {
    const key = options.key
    if (isSymbol(key) && menuClickHandles.has(key)) {
      const handle = menuClickHandles.get(key)!
      handle()
      return
    }
    // Custom item: hand the current page's enabled keys to the user callback.
    const { disabled, onMenuClick } = selectable.value || {}
    if (!onMenuClick) {
      return
    }
    const tempRowKeys: VKey[] = []
    paginatedMap.value.forEach((currData, key) => {
      if (disabled?.(currData.record)) {
        return
      }
      tempRowKeys.push(key)
    })
    callEmit(onMenuClick, options, tempRowKeys)
  }
}
import { isNullOrUndefined, KeyboardEventArgs } from '@syncfusion/ej2-base';
import { MarkdownParser } from './../base/markdown-parser';
import * as CONSTANT from './../base/constant';
import { IMarkdownSubCommands, IMDKeyboardEvent, IMDFormats } from './../base/interface';
import { MarkdownSelection } from './markdown-selection';
import { extend } from '@syncfusion/ej2-base';
import * as EVENTS from './../../common/constant';
/**
* SelectionCommands internal component
*
* @hidden
* @deprecated
*/
export class MDSelectionFormats {
    // Owning markdown editor/parser; populated via extend() in the constructor.
    private parent: MarkdownParser;
    private selection: MarkdownSelection;
    // Markdown tokens per command name (e.g. Bold -> '**') — presumably copied
    // in from the IMDFormats config by extend(); TODO confirm against callers.
    public syntax: { [key: string]: string };
    // Last sub-command applied; reported back through restore()'s callback.
    private currentAction: string;
    public constructor(parent: IMDFormats) {
        // Shallow-merges the config (parent, syntax, ...) onto this instance.
        extend(this, this, parent, true);
        this.selection = this.parent.markdownSelection;
        this.addEventListener();
    }
    private addEventListener(): void {
        this.parent.observer.on(CONSTANT.selectionCommand, this.applyCommands, this);
        this.parent.observer.on(EVENTS.KEY_DOWN_HANDLER, this.keyDownHandler, this);
    }
    // Maps keyboard shortcut actions onto the matching formatting sub-command.
    private keyDownHandler(e: IMDKeyboardEvent): void {
        switch ((e.event as KeyboardEventArgs).action) {
        case 'bold':
            this.applyCommands({ subCommand: 'Bold', callBack: e.callBack });
            e.event.preventDefault();
            break;
        case 'italic':
            this.applyCommands({ subCommand: 'Italic', callBack: e.callBack });
            e.event.preventDefault();
            break;
        case 'strikethrough':
            this.applyCommands({ subCommand: 'StrikeThrough', callBack: e.callBack });
            e.event.preventDefault();
            break;
        case 'uppercase':
            this.applyCommands({ subCommand: 'UpperCase', callBack: e.callBack });
            e.event.preventDefault();
            break;
        case 'lowercase':
            this.applyCommands({ subCommand: 'LowerCase', callBack: e.callBack });
            e.event.preventDefault();
            break;
        case 'superscript':
            this.applyCommands({ subCommand: 'SuperScript', callBack: e.callBack });
            e.event.preventDefault();
            break;
        case 'subscript':
            this.applyCommands({ subCommand: 'SubScript', callBack: e.callBack });
            e.event.preventDefault();
            break;
        }
    }
    // True when the doubled marker (e.g. '**') occurs anywhere in `text`.
    // The marker is backslash-escaped because search() takes a regex pattern.
    private isBold(text: string, cmd: string): boolean {
        return text.search('\\' + cmd + '\\' + cmd + '') !== -1;
    }
    // True when the single marker (e.g. '*') occurs anywhere in `text`.
    private isItalic(text: string, cmd: string): boolean {
        return text.search('\\' + cmd) !== -1;
    }
    // Returns all single-marker matches of `cmd` in `text` for the commands
    // that use single-character syntax; [''] for any other command.
    private isMatch(text: string, cmd: string): string[] {
        let matchText: string[] = [''];
        switch (cmd) {
        case this.syntax.Italic:
            matchText = text.match(this.singleCharRegx(cmd));
            break;
        case this.syntax.InlineCode:
            matchText = text.match(this.singleCharRegx(cmd));
            break;
        case this.syntax.StrikeThrough:
            matchText = text.match(this.singleCharRegx(cmd));
            break;
        }
        return matchText;
    }
    // Global regex matching the doubled marker, e.g. /(\*\*)/g.
    private multiCharRegx(cmd: string): RegExp {
        return new RegExp('(\\' + cmd + '\\' + cmd + ')', 'g');
    }
    // Global regex matching the single marker, e.g. /(\*)/g.
    private singleCharRegx(cmd: string): RegExp {
        return new RegExp('(\\' + cmd + ')', 'g');
    }
    /**
     * Returns whether the formatting `cmd` is already active at the caret /
     * around the current selection, by counting marker occurrences before and
     * after the caret: an odd count on both sides means the caret sits inside
     * an open marker pair. Bold markers are masked out ('$%@') before the
     * italic test so '**' is not miscounted as two italics.
     */
    public isAppliedCommand(cmd?: string): | boolean {
        // eslint-disable-next-line
        const selectCmd: string = '';
        let isFormat: boolean = false;
        const textArea: HTMLTextAreaElement = this.parent.element as HTMLTextAreaElement;
        const start: number = textArea.selectionStart;
        // Splits the full text into [before-caret, after-caret].
        const splitAt: Function = (index: number) => (x: string) => [x.slice(0, index), x.slice(index)];
        const splitText: string[] = splitAt(start)(textArea.value);
        const cmdB: string = this.syntax.Bold.substr(0, 1);
        const cmdI: string = this.syntax.Italic;
        const selectedText: string = this.parent.markdownSelection.getSelectedText(textArea);
        if (selectedText !== '' && selectedText === selectedText.toLocaleUpperCase() && cmd === 'UpperCase') {
            return true;
        } else if (selectedText === '') {
            // Collapsed selection: inspect the single characters on both
            // sides of the caret for the UpperCase check.
            const beforeText: string = textArea.value.substr(splitText[0].length - 1, 1);
            const afterText: string = splitText[1].substr(0, 1);
            if ((beforeText !== '' && afterText !== '' && beforeText.match(/[a-z]/i)) &&
                beforeText === beforeText.toLocaleUpperCase() && afterText === afterText.toLocaleUpperCase() && cmd === 'UpperCase') {
                return true;
            }
        }
        // No bold/italic markers anywhere: the simpler odd/odd counting works
        // directly for strikethrough, inline code, sub- and superscript.
        if (!(this.isBold(splitText[0], cmdB)) && !(this.isItalic(splitText[0], cmdI)) && !(this.isBold(splitText[1], cmdB)) &&
        !(this.isItalic(splitText[1], cmdI))) {
            if ((!isNullOrUndefined(this.isMatch(splitText[0], this.syntax.StrikeThrough)) &&
            !isNullOrUndefined(this.isMatch(splitText[1], this.syntax.StrikeThrough))) &&
            (this.isMatch(splitText[0], this.syntax.StrikeThrough).length % 2 === 1 &&
            this.isMatch(splitText[1], this.syntax.StrikeThrough).length % 2 === 1) && cmd === 'StrikeThrough') {
                isFormat = true;
            }
            if ((!isNullOrUndefined(this.isMatch(splitText[0], this.syntax.InlineCode)) &&
            !isNullOrUndefined(this.isMatch(splitText[1], this.syntax.InlineCode))) &&
            (this.isMatch(splitText[0], this.syntax.InlineCode).length % 2 === 1 &&
            this.isMatch(splitText[1], this.syntax.InlineCode).length % 2 === 1) && cmd === 'InlineCode') {
                isFormat = true;
            }
            /* eslint-disable */
            if ((!isNullOrUndefined(splitText[0].match(/\<sub>/g)) && !isNullOrUndefined(splitText[1].match(/\<\/sub>/g))) &&
            (splitText[0].match(/\<sub>/g).length % 2 === 1 &&
            splitText[1].match(/\<\/sub>/g).length % 2 === 1) && cmd === 'SubScript') {
                isFormat = true;
            }
            if ((!isNullOrUndefined(splitText[0].match(/\<sup>/g)) && !isNullOrUndefined(splitText[1].match(/\<\/sup>/g))) &&
            (splitText[0].match(/\<sup>/g).length % 2 === 1 && splitText[1].match(/\<\/sup>/g).length % 2 === 1) &&
            cmd === 'SuperScript') {
                isFormat = true;
            }
            /* eslint-enable */
        }
        if ((this.isBold(splitText[0], cmdB) && this.isBold(splitText[1], cmdB)) &&
        (splitText[0].match(this.multiCharRegx(cmdB)).length % 2 === 1 &&
        splitText[1].match(this.multiCharRegx(cmdB)).length % 2 === 1) && cmd === 'Bold') {
            isFormat = true;
        }
        // Mask bold markers so the single-character counts below are not
        // contaminated by '**' pairs.
        splitText[0] = this.isBold(splitText[0], cmdB) ? splitText[0].replace(this.multiCharRegx(cmdB), '$%@') : splitText[0];
        splitText[1] = this.isBold(splitText[1], cmdB) ? splitText[1].replace(this.multiCharRegx(cmdB), '$%@') : splitText[1];
        if ((!isNullOrUndefined(this.isMatch(splitText[0], this.syntax.Italic)) &&
        !isNullOrUndefined(this.isMatch(splitText[1], this.syntax.Italic))) &&
        (this.isMatch(splitText[0], this.syntax.Italic).length % 2 === 1 &&
        this.isMatch(splitText[1], this.syntax.Italic).length % 2 === 1) && cmd === 'Italic') {
            isFormat = true;
        }
        if ((!isNullOrUndefined(this.isMatch(splitText[0], this.syntax.StrikeThrough)) &&
        !isNullOrUndefined(this.isMatch(splitText[1], this.syntax.StrikeThrough))) &&
        (this.isMatch(splitText[0], this.syntax.StrikeThrough).length % 2 === 1 &&
        this.isMatch(splitText[1], this.syntax.StrikeThrough).length % 2 === 1) && cmd === 'StrikeThrough') {
            isFormat = true;
        }
        if ((!isNullOrUndefined(this.isMatch(splitText[0], this.syntax.InlineCode)) &&
        !isNullOrUndefined(this.isMatch(splitText[1], this.syntax.InlineCode))) &&
        (this.isMatch(splitText[0], this.syntax.InlineCode).length % 2 === 1 &&
        this.isMatch(splitText[1], this.syntax.InlineCode).length % 2 === 1) && cmd === 'InlineCode') {
            isFormat = true;
        }
        /* eslint-disable */
        if ((!isNullOrUndefined(splitText[0].match(/\<sub>/g)) && !isNullOrUndefined(splitText[1].match(/\<\/sub>/g))) &&
        (splitText[0].match(/\<sub>/g).length % 2 === 1 && splitText[1].match(/\<\/sub>/g).length % 2 === 1) && cmd === 'SubScript') {
            isFormat = true;
        }
        if ((!isNullOrUndefined(splitText[0].match(/\<sup>/g)) && !isNullOrUndefined(splitText[1].match(/\<\/sup>/g))) &&
        (splitText[0].match(/\<sup>/g).length % 2 === 1 && splitText[1].match(/\<\/sup>/g).length % 2 === 1) && cmd === 'SuperScript') {
            isFormat = true;
        /* eslint-enable */
        }
        return isFormat;
    }
    /**
     * Applies or removes the formatting for e.subCommand around the current
     * textarea selection, then restores an adjusted selection and records the
     * edit for undo/redo. Three paths: (1) format already applied — strip the
     * nearest surrounding markers; (2) non-empty unformatted selection — wrap
     * it (or unwrap when the selection itself carries the markers); (3) empty
     * selection — insert an empty marker pair and place the caret inside.
     */
    private applyCommands(e: IMarkdownSubCommands): void {
        this.currentAction = e.subCommand;
        const textArea: HTMLTextAreaElement = this.parent.element as HTMLTextAreaElement;
        this.selection.save(textArea.selectionStart, textArea.selectionEnd);
        const start: number = textArea.selectionStart;
        const end: number = textArea.selectionEnd;
        let addedLength: number = 0;
        const selection: { [key: string]: string | number } = this.parent.markdownSelection.getSelectedInlinePoints(textArea);
        if (this.isAppliedCommand(e.subCommand) && selection.text !== '') {
            // Path 1: remove the nearest marker pair surrounding the selection.
            const startCmd: string = this.syntax[e.subCommand];
            const endCmd: string = e.subCommand === 'SubScript' ? '</sub>' :
                e.subCommand === 'SuperScript' ? '</sup>' : this.syntax[e.subCommand];
            const startLength: number = (e.subCommand === 'UpperCase' || e.subCommand === 'LowerCase') ? 0 : startCmd.length;
            const startNo: number = textArea.value.substr(0, selection.start as number).lastIndexOf(startCmd);
            let endNo: number = textArea.value.substr(selection.end as number, textArea.value.length).indexOf(endCmd);
            endNo = endNo + (selection.end as number);
            const repStartText: string = this.replaceAt(
                textArea.value.substr(0, selection.start as number), startCmd, '', startNo, selection.start as number);
            const repEndText: string = this.replaceAt(
                textArea.value.substr(selection.end as number, textArea.value.length), endCmd, '', 0, endNo);
            textArea.value = repStartText + selection.text + repEndText;
            this.restore(textArea, start - startLength, end - startLength, e);
            return;
        }
        if (selection.text !== '' && !this.isApplied(selection, e.subCommand)) {
            // Path 2: non-empty selection without the format inside it.
            addedLength = (e.subCommand === 'UpperCase' || e.subCommand === 'LowerCase') ? 0 :
                this.syntax[e.subCommand].length;
            const repStart: string = textArea.value.substr(
                selection.start as number - this.syntax[e.subCommand].length, this.syntax[e.subCommand].length);
            let repEnd: string;
            // When the markers sit immediately outside the selection, unwrap
            // instead of wrapping again.
            if ((repStart === e.subCommand) || ((selection.start as number - this.syntax[e.subCommand].length ===
                textArea.value.indexOf(this.syntax[e.subCommand])) && (selection.end as number === textArea.value.lastIndexOf(
                this.syntax[e.subCommand]) || selection.end as number === textArea.value.lastIndexOf(
                '</' + this.syntax[e.subCommand].substring(1, 5))))) {
                if (e.subCommand === 'SubScript' || e.subCommand === 'SuperScript') {
                    repEnd = textArea.value.substr(selection.end as number, this.syntax[e.subCommand].length + 1);
                } else {
                    repEnd = textArea.value.substr(selection.end as number, this.syntax[e.subCommand].length);
                }
                const repStartText: string = this.replaceAt(
                    textArea.value.substr(0, selection.start as number),
                    repStart, '', selection.start as number - this.syntax[e.subCommand].length, selection.start as number);
                const repEndText: string = this.replaceAt(
                    textArea.value.substr(selection.end as number, textArea.value.length), repEnd, '', 0, repEnd.length);
                textArea.value = repStartText + selection.text + repEndText;
                this.restore(textArea, start - addedLength, end - addedLength, e);
            } else {
                // Wrap (or case-convert) the selected text.
                if (e.subCommand === 'SubScript' || e.subCommand === 'SuperScript') {
                    selection.text = this.syntax[e.subCommand] + selection.text
                        + '</' + this.syntax[e.subCommand].substring(1, 5);
                } else if (e.subCommand === 'UpperCase' || e.subCommand === 'LowerCase') {
                    selection.text = (e.subCommand === 'UpperCase') ? (selection.text as string).toUpperCase()
                        : (selection.text as string).toLowerCase();
                } else {
                    selection.text = this.syntax[e.subCommand] + selection.text + this.syntax[e.subCommand];
                }
                textArea.value = textArea.value.substr(0, selection.start as number) + selection.text +
                    textArea.value.substr(selection.end as number, textArea.value.length);
                this.restore(textArea, start + addedLength, end + addedLength, e);
            }
        } else if (e.subCommand !== 'UpperCase' && e.subCommand !== 'LowerCase') {
            // Path 3: empty or already-formatted selection — insert a marker
            // pair (stripping any existing markers from the text first).
            if (e.subCommand === 'SubScript' || e.subCommand === 'SuperScript') {
                selection.text = this.textReplace(selection.text as string, e.subCommand);
                selection.text = this.syntax[e.subCommand] + selection.text
                    + '</' + this.syntax[e.subCommand].substring(1, 5);
            } else {
                selection.text = this.textReplace(selection.text as string, e.subCommand);
                selection.text = this.syntax[e.subCommand] + selection.text + this.syntax[e.subCommand];
            }
            textArea.value = textArea.value.substr(0, selection.start as number)
                + selection.text + textArea.value.substr(selection.end as number, textArea.value.length);
            addedLength = this.syntax[e.subCommand].length;
            if (selection.start === selection.end) {
                this.restore(textArea, start + addedLength, end + addedLength, e);
            } else {
                this.restore(textArea, start + addedLength, end - addedLength, e);
            }
        } else {
            this.restore(textArea, start, end, e);
        }
        // Snapshot the textarea for undo/redo.
        this.parent.undoRedoManager.saveData();
    }
    // Replaces the first occurrence of `search` within input[start..end) only.
    private replaceAt(input: string, search: string, replace: string, start: number, end: number): string {
        return input.slice(0, start)
            + input.slice(start, end).replace(search, replace)
            + input.slice(end);
    }
    // Restores the selection to [start, end] and fires the command callback
    // with the action name and newly selected text.
    private restore(textArea: HTMLTextAreaElement, start: number, end: number, event?: IMarkdownSubCommands | IMDKeyboardEvent): void {
        this.selection.save(start, end);
        this.selection.restore(textArea);
        if (event && event.callBack) {
            event.callBack({
                requestType: this.currentAction,
                selectedText: this.selection.getSelectedText(textArea),
                editorMode: 'Markdown',
                event: event.event
            });
        }
    }
    // Strips `command`'s markers from `text`. For Italic inside Bold, bold
    // pairs are masked ('$%@'), single markers removed, then bold restored.
    private textReplace(text: string, command: string): string {
        let regx: RegExp = this.singleCharRegx(this.syntax[command]);
        switch (command) {
        case 'Bold':
            regx = this.multiCharRegx(this.syntax[command].substr(0, 1));
            text = text.replace(regx, '');
            break;
        case 'Italic':
            if (!this.isBold(text, this.syntax[command].substr(0, 1))) {
                text = text.replace(regx, '');
            } else {
                const regxB: RegExp = this.multiCharRegx(this.syntax[command].substr(0, 1));
                let repText: string = text;
                repText = repText.replace(regxB, '$%@').replace(regx, '');
                const regxTemp: RegExp = new RegExp('\\$%@', 'g');
                text = repText.replace(regxTemp, this.syntax[command].substr(0, 1) + this.syntax[command].substr(0, 1));
            }
            break;
        case 'StrikeThrough':
            text = text.replace(regx, '');
            break;
        case 'InlineCode':
            text = text.replace(regx, '');
            break;
        case 'SubScript':
            text = text.replace(/<sub>/g, '').replace(/<\/sub>/g, '');
            break;
        case 'SuperScript':
            text = text.replace(/<sup>/g, '').replace(/<\/sup>/g, '');
            break;
        }
        return text;
    }
    // Whether the selected text itself already contains `command`'s markers
    // (for UpperCase/LowerCase: whether the text is entirely that case).
    // Returns undefined for unknown commands.
    private isApplied(line: { [key: string]: string | number }, command: string): boolean | void {
        let regx: RegExp = this.singleCharRegx(this.syntax[command]);
        switch (command) {
        case 'SubScript':
        case 'SuperScript':
            regx = this.singleCharRegx(this.syntax[command]);
            return regx.test(line.text as string);
        case 'Bold':
        case 'StrikeThrough':
            regx = this.multiCharRegx(this.syntax[command].substr(0, 1));
            return regx.test(line.text as string);
        case 'UpperCase':
        case 'LowerCase':
            regx = new RegExp('^[' + this.syntax[command] + ']*$', 'g');
            return regx.test(line.text as string);
        case 'Italic': {
            let regTest: boolean;
            const regxB: RegExp = this.multiCharRegx(this.syntax[command].substr(0, 1));
            if (regxB.test(line.text as string)) {
                // Mask bold pairs so they are not counted as italics.
                let repText: string = line.text as string;
                repText = repText.replace(regxB, '$%#');
                regTest = regx.test(repText);
            } else {
                regTest = regx.test(line.text as string);
            }
            return regTest; }
        case 'InlineCode':
            return regx.test(line.text as string);
        }
    }
}
import path from 'path';
import minimatch from 'minimatch';
import isEmpty from 'lodash/isEmpty';
import {
getUrlToLocalDirectoryMapper,
prependProtocolToBareUrl,
} from './helpers';
import Downloader from './downloader';
import HtmlParser from './html-parser';
import type {
JamStackApp,
JamStackAppParams,
} from './jamstack-app';
import { GatsbyApp } from './jamstack-app';
import { PageCreator } from './page-creator';
import type { PageCreatorParams } from './page-creator';
import {
ScrapedPage,
Scraper,
} from './scraper';
import type { ScraperParams } from './scraper';
import {
replaceAttributes,
AttrTransformerDirecton,
} from './attr-transformer';
import {
ComponentScope,
HtmlToComponentsSettings,
} from './html-to-components';
import page404Handler, { Page404Params } from './page404-handler';
import debug from './debug';
import ResponseProcessor, { RedirectConfig } from './response-processor';
import type { PluginManagerType } from './pluginManager';
import { MigrationApi } from './migrationApi';
/** How trailing slashes in scraped page paths should be normalized. */
export enum TrailingSlash {
  Skip = 'skip',
  Remove = 'remove',
  Add = 'add'
}
/** Kinds of HTML transformations that can be applied to scraped pages. */
export enum TransformerRule {
  Replace = 'replace',
  ReplaceString = 'replaceString',
  ToComponent = 'tocomponent',
  RemoveAttribute = 'removeAttribute',
}
/** A single HTML transformation rule applied during page processing. */
export type Transformer = {
  rule: TransformerRule,
  // CSS selector of the elements the rule targets.
  selector: string,
  replacement: string,
  context: string,
  // Only relevant for ToComponent rules — presumably controls whether the
  // extracted component is page-local or global; TODO confirm.
  scope?: ComponentScope,
  // Attributes affected by RemoveAttribute rules.
  attributes: string[],
};
// The only PageCreator options exposed through SiteFlattenerParams.
type ExposedPageCreatorParams = Pick<PageCreatorParams, 'isEnabled' | 'pageIndexFile'>;
// Post-crawl export configuration (currently only redirect rules).
type Exports = {
  redirects: RedirectConfig,
};
/** Constructor options for SiteFlattener. */
export interface SiteFlattenerParams {
  // Root URL of the site to scrape; a bare host gets a protocol prepended.
  websiteUrl: string,
  // Directory where the generated JamStack app is created.
  workDir: string,
  gitRepository?: string,
  // Trailing-slash normalization; defaults to TrailingSlash.Add.
  trailingSlash?: TrailingSlash,
  scraperParams: ScraperParams,
  pageCreator?: ExposedPageCreatorParams,
  page404Params: Page404Params,
  transformers: Array<Transformer>,
  // Whether to convert scraped HTML to JSX page templates.
  htmltojsx: boolean,
  useSourceHtml?: boolean,
  disableTailwind?: boolean,
  // Local paths the downloader must not overwrite; defaults to [].
  reservedPaths?: Array<string>,
  allowFallbackHtml?: boolean,
  pluginManager?: PluginManagerType,
  exports?: Exports,
}
export class SiteFlattener {
params: SiteFlattenerParams;
app: JamStackApp;
  // Normalizes the raw params (protocol prefixes, defaults) and instantiates
  // the target JamStack app (currently always Gatsby).
  constructor(params: SiteFlattenerParams) {
    this.params = {
      // Default placed before the spread so a caller-provided value wins.
      reservedPaths: [],
      ...params,
      websiteUrl: prependProtocolToBareUrl(params.websiteUrl),
      trailingSlash: params.trailingSlash || TrailingSlash.Add,
      exports: params.exports,
      scraperParams: {
        ...params.scraperParams,
        // Page URLs get the same protocol normalization as the site URL.
        pageUrls: params.scraperParams.pageUrls.map(pageUrl => prependProtocolToBareUrl(pageUrl)),
      },
      pluginManager: params.pluginManager,
    };
    const jamStackAppParams: JamStackAppParams = {
      gitRepository: this.params.gitRepository,
      workDir: this.params.workDir,
      disableTailwind: this.params.disableTailwind,
    };
    this.app = new GatsbyApp(jamStackAppParams);
  }
  /** Entry point: runs the full scrape-and-flatten pipeline. */
  public async start() {
    await this.scrape();
  }
  /**
   * Crawls the website, turning each scraped page into an app page (via
   * PageCreator), downloading referenced assets into the app's static dir,
   * and optionally recording redirects for export once the crawl completes.
   */
  public async scrape() {
    const scraperParams = {
      ...this.params.scraperParams,
      // Files are downloaded through our Downloader instead of the scraper.
      enableFileDownload: false,
      downloadPath: getUrlToLocalDirectoryMapper(this.app.getStaticDir()),
    };
    const { page404Params, exports } = this.params;
    const responseProcessor = new ResponseProcessor({ websiteUrl: this.params.websiteUrl });
    const downloader = new Downloader(
      this.params.websiteUrl, this.app.getStaticDir(), this.params.reservedPaths,
    );
    const scraper = new Scraper(scraperParams);
    // Per-page pipeline: 404 handling -> transformers -> page creation ->
    // plugin hooks. Errors are logged and do not stop the crawl.
    scraper.on('pageReceived', async scrapedPage => {
      try {
        const { pageUrl } = scrapedPage;
        debug(`scraped page from ${pageUrl}.`);
        const scrapedPage$1 = page404Handler.processScrapedPage(scrapedPage, page404Params);
        const scrapedPage$2 = this.transformScrapedPage(scrapedPage$1);
        const migrationApi = MigrationApi.create({
          app: this.app,
          pageUrl,
        });
        const pageCreator = new PageCreator({
          ...this.getPageCreatorParams(scrapedPage$2),
          migrationApi,
        });
        debug(`creating page for ${pageUrl}.`);
        await pageCreator.createPage();
        if (this.params.pluginManager !== undefined) {
          this.params.pluginManager.onPageCreate({
            pagePath: pageUrl,
            document: (new HtmlParser(scrapedPage$2.processedHtml)).$,
            api: migrationApi,
            downloader,
          });
        }
      } catch (error) {
        debug(error);
      }
    });
    scraper.on('error', error => {
      debug(error.message);
    });
    scraper.on('fileReceived', async fileUrl => {
      await downloader.downloadFiles([fileUrl]);
    });
    scraper.on('requestStarted', async fileUrl => {
      await downloader.downloadFiles([fileUrl]);
    });
    // Redirect collection only happens when redirect export is configured.
    scraper.on('responseReceived', async response => {
      if (exports !== undefined && !isEmpty(exports) && !isEmpty(exports.redirects)) {
        responseProcessor.processRedirect(response);
      }
    });
    await scraper.Crawl();
    if (exports !== undefined && !isEmpty(exports) && !isEmpty(exports.redirects)) {
      responseProcessor.exportRedirects(exports.redirects);
    }
  }
private getConfPath(): string {
return path.resolve(__dirname, '..', 'conf');
}
private getPageTemplate(): string {
const templateName = this.params.htmltojsx ? 'template_html2jsx.jsx' : 'template_mono.jsx';
return path.resolve(this.getConfPath(), templateName);
}
private getComponentTemplate(templateName: string): string {
return path.resolve(this.getConfPath(), templateName);
}
private shouldUseSourceHtml() {
return this.params.useSourceHtml !== undefined ? this.params.useSourceHtml : true;
}
private removeIndexHtmlFromPageUrl(pageUrl: string): string {
return pageUrl.replace('/index.html', '/');
}
private shouldReplace(pageUrl: string, pattern: string): boolean {
return minimatch(pageUrl, pattern);
}
private transformAttributes(html: string) {
return replaceAttributes(html, AttrTransformerDirecton.Direct);
}
private transformScrapedHtml(html: string, pageUrl: string): string {
const htmlParser = new HtmlParser(html);
htmlParser.clean();
htmlParser.transformRelativeToInternal(pageUrl);
htmlParser.transformAbsoluteToRelative(pageUrl);
htmlParser.transformCfEmailToOrigin();
htmlParser.transformNewLineInInlineTags();
if (this.params.trailingSlash === TrailingSlash.Add) {
htmlParser.addTrailingSlash(pageUrl);
}
if (this.params.trailingSlash === TrailingSlash.Remove) {
htmlParser.removeTrailingSlash(pageUrl);
}
if (this.params.transformers) {
this.params.transformers
.filter(
item => (
item.rule === TransformerRule.ReplaceString && this.shouldReplace(pageUrl, item.context)
),
)
.forEach(item => htmlParser.replaceString(item.selector, item.replacement));
this.params.transformers
.filter(
item => item.rule === TransformerRule.Replace
&& this.shouldReplace(pageUrl, item.context),
)
.forEach(item => htmlParser.replace(item.selector, item.replacement));
}
// Cleanup primary attributes to avoid build issue from Helmet.
const emptyAttributeRemovalRules = [
{
selector: 'head link',
attributes: ['rel', 'href'],
},
{
selector: 'head meta',
attributes: ['name', 'charset', 'http-equiv', 'property', 'itemprop'],
},
{
selector: 'head noscript',
attributes: ['innerhtml'],
},
{
selector: 'head link',
attributes: ['rel', 'href'],
},
{
selector: 'head script',
attributes: ['src', 'innerhtml'],
},
{
selector: 'head style',
attributes: ['csstext'],
},
];
emptyAttributeRemovalRules.forEach(item => {
htmlParser.removeEmptyAttribute(item.selector, item.attributes);
});
const pageHtml = htmlParser.getPageHtml();
return this.transformAttributes(pageHtml);
}
private transformScrapedPage(scrapedPage: ScrapedPage): ScrapedPage {
const pageHtml = this.shouldUseSourceHtml() ? scrapedPage.rawHtml : scrapedPage.processedHtml;
const transformedPageUrl = this.removeIndexHtmlFromPageUrl(scrapedPage.pageUrl);
const transformedHtml = this.transformScrapedHtml(pageHtml, transformedPageUrl);
const htmlParser = new HtmlParser(transformedHtml);
return {
...scrapedPage,
pageUrl: transformedPageUrl,
processedHtml: htmlParser.getPageHtml(),
};
}
private getHtmlToComponentsSettings(): HtmlToComponentsSettings {
const tranformers = this.params.transformers || [];
const settings: HtmlToComponentsSettings = {
rules: tranformers
.filter(item => item.rule === TransformerRule.ToComponent)
.map(item => ({
selector: item.selector,
component: item.replacement,
scope: item.scope,
})),
};
return settings;
}
private getPageCreatorParams(scrapedPage: ScrapedPage): Omit<PageCreatorParams, 'migrationApi'> {
const images = scrapedPage.images.concat(scrapedPage.pictures || []);
const htmlParser = new HtmlParser(scrapedPage.processedHtml);
const htmlToComponentsSettings = this.getHtmlToComponentsSettings();
const pageCreatorParams: Omit<PageCreatorParams, 'migrationApi'> = {
...this.params.pageCreator,
pagesDir: this.app.getPagesDir(),
staticDir: this.app.getStaticDir(),
templatePath: this.getPageTemplate(),
templateDangerousHtml: this.getComponentTemplate('template_dangerous_html.jsx'),
pageUrl: scrapedPage.pageUrl,
headHtml: htmlParser.getHeadHtml(),
bodyHtml: htmlParser.getBodyHtml(),
scripts: scrapedPage.scripts,
inlineScripts: scrapedPage.inlineScripts,
styles: scrapedPage.styles,
inlineStyles: scrapedPage.inlineStyles,
images,
videos: scrapedPage.videos,
htmlTag: htmlParser.getHtmlTag(),
bodyTag: htmlParser.getBodyTag(),
downloadAssets: true,
htmlToComponents: this.params.htmltojsx,
allowFallbackHtml: this.params.allowFallbackHtml,
htmlToComponentsSettings,
reservedPaths: this.params.reservedPaths,
};
return pageCreatorParams;
}
}
export type {
ExposedPageCreatorParams as PageCreatorParams,
Exports,
};
import LRU from 'lru-cache';
import type { RenderResult } from './renderPage';
import type { CompleteTestGroupAllocation, RelevantTestGroupAllocation } from '../../../lib/abTestImpl';
import { Globals } from '../../../lib/vulcan-lib';
import type { Request } from 'express';
import { getCookieFromReq, getPathFromReq } from '../../utils/httpUtil';
import { isValidSerializedThemeOptions, defaultThemeOptions } from '../../../themes/themeNames';
import sumBy from 'lodash/sumBy';
// Page cache. This applies only to logged-out requests, and exists primarily
// to handle the baseload of traffic going to the front page and to pages that
// have gotten linked from high-traffic places.
//
// Complexity here is driven by three things:
// 1. Users that don't share a time zone can't share a page cache, because
// dates that appear on the page differ;
// 2. Two visitors in different A/B test groups can't share a cache entry,
// but we don't know which A/B tests were relevant to a page until after
// we've rendered it; and
// 3. When a page that is getting a lot of traffic expires from the page
// cache, we don't want to start many rerenders of it in parallel
// Byte budget for the page cache (entries are weighted by serialized size).
const maxPageCacheSizeBytes = 32*1024*1024; //32MB
// Entries expire after this long regardless of traffic.
const maxCacheAgeMs = 90*1000;
const pageCache = new LRU<string,RenderResult>({
  max: maxPageCacheSizeBytes,
  // Weight entries by serialized size so `max` acts as a byte budget.
  length: (page, key) => JSON.stringify(page).length + JSON.stringify(key).length,
  maxAge: maxCacheAgeMs,
  updateAgeOnGet: false,
  dispose: (key: string, _page) => {
    // The LRU key is a JSON blob of {cacheKey, abTestGroups}; only the
    // cacheKey part is needed to schedule an index sweep. (The previously
    // destructured abTestGroups was unused.)
    const parsedKey: {cacheKey: string, abTestGroups: RelevantTestGroupAllocation} = JSON.parse(key);
    keysToCheckForExpiredEntries.push(parsedKey.cacheKey);
  },
});
/**
 * Cheaply estimates the JSON-serialized size of a value, in characters:
 * strings count their length, other primitives count a flat 8, and
 * objects/arrays recursively sum their keys and values. This is an estimate
 * (quotes, punctuation, and escaping are ignored), not an exact length.
 */
const jsonSerializableEstimateSize = (obj: any): number => {
  if (obj === null || obj === undefined) {
    // typeof null === "object", which previously sent null into
    // Object.keys(null) and threw. Treat nullish like other primitives.
    return 8;
  } else if (typeof obj === "object") {
    let result = 0;
    for (const key of Object.keys(obj)) {
      result += jsonSerializableEstimateSize(key);
      result += jsonSerializableEstimateSize(obj[key]);
    }
    return result;
  } else if (typeof obj === "string") {
    return obj.length;
  } else {
    return 8;
  }
}
// FIXME: This doesn't get updated correctly. Previous iteration had entries
// removed when they should still be in cachedABtestsIndex; current iteration
// has duplicate entries accumulate over time.
//
// Maps a cache key to every A/B-test-group combination that has (or had) a
// cached render under that key. Pruned lazily by clearExpiredCacheEntries.
const cachedABtestsIndex: Record<string,Array<RelevantTestGroupAllocation>> = {};
// Cache keys whose index entries should be re-validated on the next sweep
// (pushed from the LRU dispose callback above).
let keysToCheckForExpiredEntries: Array<string> = [];
/**
 * Builds the page-cache key for a request from its path plus the cookies
 * that affect rendered output (theme and timezone).
 *
 * NOTE(review): when the timezone cookie is absent the theme is dropped from
 * the key as well, so differently-themed visitors may share a cache slot —
 * confirm this is intentional.
 */
export const cacheKeyFromReq = (req: Request): string => {
  const tz = getCookieFromReq(req, "timezone");
  const rawTheme = getCookieFromReq(req, "theme");
  const theme = rawTheme && isValidSerializedThemeOptions(rawTheme)
    ? rawTheme
    : JSON.stringify(defaultThemeOptions);
  const pagePath = getPathFromReq(req);
  return tz ? `${pagePath}&theme=${theme}&timezone=${tz}` : pagePath;
}
/**
 * A render that has started but not yet completed; concurrent requests with
 * compatible A/B test groups can await the same promise instead of
 * re-rendering.
 */
type InProgressRender = {
  cacheKey: string
  abTestGroups: CompleteTestGroupAllocation
  renderPromise: Promise<RenderResult>
};
// In-progress renders, keyed by cache key. Several parallel renders may
// exist for one key when their A/B test groups differ.
const inProgressRenders: Record<string,Array<InProgressRender>> = {};
// Serve a page from cache, or render it if necessary. Takes a set of A/B test
// groups for this request, which covers *all* A/B tests (including ones that
// may not be relevant to the request).
export const cachedPageRender = async (req: Request, abTestGroups, renderFn: (req:Request)=>Promise<RenderResult>) => {
const path = getPathFromReq(req);
const cacheKey = cacheKeyFromReq(req);
const cached = cacheLookup(cacheKey, abTestGroups);
// If already cached, return the cached version
if (cached) {
recordCacheHit();
//eslint-disable-next-line no-console
console.log(`Serving ${path} from cache; hit rate=${getCacheHitRate()}`);
return {
...cached,
cached: true
};
}
if (cacheKey in inProgressRenders) {
for (let inProgressRender of inProgressRenders[cacheKey]) {
if (objIsSubset(abTestGroups, inProgressRender.abTestGroups)) {
//eslint-disable-next-line no-console
console.log(`Merging request for ${path} into in-progress render`);
const result = await inProgressRender.renderPromise;
return {
...result,
cached: true,
};
}
}
//eslint-disable-next-line no-console
console.log(`In progress render merge of ${cacheKey} missed: mismatched A/B test groups (requested: ${JSON.stringify(abTestGroups)}, available: ${JSON.stringify(inProgressRenders[cacheKey].map(r=>r.abTestGroups))})`);
}
recordCacheMiss();
//eslint-disable-next-line no-console
console.log(`Rendering ${path} (not in cache; hit rate=${getCacheHitRate()})`);
const renderPromise = renderFn(req);
const inProgressRender = { cacheKey, abTestGroups, renderPromise };
if (cacheKey in inProgressRenders) {
inProgressRenders[cacheKey].push(inProgressRender);
} else {
inProgressRenders[cacheKey] = [inProgressRender];
}
const rendered = await renderPromise;
// eslint-disable-next-line no-console
console.log(`Completed render with A/B test groups: ${JSON.stringify(rendered.relevantAbTestGroups)}`);
cacheStore(cacheKey, rendered.relevantAbTestGroups, rendered);
inProgressRenders[cacheKey] = inProgressRenders[cacheKey].filter(r => r!==inProgressRender);
if (!inProgressRenders[cacheKey].length)
delete inProgressRenders[cacheKey];
clearExpiredCacheEntries();
return {
...rendered,
cached: false
};
}
/**
 * Finds a cached render for this cache key whose recorded A/B test groups
 * are a subset of the requester's groups; null when nothing usable exists.
 */
const cacheLookup = (cacheKey: string, abTestGroups: CompleteTestGroupAllocation): RenderResult|null|undefined => {
  const knownCombinations = cachedABtestsIndex[cacheKey];
  if (!knownCombinations) {
    // eslint-disable-next-line no-console
    console.log("Cache miss: no cached page with this cacheKey for any A/B test group combination");
    return null;
  }
  for (const combination of knownCombinations) {
    if (!objIsSubset(combination, abTestGroups)) continue;
    const hit = pageCache.get(JSON.stringify({
      cacheKey: cacheKey,
      abTestGroups: combination
    }));
    if (hit) return hit;
  }
  // eslint-disable-next-line no-console
  console.log(`Cache miss: page is cached, but with the wrong A/B test groups: wanted ${JSON.stringify(abTestGroups)}, had available ${JSON.stringify(cachedABtestsIndex[cacheKey])}`);
  return null;
}
/**
 * True when every key of `subset` is present in `superset` with a strictly
 * equal value; keys present only in `superset` are ignored.
 */
const objIsSubset = (subset: {}, superset: {}): boolean => {
  const sup = superset as Record<string, unknown>;
  const sub = subset as Record<string, unknown>;
  return Object.keys(sub).every(key => key in sup && sub[key] === sup[key]);
}
/**
 * Stores a completed render in the page cache and records its A/B test group
 * combination in the per-cacheKey index.
 */
const cacheStore = (cacheKey: string, abTestGroups: RelevantTestGroupAllocation, rendered: RenderResult): void => {
  const serializedKey = JSON.stringify({
    cacheKey: cacheKey,
    abTestGroups: abTestGroups
  });
  pageCache.set(serializedKey, rendered);
  const indexEntry = cachedABtestsIndex[cacheKey];
  if (indexEntry) {
    indexEntry.push(abTestGroups);
  } else {
    cachedABtestsIndex[cacheKey] = [abTestGroups];
  }
}
/**
 * For each cache key flagged by the LRU dispose hook, rebuilds its index
 * entry from the combinations still present in the page cache (deduplicating
 * via JSON serialization), dropping the key entirely when none survive.
 */
const clearExpiredCacheEntries = (): void => {
  for (const cacheKey of keysToCheckForExpiredEntries) {
    const stillCached = new Set<string>();
    for (const abTestGroups of cachedABtestsIndex[cacheKey] ?? []) {
      if (pageCache.get(JSON.stringify({ cacheKey, abTestGroups }))) {
        stillCached.add(JSON.stringify(abTestGroups));
      }
    }
    if (stillCached.size > 0) {
      cachedABtestsIndex[cacheKey] = [...stillCached].map(groups => JSON.parse(groups));
    } else {
      delete cachedABtestsIndex[cacheKey];
    }
  }
  keysToCheckForExpiredEntries = [];
}
// Running totals for hit-rate reporting (process lifetime; never reset).
let cacheHits = 0;
let cacheQueriesTotal = 0;
/** Records a request that was served from the page cache. */
export function recordCacheHit() {
  cacheHits++;
  cacheQueriesTotal++;
}
/** Records a request that missed the cache and required a render. */
export function recordCacheMiss() {
  cacheQueriesTotal++;
}
/** Records a request that skipped the cache entirely (cache not applicable). */
export function recordCacheBypass() {
  cacheQueriesTotal++;
}
/**
 * Fraction of recorded cache queries that were hits. Returns 0 rather than
 * NaN (0/0) before any query has been recorded.
 */
export function getCacheHitRate() {
  return cacheQueriesTotal === 0 ? 0 : cacheHits / cacheQueriesTotal;
}
/**
 * Debug helper (exposed via Globals below) that dumps the A/B-test index,
 * the page cache, and in-progress renders to the console, and compares the
 * LRU's tracked length against a directly recomputed total.
 * @param options.pruneCache When true, prune expired LRU entries first.
 */
function printCacheState(options:any={}) {
  const {pruneCache=false} = options;
  // eslint-disable-next-line no-console
  const log = console.log;
  log('cachedABtestsIndex = {');
  for (let cacheKey of Object.keys(cachedABtestsIndex)) {
    log(`  ${cacheKey}: [`);
    for (let abTestGroup of cachedABtestsIndex[cacheKey]) {
      log(`    ${JSON.stringify(abTestGroup)}`);
    }
    log(`  ],`);
  }
  log("}");
  if (pruneCache)
    pageCache.prune();
  log(`pageCache (length=${pageCache.length}) = {`);
  let directlyCalculatedLength = 0;
  pageCache.forEach((value,key,cache) => {
    log(`  ${key} => ...`);
    // Mirror the `length` function given to the LRU constructor.
    directlyCalculatedLength += JSON.stringify(value).length + JSON.stringify(key).length;
  });
  log("}");
  if (pageCache.length !== directlyCalculatedLength) {
    log("===============");
    log("LENGTH MISMATCH");
    log(`Expected: ${pageCache.length}, found: ${directlyCalculatedLength}`);
    log("===============");
  }
  log("inProgressRenders = {");
  for (let cacheKey of Object.keys(inProgressRenders)) {
    log(`  ${cacheKey}: [`);
    for (let inProgressRender of inProgressRenders[cacheKey]) {
      log(`    ${JSON.stringify(inProgressRender.abTestGroups)}`);
    }
    log("  ]");
  }
  log("}");
}
Globals.printCacheState = printCacheState;
/**
 * Heuristic leak detector: logs a warning when any cache-related data
 * structure grows past a size that normal operation shouldn't reach.
 */
export function checkForMemoryLeaks() {
  if (Object.keys(cachedABtestsIndex).length > 5000) {
    // eslint-disable-next-line no-console
    console.log(`Possible memory leak: cachedABtestsIndex has ${Object.keys(cachedABtestsIndex).length} entries`);
  }
  if (keysToCheckForExpiredEntries.length > 5000) {
    // Fixed: previously interpolated the whole array instead of its length.
    // eslint-disable-next-line no-console
    console.log(`Possible memory leak: keysToCheckForExpiredEntries has ${keysToCheckForExpiredEntries.length} entries`);
  }
  const cachedABtestsIndexArrayElements = sumBy(Object.keys(cachedABtestsIndex), key=>cachedABtestsIndex[key]?.length||0);
  if (cachedABtestsIndexArrayElements > 5000) {
    // eslint-disable-next-line no-console
    console.log(`Possible memory leak: cachedABtestsIndexArrayElements=${cachedABtestsIndexArrayElements}`);
  }
  const inProgressRenderCount = sumBy(Object.keys(inProgressRenders), key=>inProgressRenders[key]?.length||0);
  if (inProgressRenderCount > 100) {
    // eslint-disable-next-line no-console
    console.log(`Possible memory leak: inProgressRenderCount=${inProgressRenderCount}`);
  }
  const pageCacheContentsBytes = sumBy(pageCache.values(), v=>JSON.stringify(v).length);
  if (pageCacheContentsBytes > 2*maxPageCacheSizeBytes) {
    // eslint-disable-next-line no-console
    console.log(`Possible memory leak: pageCacheContentsBytes=${pageCacheContentsBytes}`);
  }
}
export function printInFlightRequests() {
let inProgressRenderKeys: string[] = [];
for (let cacheKey of Object.keys(inProgressRenders)) {
for (let render of inProgressRenders[cacheKey]) {
inProgressRenderKeys.push(render.cacheKey);
}
}
if (inProgressRenderKeys.length > 0) {
// eslint-disable-next-line no-console
console.log(`In progress: ${inProgressRenderKeys.join(", ")}`);
}
} | the_stack |
import { ErrorCodes } from "../errors";
import {
BinaryExpression,
BooleanLiteral,
MacroTimeFunctionInvocation,
ConditionalExpression,
Expression,
FunctionInvocation,
IdentifierNode,
NodePosition,
OperandType,
PartialZ80AssemblyLine,
Symbol,
UnaryExpression,
Z80AssemblyLine,
} from "../parser/tree-nodes";
import { HasUsageInfo } from "./assembler-types";
// --- Evaluation error messages thrown by the operator implementations below
// --- and surfaced to the user via reportEvaluationError
const STRING_CONVERSION_ERROR = "Cannot convert string to a number";
const DIV_BY_ZERO_ERROR = "Divide by zero error";
const ADD_ERROR = "Cannot add an integral value and a string";
const ADD_STRING_ERROR = "Only a string can be added to a string";
const COMPARE_ERROR = "Cannot compare a number with a string";
const COMPARE_STRING_ERROR = "String can be compared only to another string";
/**
 * Represents the possible types of an expression value
 */
export enum ExpressionValueType {
  // Evaluation failed (see ExpressionValue.Error)
  Error = 0,
  // Boolean value
  Bool,
  // Integral numeric value
  Integer,
  // Floating-point numeric value
  Real,
  // String value
  String,
  // The expression has not (yet) been evaluated
  NonEvaluated,
}
/**
 * Represents the value of an evaluated expression
 */
export class ExpressionValue {
  // Discriminator for the kind of payload held in _value
  private _type: ExpressionValueType;
  // Raw payload; undefined only for the Error sentinel
  private _value: boolean | number | string | undefined;

  /**
   * Used in case of expression evaluation errors
   */
  static Error = new ExpressionValue();

  /**
   * Represents a non-evaluated value
   *
   * NOTE(review): because ExpressionValueType.NonEvaluated is a numeric enum
   * member, the constructor's typeof switch records this sentinel with
   * _type === Integer and _value === 5. The code visible here relies only on
   * identity checks (isValid/isNonEvaluated), not on .type for this sentinel
   * — confirm before depending on NonEvaluated.type.
   */
  static NonEvaluated = new ExpressionValue(ExpressionValueType.NonEvaluated);

  /**
   * Initializes a value expression
   * @param value Value to initialize
   */
  constructor(value?: ExpressionValueType | boolean | number | string) {
    if (value === undefined) {
      // No value given: this instance is an error sentinel
      this._type = ExpressionValueType.Error;
      return;
    }
    // The value type is inferred from the JavaScript runtime type
    switch (typeof value) {
      case "boolean":
        this._type = ExpressionValueType.Bool;
        break;
      case "number":
        this._type = Number.isInteger(value)
          ? ExpressionValueType.Integer
          : ExpressionValueType.Real;
        break;
      case "string":
        this._type = ExpressionValueType.String;
        break;
    }
    this._value = value;
  }

  /**
   * Gets the type of the expression
   */
  get type(): ExpressionValueType {
    return this._type;
  }

  /**
   * Checks if the value of this expression is valid
   * (i.e. is neither the Error nor the NonEvaluated sentinel)
   */
  get isValid(): boolean {
    return (
      this !== ExpressionValue.NonEvaluated && this !== ExpressionValue.Error
    );
  }

  /**
   * Checks if the value of this expression is not evaluated
   */
  get isNonEvaluated(): boolean {
    return this === ExpressionValue.NonEvaluated;
  }

  /**
   * Gets the value of this instance
   * (exposed as a 16-bit unsigned word; see asWord)
   */
  get value(): number {
    return this.asWord();
  }

  /**
   * Returns the value as a long integer
   * @throws When the value is a non-numeric string or has an unexpected type
   */
  asLong(): number {
    switch (this.type) {
      case ExpressionValueType.Bool:
        return this._value ? 1 : 0;
      case ExpressionValueType.Integer:
        return this._value as number;
      case ExpressionValueType.Real:
        // Truncates toward negative infinity
        return Math.floor(this._value as number);
      case ExpressionValueType.String:
        // parseInt with no explicit radix: decimal, or hex for "0x" prefixes
        const parsedValue = parseInt(this._value as string);
        if (isNaN(parsedValue)) {
          throw new Error("Cannot convert string to an integer value.");
        } else {
          return parsedValue;
        }
      default:
        throw new Error("Unexpected expression value");
    }
  }

  /**
   * Returns the value as a real number
   * @throws When the value is a non-numeric string or has an unexpected type
   */
  asReal(): number {
    switch (this.type) {
      case ExpressionValueType.Bool:
        return this._value ? 1 : 0;
      case ExpressionValueType.Integer:
      case ExpressionValueType.Real:
        return this._value as number;
      case ExpressionValueType.String:
        const parsedValue = parseFloat(this._value as string);
        if (isNaN(parsedValue)) {
          throw new Error("Cannot convert string to a real value.");
        } else {
          return parsedValue;
        }
      default:
        throw new Error("Unexpected expression value");
    }
  }

  /**
   * Returns the value as a string
   * @throws When the value has an unexpected type
   */
  asString(): string {
    switch (this.type) {
      case ExpressionValueType.Bool:
        return this._value ? "true" : "false";
      case ExpressionValueType.Integer:
      case ExpressionValueType.Real:
        return (this._value as number).toString();
      case ExpressionValueType.String:
        return this._value as string;
      default:
        throw new Error("Unexpected expression value");
    }
  }

  /**
   * Returns the value as a Boolean
   * (numbers by truthiness; strings are true unless blank/whitespace)
   * @throws When the value has an unexpected type
   */
  asBool(): boolean {
    switch (this.type) {
      case ExpressionValueType.Bool:
      case ExpressionValueType.Integer:
      case ExpressionValueType.Real:
        return !!this._value;
      case ExpressionValueType.String:
        return (this._value as string).trim() !== "";
      default:
        throw new Error("Unexpected expression value");
    }
  }

  /**
   * Returns the value as a 16-bit unsigned integer
   * (the long value masked with 0xffff)
   */
  asWord(): number {
    return this.asLong() & 0xffff;
  }

  /**
   * Returns the value as an 8-bit unsigned integer
   * (the long value masked with 0xff)
   */
  asByte(): number {
    return this.asLong() & 0xff;
  }
}
/**
 * Map of symbol names to their evaluated expression values
 */
export type SymbolValueMap = { [key: string]: ExpressionValue };

/**
 * Information about a symbol's value, as returned by
 * EvaluationContext.getSymbolValue
 */
export interface ValueInfo {
  /**
   * The value of the symbol
   */
  value: ExpressionValue;

  /**
   * Symbol usage information (marked used when the symbol is referenced)
   */
  usageInfo: HasUsageInfo;
}
/**
 * Represents the context in which an expression is evaluated
 */
export interface EvaluationContext {
  /**
   * Gets the source line the evaluation context is bound to
   */
  getSourceLine(): Z80AssemblyLine;

  /**
   * Sets the source line the evaluation context is bound to
   * @param sourceLine Source line information
   */
  setSourceLine(sourceLine: Z80AssemblyLine): void;

  /**
   * Gets the current assembly address
   */
  getCurrentAddress(): number;

  /**
   * Gets the value of the specified symbol
   * @param symbol Symbol name
   * @param startFromGlobal Should resolution start from global scope?
   * @returns The symbol's value info, or null when the symbol is unknown
   */
  getSymbolValue(symbol: string, startFromGlobal?: boolean): ValueInfo | null;

  /**
   * Gets the current loop counter value
   */
  getLoopCounterValue(): ExpressionValue;

  /**
   * Evaluates the value of the specified expression node
   * @param expr Expression to evaluate
   */
  doEvalExpression(expr: Expression): ExpressionValue;

  /**
   * Reports an error during evaluation
   * @param code Error code
   * @param node Error position
   * @param parameters Optional error parameters
   */
  reportEvaluationError(
    code: ErrorCodes,
    node: NodePosition,
    ...parameters: any[]
  ): void;
}
/**
 * Base class that evaluates an expression in a specific context
 */
export abstract class ExpressionEvaluator implements EvaluationContext {
/**
* Gets the source line the evaluation context is bound to
*/
abstract getSourceLine(): Z80AssemblyLine;
/**
* Sets the source line the evaluation context is bound to
* @param sourceLine Source line information
*/
abstract setSourceLine(sourceLine: Z80AssemblyLine): void;
/**
* Gets the current assembly address
*/
abstract getCurrentAddress(): number;
/**
* Gets the value of the specified symbol
* @param symbol Symbol name
* @param startFromGlobal Should resolution start from global scope?
*/
abstract getSymbolValue(
symbol: string,
startFromGlobal?: boolean
): ValueInfo | null;
/**
* Gets the current loop counter value
*/
abstract getLoopCounterValue(): ExpressionValue;
/**
 * Evaluates the value of the specified expression node
 * @param expr Expression to evaluate
 */
doEvalExpression(expr: Expression): ExpressionValue {
try {
switch (expr.type) {
case "Identifier":
return evalIdentifierValue(this, expr);
case "Symbol":
return evalSymbolValue(this, expr);
case "IntegerLiteral":
case "RealLiteral":
case "CharLiteral":
case "StringLiteral":
case "BooleanLiteral":
return new ExpressionValue(expr.value);
case "BinaryExpression":
return evalBinaryOperationValue(this, expr);
case "UnaryExpression":
return evalUnaryOperationValue(this, expr);
case "ConditionalExpression":
return evalConditionalOperationValue(this, expr);
case "CurrentAddressLiteral":
return new ExpressionValue(this.getCurrentAddress());
case "CurrentCounterLiteral":
return this.getLoopCounterValue();
case "MacroTimeFunctionInvocation":
return evalMacroTimeFunctionInvocationValue(this, expr);
case "FunctionInvocation":
return evalFunctionInvocationValue(this, expr);
default:
return ExpressionValue.Error;
}
} catch (err) {
this.reportEvaluationError("Z0606", expr, (err as Error).message);
return ExpressionValue.Error;
}
/**
* Evaluate the value of an identifier
* @param context Evaluation context
* @param expr Expression to evaluate
*/
function evalIdentifierValue(
context: EvaluationContext,
expr: IdentifierNode
): ExpressionValue {
var valueInfo = context.getSymbolValue(expr.name);
if (valueInfo !== null) {
if (valueInfo.usageInfo !== null) {
valueInfo.usageInfo.isUsed = true;
}
return valueInfo.value;
}
context.reportEvaluationError("Z0605", expr, expr.name);
return ExpressionValue.NonEvaluated;
}
/**
* Evaluate the value of a symbol
* @param context Evaluation context
* @param expr Expression to evaluate
*/
function evalSymbolValue(
context: EvaluationContext,
expr: Symbol
): ExpressionValue {
var valueInfo = context.getSymbolValue(
expr.identifier.name,
expr.startsFromGlobal
);
if (valueInfo !== null) {
if (valueInfo.usageInfo !== null) {
valueInfo.usageInfo.isUsed = true;
}
return valueInfo.value;
}
context.reportEvaluationError(
"Z0605",
expr.identifier,
expr.identifier.name
);
return ExpressionValue.NonEvaluated;
}
/**
 * Evaluates a binary operation
 * @param context Evaluation context
 * @param expr Binary expression
 * @returns The value of the evaluated expression
 */
function evalBinaryOperationValue(
context: EvaluationContext,
expr: BinaryExpression
): ExpressionValue {
const left = context.doEvalExpression(expr.left);
const right = context.doEvalExpression(expr.right);
if (!left.isValid || !right.isValid) {
return ExpressionValue.NonEvaluated;
}
switch (expr.operator) {
case "<?":
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
const rightNum = right.asLong();
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(
left.asLong() < rightNum ? left.asLong() : rightNum
);
case ExpressionValueType.Real:
return new ExpressionValue(
left.asReal() < rightNum ? left.asReal() : rightNum
);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
const rightReal = right.asReal();
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(
left.asLong() < rightReal ? left.asLong() : rightReal
);
case ExpressionValueType.Real:
return new ExpressionValue(
left.asReal() < rightReal ? left.asReal() : rightReal
);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
break;
case ">?":
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
const rightNum = right.asLong();
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(
left.asLong() > rightNum ? left.asLong() : rightNum
);
case ExpressionValueType.Real:
return new ExpressionValue(
left.asReal() > rightNum ? left.asReal() : rightNum
);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
const rightReal = right.asReal();
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(
left.asLong() > rightReal ? left.asLong() : rightReal
);
case ExpressionValueType.Real:
return new ExpressionValue(
left.asReal() > rightReal ? left.asReal() : rightReal
);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
break;
case "*":
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var rightNum = right.asLong();
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(left.asLong() * rightNum);
case ExpressionValueType.Real:
return new ExpressionValue(left.asReal() * rightNum);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
var rightReal = right.asReal();
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(left.asLong() * rightReal);
case ExpressionValueType.Real:
return new ExpressionValue(left.asReal() * rightReal);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
break;
case "/":
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var rightNum = right.asLong();
if (rightNum === 0) {
throw new Error(DIV_BY_ZERO_ERROR);
}
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(left.asLong() / rightNum);
case ExpressionValueType.Real:
return new ExpressionValue(left.asReal() / rightNum);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
var rightReal = right.asReal();
if (Math.abs(rightReal) < Number.EPSILON) {
throw new Error(DIV_BY_ZERO_ERROR);
}
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(left.asLong() / rightReal);
case ExpressionValueType.Real:
return new ExpressionValue(left.asReal() / rightReal);
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
break;
case "%":
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var rightNum = right.asLong();
if (rightNum === 0) {
throw new Error(DIV_BY_ZERO_ERROR);
}
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(left.asLong() % rightNum);
case ExpressionValueType.Real:
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
break;
case "+":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum + right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum + right.asReal());
case ExpressionValueType.String:
throw new Error(ADD_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal + right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal + right.asReal());
case ExpressionValueType.String:
throw new Error(ADD_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(
`${left.asString()}${right.asString()}`
);
} else {
throw new Error(ADD_STRING_ERROR);
}
}
break;
case "-":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum - right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum - right.asReal());
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal - right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal - right.asReal());
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.String:
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
break;
case "<<":
if (
left.type !== ExpressionValueType.Bool &&
left.type !== ExpressionValueType.Integer
) {
throwStringError("left", expr.operator);
return ExpressionValue.Error;
}
if (
right.type !== ExpressionValueType.Bool &&
right.type !== ExpressionValueType.Integer
) {
throwIntegralError("right", expr.operator);
return ExpressionValue.Error;
}
return new ExpressionValue(
left.asLong() << (right.asLong() & 0xffff)
);
case ">>":
if (
left.type !== ExpressionValueType.Bool &&
left.type !== ExpressionValueType.Integer
) {
throwIntegralError("left", expr.operator);
return ExpressionValue.Error;
}
if (
right.type !== ExpressionValueType.Bool &&
right.type !== ExpressionValueType.Integer
) {
throwIntegralError("right", expr.operator);
return ExpressionValue.Error;
}
return new ExpressionValue(
left.asLong() >> (right.asLong() & 0xffff)
);
case "<":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum < right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum < right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal < right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal < right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(left.asString() < right.asString());
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case "<=":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum <= right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum <= right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal <= right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal <= right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(left.asString() <= right.asString());
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case ">":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum > right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum > right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal > right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal > right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(left.asString() > right.asString());
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case ">=":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum >= right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum >= right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal >= right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal >= right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(left.asString() >= right.asString());
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case "==":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum === right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum === right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal === right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal === right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(
left.asString() === right.asString()
);
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case "===":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum === right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum === right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal === right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal === right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(
left.asString().toLowerCase() ===
right.asString().toLowerCase()
);
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case "!=":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum !== right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum !== right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal !== right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal !== right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(
left.asString() !== right.asString()
);
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case "!==":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum !== right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftNum !== right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.Real:
var leftReal = left.asReal();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftReal !== right.asLong());
case ExpressionValueType.Real:
return new ExpressionValue(leftReal !== right.asReal());
case ExpressionValueType.String:
throw new Error(COMPARE_ERROR);
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(
left.asString().toLowerCase() !==
right.asString().toLowerCase()
);
}
throw new Error(COMPARE_STRING_ERROR);
}
break;
case "&":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum & right.asLong());
case ExpressionValueType.Real:
case ExpressionValueType.String:
throwStringError("right", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.String:
if (right.type === ExpressionValueType.String) {
return new ExpressionValue(
`${left.asString()}\r\n${right.asString()}`
);
}
throw new Error(
`The right side of ${expr.operator} must be a string`
);
case ExpressionValueType.Real:
throw new Error(
`The left side of ${expr.operator} must be an integral type or a string`
);
}
case "|":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum | right.asLong());
case ExpressionValueType.Real:
case ExpressionValueType.String:
throwIntegralError("right", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
case ExpressionValueType.String:
throwIntegralError("left", expr.operator);
return ExpressionValue.Error;
}
break;
case "^":
switch (left.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
var leftNum = left.asLong();
switch (right.type) {
case ExpressionValueType.Bool:
case ExpressionValueType.Integer:
return new ExpressionValue(leftNum ^ right.asLong());
case ExpressionValueType.Real:
case ExpressionValueType.String:
throwIntegralError("right", expr.operator);
return ExpressionValue.Error;
}
case ExpressionValueType.Real:
case ExpressionValueType.String:
throwIntegralError("left", expr.operator);
return ExpressionValue.Error;
}
break;
}
return ExpressionValue.NonEvaluated;
}
/**
 * Evaluates a unary operation
 * @param context Evaluation context
 * @param expr Unary expression
 * @returns The value of the evaluated expression, or NonEvaluated when the
 * operand cannot be evaluated or the operator is not recognized
 */
function evalUnaryOperationValue(
  context: EvaluationContext,
  expr: UnaryExpression
): ExpressionValue {
  // --- An unevaluable operand makes the whole expression unevaluable
  const operand = context.doEvalExpression(expr.operand);
  if (!operand.isValid) {
    return ExpressionValue.NonEvaluated;
  }
  const operator = expr.operator;
  if (operator === "+") {
    // --- Unary plus is the identity operation for every operand type
    return operand;
  }
  if (operator === "-") {
    switch (operand.type) {
      case ExpressionValueType.Bool:
      case ExpressionValueType.Integer:
        return new ExpressionValue(-operand.asLong());
      case ExpressionValueType.Real:
        return new ExpressionValue(-operand.asReal());
      case ExpressionValueType.String: {
        // --- Try to interpret the string as a real, then as an integer,
        // --- before negating it
        const text = operand.asString();
        const realValue = parseFloat(text);
        if (!isNaN(realValue)) {
          return new ExpressionValue(-realValue);
        }
        const intValue = parseInt(text);
        if (!isNaN(intValue)) {
          return new ExpressionValue(-intValue);
        }
        throw new Error(STRING_CONVERSION_ERROR);
      }
    }
  } else if (operator === "!") {
    switch (operand.type) {
      case ExpressionValueType.Bool:
      case ExpressionValueType.Integer:
        // --- Logical not: zero becomes true, anything else false
        return new ExpressionValue(operand.asLong() === 0);
      case ExpressionValueType.Real:
      case ExpressionValueType.String:
        throw new Error(
          "Unary logical not operation can be applied only on integral types"
        );
    }
  } else if (operator === "~") {
    switch (operand.type) {
      case ExpressionValueType.Bool:
      case ExpressionValueType.Integer:
        return new ExpressionValue(~operand.asLong());
      case ExpressionValueType.Real:
      case ExpressionValueType.String:
        throw new Error(
          "Unary bitwise not operation can be applied only on integral types"
        );
    }
  }
  return ExpressionValue.NonEvaluated;
}
/**
 * Evaluates a conditional (ternary) operation
 * @param context Evaluation context
 * @param expr Conditional expression
 * @returns The value of the selected branch, or NonEvaluated when the
 * condition itself cannot be evaluated
 */
function evalConditionalOperationValue(
  context: EvaluationContext,
  expr: ConditionalExpression
): ExpressionValue {
  const condition = context.doEvalExpression(expr.condition);
  if (!condition.isValid) {
    return ExpressionValue.NonEvaluated;
  }
  // --- Only the branch selected by the condition is evaluated
  const branch = condition.asBool() ? expr.consequent : expr.alternate;
  return context.doEvalExpression(branch);
}
// --- Signals that the given side of an operator does not accept a string
function throwStringError(side: string, operator: string): void {
  const message = `The ${side} operand of ${operator} cannot be a string.`;
  throw new Error(message);
}
// --- Signals that the given side of an operator requires an integral value
function throwIntegralError(side: string, operator: string): void {
  const message = `The ${side} operand of ${operator} must be an integral type.`;
  throw new Error(message);
}
}
/**
* Reports an error during evaluation
* @param code Error code
* @param node Error position
* @param parameters Optional error parameters
*/
abstract reportEvaluationError(
code: ErrorCodes,
node: NodePosition,
...parameters: any[]
): void;
}
/**
 * This class implements a seedable random number generator
 * (a Park-Miller/Lehmer linear congruential generator, modulus 2^31 - 1).
 */
class SeededRandom {
  private _seed: number;

  /**
   * Initializes the random generator with the specified seed value
   * @param seed Seed value
   */
  constructor(seed: number) {
    // --- Map the seed into the generator's valid state range [1, 2147483646]
    const state = seed % 2147483647;
    this._seed = state <= 0 ? state + 2147483646 : state;
  }

  /**
   * Generates the next 32-bit integer random number
   */
  next(): number {
    this._seed = (this._seed * 16807) % 2147483647;
    return this._seed;
  }

  /**
   * Generates the next random number between 0.0 and 1.0 (exclusive)
   */
  nextFloat(): number {
    return (this.next() - 1) / 2147483646;
  }

  /**
   * Generates an integer number within the specified range
   * @param inclusiveFrom The inclusive start of the range
   * @param exclusiveTo The exclusive end of the range
   */
  integer(inclusiveFrom: number, exclusiveTo: number): number {
    const span = exclusiveTo - inclusiveFrom;
    return Math.floor(inclusiveFrom + this.nextFloat() * span);
  }
}
// --- Module-level PRNG backing the rnd() built-in; seeded from the clock
let randomGenerator = new SeededRandom(Date.now());
/**
 * Re-seeds the module-level random number generator so that subsequent
 * rnd() results are reproducible.
 * @param seed Seed value
 */
export function setRandomSeed(seed: number): void {
  randomGenerator = new SeededRandom(seed);
}
/**
 * Represents a function evaluator class: one callable signature (overload) of
 * a built-in function, pairing the evaluator callback with the argument types
 * it accepts. Used for overload resolution when invoking built-ins.
 */
class FunctionEvaluator {
  constructor(
    public readonly evaluateFunc: (args: ExpressionValue[]) => ExpressionValue,
    public readonly argTypes: ExpressionValueType[]
  ) {}
}
const FUNCTION_EVALUATORS: { [key: string]: FunctionEvaluator[] } = {
abs: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.abs(args[0].asLong())),
[ExpressionValueType.Integer]
),
new FunctionEvaluator(
(args) => new ExpressionValue(Math.abs(args[0].asReal())),
[ExpressionValueType.Real]
),
],
acos: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.acos(args[0].asReal())),
[ExpressionValueType.Real]
),
],
asin: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.asin(args[0].asReal())),
[ExpressionValueType.Real]
),
],
atan: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.atan(args[0].asReal())),
[ExpressionValueType.Real]
),
],
atan2: [
new FunctionEvaluator(
(args) =>
new ExpressionValue(Math.atan2(args[0].asReal(), args[1].asReal())),
[ExpressionValueType.Real, ExpressionValueType.Real]
),
],
ceiling: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.ceil(args[0].asReal())),
[ExpressionValueType.Real]
),
],
cos: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.cos(args[0].asReal())),
[ExpressionValueType.Real]
),
],
cosh: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.cosh(args[0].asReal())),
[ExpressionValueType.Real]
),
],
exp: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.exp(args[0].asReal())),
[ExpressionValueType.Real]
),
],
floor: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.floor(args[0].asReal())),
[ExpressionValueType.Real]
),
],
log: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.log(args[0].asReal())),
[ExpressionValueType.Real]
),
new FunctionEvaluator(
(args) =>
new ExpressionValue(
Math.log(args[0].asReal()) /
(args[1].asReal() === 0.0 ? 1 : Math.log(args[1].asReal()))
),
[ExpressionValueType.Real, ExpressionValueType.Real]
),
],
log10: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.log10(args[0].asReal())),
[ExpressionValueType.Real]
),
],
max: [
new FunctionEvaluator(
(args) =>
new ExpressionValue(Math.max(args[0].asLong(), args[1].asLong())),
[ExpressionValueType.Integer, ExpressionValueType.Integer]
),
new FunctionEvaluator(
(args) =>
new ExpressionValue(Math.max(args[0].asReal(), args[1].asReal())),
[ExpressionValueType.Real, ExpressionValueType.Real]
),
],
min: [
new FunctionEvaluator(
(args) =>
new ExpressionValue(Math.min(args[0].asLong(), args[1].asLong())),
[ExpressionValueType.Integer, ExpressionValueType.Integer]
),
new FunctionEvaluator(
(args) =>
new ExpressionValue(Math.min(args[0].asReal(), args[1].asReal())),
[ExpressionValueType.Real, ExpressionValueType.Real]
),
],
pow: [
new FunctionEvaluator(
(args) =>
new ExpressionValue(Math.pow(args[0].asReal(), args[1].asReal())),
[ExpressionValueType.Real, ExpressionValueType.Real]
),
],
round: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.round(args[0].asReal())),
[ExpressionValueType.Real]
),
],
sign: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.sign(args[0].asLong())),
[ExpressionValueType.Integer]
),
new FunctionEvaluator(
(args) => new ExpressionValue(Math.sign(args[0].asReal())),
[ExpressionValueType.Real]
),
],
sin: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.sin(args[0].asReal())),
[ExpressionValueType.Real]
),
],
sinh: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.sinh(args[0].asReal())),
[ExpressionValueType.Real]
),
],
sqrt: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.sqrt(args[0].asReal())),
[ExpressionValueType.Real]
),
],
tan: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.tan(args[0].asReal())),
[ExpressionValueType.Real]
),
],
tanh: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.tanh(args[0].asReal())),
[ExpressionValueType.Real]
),
],
truncate: [
new FunctionEvaluator(
(args) => new ExpressionValue(Math.trunc(args[0].asReal())),
[ExpressionValueType.Real]
),
],
pi: [new FunctionEvaluator((args) => new ExpressionValue(Math.PI), [])],
nat: [new FunctionEvaluator((args) => new ExpressionValue(Math.E), [])],
low: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asLong() & 0xff),
[ExpressionValueType.Integer]
),
],
high: [
new FunctionEvaluator(
(args) => new ExpressionValue((args[0].asLong() >> 8) & 0xff),
[ExpressionValueType.Integer]
),
],
word: [
new FunctionEvaluator((args) => new ExpressionValue(args[0].asWord()), [
ExpressionValueType.Integer,
]),
],
rnd: [
new FunctionEvaluator(
(args) => new ExpressionValue(randomGenerator.integer(0, 65536)),
[]
),
new FunctionEvaluator(
(args) =>
new ExpressionValue(
randomGenerator.integer(args[0].asLong(), args[1].asLong())
),
[ExpressionValueType.Integer, ExpressionValueType.Integer]
),
],
length: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asString().length),
[ExpressionValueType.String]
),
],
len: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asString().length),
[ExpressionValueType.String]
),
],
left: [
new FunctionEvaluator(
(args) => {
const str = args[0].asString();
const len = Math.min(str.length, args[1].asLong());
return new ExpressionValue(str.substr(0, len));
},
[ExpressionValueType.String, ExpressionValueType.Integer]
),
],
right: [
new FunctionEvaluator(
(args) => {
const str = args[0].asString();
const len = Math.min(str.length, args[1].asLong());
return new ExpressionValue(str.substr(str.length - len, len));
},
[ExpressionValueType.String, ExpressionValueType.Integer]
),
],
substr: [
new FunctionEvaluator(
(args) => {
const str = args[0].asString();
const start = Math.min(str.length, args[1].asLong());
const len = Math.min(str.length - start, args[2].asLong());
return new ExpressionValue(str.substr(start, len));
},
[
ExpressionValueType.String,
ExpressionValueType.Integer,
ExpressionValueType.Integer,
]
),
],
fill: [
new FunctionEvaluator(
(args) => {
const str = args[0].asString();
const count = args[1].asLong();
const resultLen = str.length * count;
if (resultLen > 0x4000) {
throw new Error(
"The result of the fill() function would be longer than #4000 bytes."
);
}
var result = "";
for (var i = 0; i < count; i++) {
result += str;
}
return new ExpressionValue(result);
},
[ExpressionValueType.String, ExpressionValueType.Integer]
),
],
int: [
new FunctionEvaluator((args) => new ExpressionValue(args[0].asLong()), [
ExpressionValueType.Real,
]),
],
frac: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asReal() - args[0].asLong()),
[ExpressionValueType.Real]
),
],
lowercase: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asString().toLowerCase()),
[ExpressionValueType.String]
),
],
lcase: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asString().toLowerCase()),
[ExpressionValueType.String]
),
],
uppercase: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asString().toUpperCase()),
[ExpressionValueType.String]
),
],
ucase: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asString().toUpperCase()),
[ExpressionValueType.String]
),
],
str: [
new FunctionEvaluator((args) => new ExpressionValue(args[0].asString()), [
ExpressionValueType.Bool,
]),
new FunctionEvaluator((args) => new ExpressionValue(args[0].asString()), [
ExpressionValueType.Integer,
]),
new FunctionEvaluator((args) => new ExpressionValue(args[0].asString()), [
ExpressionValueType.Real,
]),
new FunctionEvaluator((args) => new ExpressionValue(args[0].asString()), [
ExpressionValueType.String,
]),
],
scraddr: [
new FunctionEvaluator(
(args) => {
const line = args[0].asLong();
if (line < 0 || line > 191) {
throw new Error(
`The 'line' argument of scraddr must be between 0 and 191. It cannot be ${line}.`
);
}
const col = args[1].asLong();
if (col < 0 || col > 255) {
throw new Error(
`The 'col' argument of scraddr must be between 0 and 255. It cannot be ${col}.`
);
}
var da = 0x4000 | (col >> 3) | (line << 5);
var addr =
((da & 0xf81f) | ((da & 0x0700) >> 3) | ((da & 0x00e0) << 3)) &
0xffff;
return new ExpressionValue(addr);
},
[ExpressionValueType.Integer, ExpressionValueType.Integer]
),
],
attraddr: [
new FunctionEvaluator(
(args) => {
const line = args[0].asLong();
if (line < 0 || line > 191) {
throw new Error(
`The 'line' argument of scraddr must be between 0 and 191. It cannot be ${line}.`
);
}
const col = args[1].asLong();
if (col < 0 || col > 255) {
throw new Error(
`The 'col' argument of scraddr must be between 0 and 255. It cannot be ${col}.`
);
}
return new ExpressionValue(0x5800 + (line >> 3) * 32 + (col >> 3));
},
[ExpressionValueType.Integer, ExpressionValueType.Integer]
),
],
ink: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asLong() & 0x07),
[ExpressionValueType.Integer]
),
],
paper: [
new FunctionEvaluator(
(args) => new ExpressionValue((args[0].asLong() & 0x07) << 3),
[ExpressionValueType.Integer]
),
],
bright: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asLong() === 0 ? 0x00 : 0x40),
[ExpressionValueType.Integer]
),
],
flash: [
new FunctionEvaluator(
(args) => new ExpressionValue(args[0].asLong() === 0 ? 0x00 : 0x80),
[ExpressionValueType.Integer]
),
],
attr: [
new FunctionEvaluator(
(args) => {
const ink = args[0].asLong() & 0x07;
const paper = (args[1].asLong() & 0x07) << 3;
const bright = args[2].asLong() === 0 ? 0x00 : 0x40;
const flash = args[3].asLong() === 0 ? 0x00 : 0x80;
return new ExpressionValue((flash | bright | paper | ink) & 0xff);
},
[
ExpressionValueType.Integer,
ExpressionValueType.Integer,
ExpressionValueType.Integer,
ExpressionValueType.Integer,
]
),
new FunctionEvaluator(
(args) => {
const ink = args[0].asLong() & 0x07;
const paper = (args[1].asLong() & 0x07) << 3;
const bright = args[2].asLong() === 0 ? 0x00 : 0x40;
return new ExpressionValue((bright | paper | ink) & 0xff);
},
[
ExpressionValueType.Integer,
ExpressionValueType.Integer,
ExpressionValueType.Integer,
]
),
new FunctionEvaluator(
(args) => {
const ink = args[0].asLong() & 0x07;
const paper = (args[1].asLong() & 0x07) << 3;
return new ExpressionValue((paper | ink) & 0xff);
},
[ExpressionValueType.Integer, ExpressionValueType.Integer]
),
],
};
/**
 * Evaluates a built-in function invocation
 * @param context Evaluation context
 * @param funcExpr Function invocation expression
 * @returns The value of the evaluated expression
 * @throws Error when the function name is unknown, when no overload accepts
 * the actual argument types, or when the evaluator itself fails
 */
export function evalFunctionInvocationValue(
  context: EvaluationContext,
  funcExpr: FunctionInvocation
): ExpressionValue {
  // --- Evaluate all arguments from left to right
  const argValues: ExpressionValue[] = [];
  let errCount = 0;
  for (const expr of funcExpr.args) {
    const argValue = context.doEvalExpression(expr);
    if (argValue.isValid) {
      argValues.push(argValue);
    } else {
      errCount++;
    }
  }
  // --- Check for evaluation errors
  if (errCount > 0) {
    return ExpressionValue.Error;
  }
  // --- Function must be defined
  const evaluator = FUNCTION_EVALUATORS[funcExpr.functionName.name];
  if (!evaluator) {
    // --- Fixed: the template literal was malformed (`$'{FunctionName}'`)
    // --- and never interpolated the actual function name
    throw new Error(`Unknown function '${funcExpr.functionName.name}'`);
  }
  // --- Find the appropriate signature among the overloads
  let evaluatorFound: FunctionEvaluator | null = null;
  for (const evalOption of evaluator) {
    if (evalOption.argTypes.length !== funcExpr.args.length) {
      continue;
    }
    // --- A viable option found; check each argument type
    let match = true;
    for (let i = 0; i < evalOption.argTypes.length; i++) {
      const type = argValues[i].type;
      switch (evalOption.argTypes[i]) {
        case ExpressionValueType.Bool:
          match = type === ExpressionValueType.Bool;
          break;
        case ExpressionValueType.Integer:
          // --- Bool implicitly widens to Integer
          match =
            type === ExpressionValueType.Bool ||
            type === ExpressionValueType.Integer;
          break;
        case ExpressionValueType.Real:
          // --- Bool and Integer implicitly widen to Real
          match =
            type === ExpressionValueType.Bool ||
            type === ExpressionValueType.Integer ||
            type === ExpressionValueType.Real;
          break;
        case ExpressionValueType.String:
          match = type === ExpressionValueType.String;
          break;
        default:
          return ExpressionValue.Error;
      }
      // --- Abort the search if the current argument type does not match
      if (!match) {
        break;
      }
    }
    if (match) {
      // --- We have found a matching signature
      evaluatorFound = evalOption;
      break;
    }
  }
  // --- Check whether we found an option
  if (evaluatorFound === null) {
    throw new Error(
      `The arguments of '${funcExpr.functionName.name}' do not match any acceptable signatures`
    );
  }
  // --- Now, it is time to evaluate the function
  try {
    return evaluatorFound.evaluateFunc(argValues);
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    throw new Error(
      `Function value cannot be evaluated: '${funcExpr.functionName.name}': ${reason}`
    );
  }
}
/**
* Evaluates a built-in function invocation
* @param context Evaluation context
* @param expr Unary expression
* @returns The value of the evaluated expression
*/
export function evalMacroTimeFunctionInvocationValue(
context: EvaluationContext,
funcExpr: MacroTimeFunctionInvocation
): ExpressionValue {
switch (funcExpr.functionName.toLowerCase()) {
case "def":
return new ExpressionValue(
!!(
funcExpr.operand &&
funcExpr.operand.operandType !== OperandType.NoneArg
)
);
case "isreg8":
return new ExpressionValue(
!!(
funcExpr.operand &&
(funcExpr.operand.operandType === OperandType.Reg8 ||
funcExpr.operand.operandType === OperandType.Reg8Spec ||
funcExpr.operand.operandType === OperandType.Reg8Idx)
)
);
case "iscport":
return new ExpressionValue(
!!(
funcExpr.operand && funcExpr.operand.operandType === OperandType.CPort
)
);
case "iscondition":
return new ExpressionValue(
!!(
funcExpr.operand &&
(funcExpr.operand.operandType === OperandType.Condition ||
funcExpr.operand?.register === "c")
)
);
case "isexpr":
return new ExpressionValue(
!!(
funcExpr.operand &&
funcExpr.operand.operandType === OperandType.Expression
)
);
case "isindexedaddr":
return new ExpressionValue(
!!(
funcExpr.operand &&
funcExpr.operand.operandType === OperandType.IndexedIndirect
)
);
case "isreg16":
return new ExpressionValue(
!!(
funcExpr.operand &&
(funcExpr.operand.operandType === OperandType.Reg16 ||
funcExpr.operand.operandType === OperandType.Reg16Spec ||
funcExpr.operand.operandType === OperandType.Reg16Idx)
)
);
case "isreg16idx":
return new ExpressionValue(
!!(
funcExpr.operand &&
funcExpr.operand.operandType === OperandType.Reg16Idx
)
);
case "isreg16std":
return new ExpressionValue(
!!(
funcExpr.operand && funcExpr.operand.operandType === OperandType.Reg16
)
);
case "isreg8idx":
return new ExpressionValue(
!!(
funcExpr.operand &&
funcExpr.operand.operandType === OperandType.Reg8Idx
)
);
case "isreg8spec":
return new ExpressionValue(
!!(
funcExpr.operand &&
funcExpr.operand.operandType === OperandType.Reg8Spec
)
);
case "isreg8std":
return new ExpressionValue(
!!(
funcExpr.operand && funcExpr.operand.operandType === OperandType.Reg8
)
);
case "isregindirect":
return new ExpressionValue(
!!(
funcExpr.operand &&
funcExpr.operand.operandType === OperandType.RegIndirect
)
);
case "isrega":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "a")
);
case "isregb":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "b")
);
case "isregc":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "c")
);
case "isregd":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "d")
);
case "isrege":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "e")
);
case "isregh":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "h")
);
case "isregl":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "l")
);
case "isregi":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "i")
);
case "isregr":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "r")
);
case "isregbc":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "bc")
);
case "isregde":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "de")
);
case "isreghl":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "hl")
);
case "isregsp":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "sp")
);
case "isregxh":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register.indexOf("xh") >= 0)
);
case "isregxl":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register.indexOf("xl") >= 0)
);
case "isregyh":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register.indexOf("yh") >= 0)
);
case "isregyl":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register.indexOf("yl") >= 0)
);
case "isregix":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "ix")
);
case "isregiy":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "iy")
);
case "isregaf":
return new ExpressionValue(
!!(funcExpr.operand && funcExpr.operand?.register === "af")
);
case "hreg": {
let op: string | undefined;
switch (funcExpr.operand.register) {
case "af":
op = "a";
case "bc":
op = "b";
case "de":
op = "d";
case "hl":
op = "h";
case "ix":
op = "xh";
case "iy":
op = "yh";
}
if (op) {
return new ExpressionValue(op);
}
break;
}
case "lreg": {
let op: string | undefined;
switch (funcExpr.operand.register) {
case "bc":
op = "c";
case "de":
op = "e";
case "hl":
op = "l";
case "ix":
op = "xl";
case "iy":
op = "yl";
}
if (op) {
return new ExpressionValue(op);
}
break;
}
}
context.reportEvaluationError(
"Z0606",
funcExpr,
null,
`Cannot evaluate ${funcExpr.functionName}(${funcExpr.operand.register})`
);
} | the_stack |
import * as nodes from "./nodes"
// "-" 45
// "[" 91
// "]" 93
// " " 32
// "/" 47
// "*" 42
// "(" 40
// ")" 41
// "!" 33
// '"' 34
// "'" 39
/**
 * Try to find the right bracket matching the left bracket at `start`,
 * returning its index, or `undefined` if not found before `end`.
 *
 * Brackets are given as a `[leftCharCode, rightCharCode]` pair (default
 * parentheses). Bracket characters are ignored while the scanner is inside:
 *  - a line comment  ("//…", `comment === 1`) or block comment (`comment === 2`)
 *  - a double- or single-quoted string (`string === 1` / `string === 2`)
 *  - a `url(...)` token (`url` counts matched prefix chars; 3 means inside),
 *    so "//" in e.g. "url(http://…)" is not mistaken for a comment.
 */
export function findRightBracket({
	text,
	start = 0,
	end = text.length,
	brackets = [40, 41],
}: {
	text: string
	start?: number
	end?: number
	brackets?: [number, number]
}): number | undefined {
	let stack = 0
	const [lbrac, rbrac] = brackets
	let comment = 0
	let string = 0
	let url = 0
	for (let i = start; i < end; i++) {
		const char = text.charCodeAt(i)
		if (char === lbrac) {
			// Only count brackets in plain code (not strings/comments).
			if (string === 0 && comment === 0) {
				stack++
			}
		} else if (char === rbrac) {
			if (string === 0 && comment === 0) {
				if (stack === 1) {
					// This closes the bracket opened at `start`.
					return i
				}
				if (stack < 1) {
					// A closer with no matching opener: malformed input.
					return undefined
				}
				stack--
			}
		}
		// Track the "u" → "r" → "l" prefix of a url(...) token; it must start
		// at a word boundary. url === 3 persists until the closing ")".
		if (string === 0 && comment === 0) {
			if (url === 0 && char === 117 && /\W/.test(text[i - 1] || " ")) {
				url = 1
			} else if (url === 1 && char === 114) {
				url = 2
			} else if (url === 2 && char === 108) {
				url = 3
			} else if (url < 3 || (url === 3 && char === 41)) {
				url = 0
			}
		}
		// Comment openers are only recognized outside url(...) tokens.
		if (url < 3 && comment === 0) {
			if (string === 0) {
				if (char === 47 && text.charCodeAt(i + 1) === 47) {
					comment = 1
				} else if (char === 47 && text.charCodeAt(i + 1) === 42) {
					comment = 2
				}
			}
		} else if (comment === 1 && char === 10) {
			// Line comments end at the newline.
			comment = 0
		} else if (comment === 2 && char === 42 && text.charCodeAt(i + 1) === 47) {
			// Block comments end at "*" + "/"; skip the "/" as well.
			comment = 0
			i += 1
		}
		// String state: 1 = inside "double", 2 = inside 'single' quotes.
		if (string === 0) {
			if (comment === 0) {
				if (char === 34) {
					string = 1
				} else if (char === 39) {
					string = 2
				}
			}
		} else if (string === 1 && char === 34) {
			string = 0
		} else if (string === 2 && char === 39) {
			string = 0
		}
	}
	return undefined
}
/**
 * Find the end of a block comment opened at `start`. Returns the index of the
 * trailing "/" of the first "*" + "/" pair at or after `start + 2` (the "*"
 * must lie before `end`), or `undefined` when the comment is unterminated.
 */
function findRightBlockComment(text: string, start = 0, end = text.length): number | undefined {
	const close = text.indexOf("*/", start + 2)
	if (close === -1 || close >= end) {
		return undefined
	}
	return close + 1
}
/**
 * Whitespace test by char code: space, form feed, newline, carriage return,
 * tab, vertical tab. `NaN` (the result of `charCodeAt` past the end of the
 * string) also counts as a space so scans terminate cleanly at end-of-input.
 */
function isSpace(char: number) {
	if (Number.isNaN(char)) {
		return true
	}
	const whitespace = [32, 12, 10, 13, 9, 11]
	return whitespace.includes(char)
}
export function parse({
text,
start = 0,
end = text.length,
breac = Infinity,
}: {
text: string
start?: number
end?: number
breac?: number
}): nodes.Program {
return {
type: nodes.NodeType.Program,
range: [start, end],
expressions: parseExpressions({ text, breac, start, end }),
}
}
export function parseExpressions({
text,
start = 0,
end = text.length,
breac = Infinity,
}: {
text: string
start?: number
end?: number
breac?: number
}) {
const expressions: nodes.TwExpression[] = []
while (start < end) {
const { expr, lastIndex } = parseExpression({ text, breac, start, end })
if (expr) {
if (expr instanceof Array) expressions.push(...expr)
else expressions.push(expr)
}
if (lastIndex > breac) break
start = lastIndex
if (!start) break
}
return expressions
}
/**
 * Escape regex metacharacters in `value` so it can be embedded verbatim in a
 * RegExp source string (used for the user-configurable variant separator).
 * FIX: the character class previously omitted "*", so a separator containing
 * "*" would be treated as a quantifier in the compiled tokenizer regexp.
 */
function escapeRegexp(value: string) {
	return value.replace(/[/\\^$*+?.()|[\]{}]/g, "\\$&")
}
// Build the tokenizer regexp for the given (already-escaped) separator.
// Capture groups, in order of the alternation:
//   1. line comment          ("//" to end of line)
//   2. block comment opener  ("/" + "*"; the close is found separately)
//   3. simple variant        (word chars followed by the separator, e.g. "hover:")
//   4. arbitrary variant     (a lone "[")
//   5. arbitrary-classname prop before "[" (e.g. "text-" in "text-[…]"),
//      optionally preceded by "!"
//   6. plain classname, "!" allowed on either side
//   7. group opener          ("(" with optional leading "!")
//   8. any other non-space run (error recovery)
function compileRegexp(sep: string) {
	return new RegExp(
		`(\\/\\/[^\\n]*\\n?)|(\\/\\*)|([\\w-]+${sep})|(\\[)|!?((?!\\/)(?:(?!\\/\\/{1,2})[\\w-/])+)\\[|!?((?:(?!\\/\\/|\\/\\*)[\\w-./])+)!?|(!?\\()|(\\S+)`,
		"gs",
	)
}
let separator = escapeRegexp(":")
let regexp = compileRegexp(separator)
export function setSeparator(sep: string) {
separator = escapeRegexp(sep)
regexp = compileRegexp(separator)
}
function parseExpression({
text,
start = 0,
end = text.length,
}: {
text: string
start?: number
end?: number
breac?: number
}): { expr?: nodes.TwExpression; lastIndex: number } {
let match: RegExpExecArray | null
regexp.lastIndex = start
text = text.slice(0, end)
if ((match = regexp.exec(text))) {
const [, lineComment, blockComment, variant, ar_variant, arbitrary, classnames, group, others] = match
start = match.index
if (variant) {
start += variant.length
const simpleVariant: nodes.SimpleVariant = {
type: nodes.NodeType.SimpleVariant,
range: [match.index, start],
id: {
type: nodes.NodeType.Identifier,
range: [match.index, start - separator.length],
value: text.slice(match.index, start - separator.length),
},
}
if (isSpace(text.charCodeAt(start)) || isComment(start)) {
const span: nodes.VariantSpan = {
type: nodes.NodeType.VariantSpan,
variant: simpleVariant,
range: [match.index, regexp.lastIndex],
}
return { expr: span, lastIndex: regexp.lastIndex }
}
const { expr, lastIndex } = parseExpression({ text, start })
const span: nodes.VariantSpan = {
type: nodes.NodeType.VariantSpan,
variant: simpleVariant,
range: [match.index, lastIndex],
child: expr,
}
return { expr: span, lastIndex }
} else if (ar_variant) {
const ar_rb = findRightBracket({ text, start, end, brackets: [91, 93] })
if (ar_rb == undefined) {
const variant: nodes.ArbitraryVariant = {
type: nodes.NodeType.ArbitraryVariant,
range: [match.index, end],
selector: {
type: nodes.NodeType.CssSelector,
range: [match.index + 1, end],
value: text.slice(match.index + 1, end),
},
closed: false,
}
const span: nodes.VariantSpan = {
type: nodes.NodeType.VariantSpan,
variant,
range: [match.index, end],
}
return { expr: span, lastIndex: end }
}
for (let i = 0; i < separator.length; i++) {
if (text.charCodeAt(i + ar_rb + 1) !== separator.charCodeAt(i)) {
// unknown
const classname: nodes.Classname = {
type: nodes.NodeType.ClassName,
important: false,
range: [match.index, ar_rb + 1],
value: text.slice(match.index, ar_rb + 1),
}
return { expr: classname, lastIndex: ar_rb + 1 }
}
}
start = ar_rb + 1 + separator.length
regexp.lastIndex = start
const variant: nodes.ArbitraryVariant = {
type: nodes.NodeType.ArbitraryVariant,
range: [match.index, regexp.lastIndex],
selector: {
type: nodes.NodeType.CssSelector,
range: [match.index + 1, ar_rb],
value: text.slice(match.index + 1, ar_rb),
},
closed: true,
}
if (isSpace(text.charCodeAt(start)) || isComment(start)) {
const span: nodes.VariantSpan = {
type: nodes.NodeType.VariantSpan,
variant,
range: [match.index, regexp.lastIndex],
}
return { expr: span, lastIndex: regexp.lastIndex }
}
const { expr, lastIndex } = parseExpression({ text, start })
const span: nodes.VariantSpan = {
type: nodes.NodeType.VariantSpan,
variant,
range: [match.index, lastIndex],
child: expr,
}
return { expr: span, lastIndex }
}
let exclamationLeft = false
if (text.charCodeAt(start) === 33) {
exclamationLeft = true
start += 1
}
if (classnames) {
let exclamationRight = false
let _end = regexp.lastIndex
if (text.charCodeAt(regexp.lastIndex - 1) === 33) {
exclamationRight = true
_end -= 1
}
const classname: nodes.Classname = {
type: nodes.NodeType.ClassName,
range: [start, _end],
value: text.slice(start, _end),
important: exclamationLeft || exclamationRight,
}
return { expr: classname, lastIndex: regexp.lastIndex }
} else if (arbitrary) {
const prop: nodes.Identifier = {
type: nodes.NodeType.Identifier,
range: [start, start + arbitrary.length],
value: text.slice(start, start + arbitrary.length),
}
// text-[], text-[]/opacity, text-[]/[opacity]
const hyphen = text.charCodeAt(regexp.lastIndex - 2) === 45
// text-color/[opacity]
const slash = text.charCodeAt(regexp.lastIndex - 2) === 47
// NOTE: text-color/opacity is a normal classname.
const rb = findRightBracket({ text, start: regexp.lastIndex - 1, end, brackets: [91, 93] })
const expr: nodes.CssExpression = {
type: nodes.NodeType.CssExpression,
range: [regexp.lastIndex, rb ? rb : end],
value: text.slice(regexp.lastIndex, rb ? rb : end),
}
if (rb != undefined) regexp.lastIndex = rb + 1
else regexp.lastIndex = end
if (!slash && !hyphen) {
const exclamationRight = text.charCodeAt(regexp.lastIndex) === 33
if (exclamationRight) regexp.lastIndex += 1
const decl: nodes.CssDeclaration = {
type: nodes.NodeType.CssDeclaration,
prop,
expr,
important: exclamationLeft || exclamationRight,
range: [start, exclamationRight ? regexp.lastIndex - 1 : regexp.lastIndex],
closed: rb != undefined,
}
return { expr: decl, lastIndex: regexp.lastIndex }
}
let e: nodes.WithOpacity | nodes.EndOpacity | undefined
let exclamationRight = false
if (rb != undefined) {
// text-[]/xxx
if (hyphen) {
if (text.charCodeAt(regexp.lastIndex) === 47) {
regexp.lastIndex += 1
if (text.charCodeAt(regexp.lastIndex) === 91) {
const rb = findRightBracket({
text,
start: regexp.lastIndex,
end,
brackets: [91, 93],
})
if (rb != undefined) {
e = {
type: nodes.NodeType.WithOpacity,
range: [regexp.lastIndex, rb + 1],
opacity: {
type: nodes.NodeType.Identifier,
range: [regexp.lastIndex + 1, rb],
value: text.slice(regexp.lastIndex + 1, rb),
},
closed: true,
}
regexp.lastIndex = rb + 1
if (text.charCodeAt(regexp.lastIndex) === 33) {
exclamationRight = true
regexp.lastIndex += 1
}
} else {
e = {
type: nodes.NodeType.WithOpacity,
range: [regexp.lastIndex, end],
opacity: {
type: nodes.NodeType.Identifier,
range: [regexp.lastIndex + 1, end],
value: text.slice(regexp.lastIndex + 1, end),
},
closed: false,
}
regexp.lastIndex = end
}
} else {
let k = regexp.lastIndex
for (; k < end; k++) {
if (isSpace(text.charCodeAt(k))) {
break
}
}
e = {
type: nodes.NodeType.EndOpacity,
range: [regexp.lastIndex, k],
value: text.slice(regexp.lastIndex, k),
}
if (text.charCodeAt(k - 1) === 33) exclamationRight = true
regexp.lastIndex = k + 1
}
} else if (text.charCodeAt(regexp.lastIndex) === 33) {
exclamationRight = true
regexp.lastIndex += 1
}
} else if (slash) {
e = {
type: nodes.NodeType.WithOpacity,
range: [regexp.lastIndex, rb + 1],
opacity: {
...expr,
type: nodes.NodeType.Identifier,
},
closed: true,
}
if (text.charCodeAt(regexp.lastIndex) === 33) {
exclamationRight = true
regexp.lastIndex += 1
}
}
}
const arbi: nodes.ArbitraryClassname = {
type: nodes.NodeType.ArbitraryClassname,
important: exclamationLeft || exclamationRight,
prop,
expr: slash ? undefined : expr,
e,
closed: rb != undefined,
range: [start, exclamationRight ? regexp.lastIndex - 1 : regexp.lastIndex],
}
return { expr: arbi, lastIndex: regexp.lastIndex }
} else if (lineComment) {
return { lastIndex: regexp.lastIndex }
} else if (blockComment) {
const closeComment = findRightBlockComment(text, match.index)
if (closeComment != undefined) {
regexp.lastIndex = closeComment + 1
} else {
regexp.lastIndex = end
}
return { lastIndex: regexp.lastIndex }
} else if (group) {
let exclamationRight = false
const rb = findRightBracket({ text, start, end })
if (rb != undefined) {
regexp.lastIndex = rb + 1
if (text.charCodeAt(rb + 1) === 33) {
exclamationRight = true
regexp.lastIndex += 1
}
} else {
regexp.lastIndex = end
}
const _end = rb != undefined ? rb : end
const lastIndex = regexp.lastIndex
const expressions = parseExpressions({ text, start: start + 1, end: _end })
const group: nodes.Group = {
type: nodes.NodeType.Group,
closed: rb != undefined,
important: exclamationLeft || exclamationRight,
range: [match.index, lastIndex],
expressions,
}
return { expr: group, lastIndex }
} else if (others) {
const classname: nodes.Classname = {
type: nodes.NodeType.ClassName,
important: false,
range: [match.index, regexp.lastIndex],
value: text.slice(match.index, regexp.lastIndex),
}
return { expr: classname, lastIndex: regexp.lastIndex }
}
}
return { lastIndex: regexp.lastIndex }
function isComment(i: number) {
if (text.charCodeAt(i) === 47) {
return text.charCodeAt(i) === 47 || text.charCodeAt(i) === 42
}
return false
}
} | the_stack |
import {
ResourceName, SpecialStatus, TaggedResource, TaggedSequence,
} from "farmbot";
import { combineReducers, ReducersMapObject } from "redux";
import { helpReducer as help } from "../help/reducer";
import { designer as farm_designer } from "../farm_designer/reducer";
import { photosReducer as photos } from "../photos/reducer";
import { farmwareReducer as farmware } from "../farmware/reducer";
import { regimensReducer as regimens } from "../regimens/reducer";
import { sequenceReducer as sequences } from "../sequences/reducer";
import { RestResources, ResourceIndex } from "./interfaces";
import { isTaggedResource } from "./tagged_resources";
import { arrayWrap, arrayUnwrap } from "./util";
import {
sanitizeNodes,
} from "../sequences/locals_list/sanitize_nodes";
import {
selectAllFarmEvents,
selectAllPinBindings,
selectAllLogs,
selectAllRegimens,
selectAllFolders,
selectAllSequences,
} from "./selectors_by_kind";
import { findUuid } from "./selectors";
import { ExecutableType } from "farmbot/dist/resources/api_resources";
import { betterCompact, unpackUUID } from "../util";
import { createSequenceMeta } from "./sequence_meta";
import { alertsReducer as alerts } from "../messages/reducer";
import { warning } from "../toast/toast";
import { ReduxAction } from "../redux/interfaces";
import { ActionHandler } from "../redux/generate_reducer";
import { get } from "lodash";
import { Actions } from "../constants";
import { getFbosConfig } from "./getters";
import { ingest, PARENTLESS as NO_PARENT } from "../folders/data_transfer";
import {
FolderNode, FolderMeta, FolderNodeTerminal, FolderNodeMedial,
} from "../folders/interfaces";
import { climb } from "../folders/climb";
/** Look up a resource by UUID, throwing when it is missing or malformed. */
export function findByUuid(index: ResourceIndex, uuid: string): TaggedResource {
  const resource = index.references[uuid];
  if (!resource || !isTaggedResource(resource)) {
    throw new Error("BAD UUID- CANT FIND RESOURCE: " + uuid);
  }
  return resource;
}
/** Callback invoked with a resource and the index it is entering/leaving. */
type IndexerCallback = (self: TaggedResource, index: ResourceIndex) => void;
/** A pair of callbacks that maintain one secondary index over resources. */
export interface Indexer {
  /** Resources entering index */
  up: IndexerCallback;
  /** Resources leaving index */
  down: IndexerCallback;
}
/**
 * Rebuild the sequence-folder tree and its per-folder metadata from scratch.
 * Called whenever a Folder or Sequence enters the index (see folderIndexer).
 */
export const reindexFolders = (i: ResourceIndex) => {
  // Only folders that have been saved (numeric id) participate in the tree.
  const folders = betterCompact(selectAllFolders(i)
    .map((x): FolderNode | undefined => {
      const { body } = x;
      if (typeof body.id === "number") {
        const fn: FolderNode = { id: body.id, ...body };
        return fn;
      }
    }));
  const allSequences = selectAllSequences(i);
  const oldMeta = i.sequenceFolders.localMetaAttributes;
  /** Open folder edit mode when adding a new folder (& not during init all). */
  const editing = !!oldMeta[-1];
  const localMetaAttributes: Record<number, FolderMeta> = {};
  folders.map(x => {
    localMetaAttributes[x.id] = {
      ...(oldMeta[x.id] || { editing }),
      sequences: [], // Clobber and re-init
    };
  });
  // Attach every sequence to its folder (or the NO_PARENT pseudo-folder).
  allSequences.map((s) => {
    const { folder_id } = s.body;
    const parentId = folder_id || NO_PARENT;
    if (!localMetaAttributes[parentId]) {
      localMetaAttributes[parentId] = {
        sequences: [],
        open: true,
        editing: false
      };
    }
    localMetaAttributes[parentId].sequences.push(s.uuid);
  });
  const { searchTerm } = i.sequenceFolders;
  /** Perform tree search for search term O(n)
   * complexity plz send help. */
  if (searchTerm) {
    // NOTE(review): sequenceHits/folderHits are collected but never read
    // afterwards, and filteredFolders is not derived from them here —
    // confirm whether this search pass is dead code or consumed elsewhere.
    const sequenceHits = new Set<string>();
    const folderHits = new Set<number>();
    climb(i.sequenceFolders.folders, (node) => {
      node.content.map(x => {
        const s = i.references[x];
        if (s &&
          s.kind == "Sequence" &&
          s.body.name.toLowerCase().includes(searchTerm.toLowerCase())) {
          sequenceHits.add(s.uuid);
          folderHits.add(node.id);
        }
      });
      const nodes: (FolderNodeMedial | FolderNodeTerminal)[] =
        node.children || [];
      nodes.map(_x => { });
    });
  }
  // Swap in the freshly-built tree; keep filteredFolders only while searching.
  i.sequenceFolders = {
    folders: ingest({ folders, localMetaAttributes }),
    localMetaAttributes,
    searchTerm: searchTerm,
    filteredFolders: searchTerm
      ? i.sequenceFolders.filteredFolders
      : undefined
  };
};
/** Rebuild the folder tree whenever a Folder or Sequence enters the index. */
export const folderIndexer: IndexerCallback = (resource, index) => {
  switch (resource.kind) {
    case "Folder":
    case "Sequence":
      reindexFolders(index);
      break;
    default:
      break;
  }
};
/** Folder tree only needs rebuilding on the way in; removal is a no-op here. */
const SEQUENCE_FOLDERS: Indexer = { up: folderIndexer, down: () => { } };
/** uuid -> TaggedResource lookup. */
const REFERENCES: Indexer = {
  up: (r, i) => i.references[r.uuid] = r,
  down: (r, i) => delete i.references[r.uuid],
};
/** Set of every known uuid. */
const ALL: Indexer = {
  up: (r, s) => s.all[r.uuid] = true,
  down: (r, i) => delete i.all[r.uuid],
};
/** kind -> { uuid: uuid } lookup. */
const BY_KIND: Indexer = {
  up(r, i) {
    i.byKind[r.kind]
      ? i.byKind[r.kind][r.uuid] = r.uuid
      : console.error(`${r.kind} is not an indexed resource.`);
  },
  down(r, i) { delete i.byKind[r.kind][r.uuid]; },
};
/** "kind.id" -> uuid lookup. Only saved resources (truthy id) are added. */
const BY_KIND_AND_ID: Indexer = {
  up: (r, i) => {
    if (r.body.id) {
      i.byKindAndId[joinKindAndId(r.kind, r.body.id)] = r.uuid;
    }
  },
  down(r, i) {
    delete i.byKindAndId[joinKindAndId(r.kind, r.body.id)];
    // Also deletes the "kind.0" key — presumably a placeholder for
    // not-yet-saved resources; verify against joinKindAndId's default.
    delete i.byKindAndId[joinKindAndId(r.kind, 0)];
  },
};
/** Record, for each callee sequence id in `ids`, that `myUuid` uses it. */
export function updateSequenceUsageIndex(
  myUuid: string, ids: number[], i: ResourceIndex) {
  for (const id of ids) {
    const uuid = i.byKindAndId[joinKindAndId("Sequence", id)];
    // `undefined` usually means "not ready".
    if (!uuid) {
      continue;
    }
    const previous = i.inUse["Sequence.Sequence"][uuid] || {};
    i.inUse["Sequence.Sequence"][uuid] = { ...previous, [myUuid]: true };
  }
}
/** Store the (sanitized) sequence and rebuild its variable metadata cache. */
export const updateOtherSequenceIndexes =
  (tr: TaggedSequence, i: ResourceIndex) => {
    i.references[tr.uuid] = tr;
    i.sequenceMetas[tr.uuid] = createSequenceMeta(i, tr);
  };
/** Returns a mapper that re-indexes a single sequence's AST and usage data. */
const reindexSequences = (i: ResourceIndex) => (s: TaggedSequence) => {
  // STEP 1: Sanitize nodes, tag them with unique UUIDs (for React),
  // collect up sequence_id's, etc. NOTE: This is CPU expensive,
  // so if you need to do tree traversal, do it now.
  const { thisSequence, callsTheseSequences } = sanitizeNodes(s.body);
  // STEP 2: Add sequence to index.references, update variable reference
  // indexes
  updateSequenceUsageIndex(s.uuid, callsTheseSequences, i);
  // Step 3: Update the in_use stats for Sequence-to-Sequence usage.
  updateOtherSequenceIndexes({ ...s, body: thisSequence }, i);
};
/** Wipe and rebuild the Sequence-to-Sequence usage index for every sequence. */
const reindexAllSequences = (i: ResourceIndex) => {
  i.inUse["Sequence.Sequence"] = {};
  const reindex = reindexSequences(i);
  for (const uuid of Object.keys(i.byKind["Sequence"])) {
    const resource = i.references[uuid];
    if (resource?.kind == "Sequence") {
      reindex(resource);
    }
  }
};
/** Wipe and rebuild the FarmEvent usage indexes for regimens and sequences. */
export function reindexAllFarmEventUsage(i: ResourceIndex) {
  i.inUse["Regimen.FarmEvent"] = {};
  i.inUse["Sequence.FarmEvent"] = {};
  // Route each executable type to its own tracker object.
  const whichOne: Record<ExecutableType, typeof i.inUse["Regimen.FarmEvent"]> = {
    "Regimen": i.inUse["Regimen.FarmEvent"],
    "Sequence": i.inUse["Sequence.FarmEvent"],
  };
  // Which FarmEvents use which resource?
  betterCompact(selectAllFarmEvents(i)
    .map(fe => {
      const { executable_type, executable_id } = fe.body;
      const uuid = findUuid(i, executable_type, executable_id);
      return { exe_type: executable_type, exe_uuid: uuid, fe_uuid: fe.uuid };
    }))
    .map(({ exe_type, exe_uuid, fe_uuid }) => {
      whichOne[exe_type] = whichOne[exe_type] || {};
      whichOne[exe_type][exe_uuid] = whichOne[exe_type][exe_uuid] || {};
      whichOne[exe_type][exe_uuid][fe_uuid] = true;
    });
}
/** All secondary-index maintainers, applied in this order on upsert. */
export const INDEXERS: Indexer[] = [
  REFERENCES,
  ALL,
  BY_KIND,
  BY_KIND_AND_ID,
  SEQUENCE_FOLDERS,
];
/** Optional per-kind callbacks fired before/after indexing a resource kind. */
type IndexerHook = Partial<Record<TaggedResource["kind"], Reindexer>>;
type Reindexer = (i: ResourceIndex, strategy: "ongoing" | "initial") => void;
/** Build the composite `byKindAndId` key; a missing/zero id becomes 0. */
export function joinKindAndId(kind: ResourceName, id: number | undefined) {
  const idPart = id || 0;
  return kind + "." + idPart;
}
/** Any reducer that uses TaggedResources (or UUIDs) must live within the
 * resource reducer. Failure to do so can result in stale UUIDs, referential
 * integrity issues and other bad stuff. The variable below contains all
 * resource consuming reducers. */
const consumerReducer = combineReducers<RestResources["consumers"]>({
  regimens,
  sequences,
  farm_designer,
  photos,
  farmware,
  help,
  alerts
  // The `any` cast quiets combineReducers' strict typing over this map.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
} as ReducersMapObject<RestResources["consumers"], any>);
/** The resource reducer must have the first say when a resource-related action
 * fires off. Afterwards, sub-reducers are allowed to make sense of data
 * changes. A common use case for sub-reducers is to listen for
 * `DESTROY_RESOURCE_OK` and clean up stale UUIDs. */
export const afterEach = (state: RestResources, a: ReduxAction<unknown>) => {
  // Rebuild `consumers` with an explicit field list (rather than a spread)
  // so only the known sub-states are forwarded to the combined reducer.
  state.consumers = consumerReducer({
    sequences: state.consumers.sequences,
    regimens: state.consumers.regimens,
    farm_designer: state.consumers.farm_designer,
    photos: state.consumers.photos,
    farmware: state.consumers.farmware,
    help: state.consumers.help,
    alerts: state.consumers.alerts
  }, a);
  return state;
};
/** Helper method to change the `specialStatus` of a resource in the index */
export const mutateSpecialStatus =
  (uuid: string, index: ResourceIndex, status = SpecialStatus.SAVED) => {
    // findByUuid throws when the uuid is unknown, so this cannot silently no-op.
    findByUuid(index, uuid).specialStatus = status;
  };
/** Reducer handler: upsert one freshly-initialized resource into the index. */
export function initResourceReducer(s: RestResources,
  action: ReduxAction<TaggedResource>): RestResources {
  indexUpsert(s.index, [action.payload], "ongoing");
  return s;
}
/** Hooks that run BEFORE a batch of resources of a given kind is indexed. */
const BEFORE_HOOKS: IndexerHook = {
  Log(_index, strategy) {
    // IMPLEMENTATION DETAIL: When the app downloads a *list* of logs, we
    // replace the entire logs collection.
    (strategy === "initial") &&
      selectAllLogs(_index).map(log => indexRemove(_index, log));
  },
};
/** Hooks that run AFTER a batch of resources of a given kind is indexed.
 * Each rebuilds the relevant `inUse` cross-reference tracker. */
const AFTER_HOOKS: IndexerHook = {
  // Track which sequence (if any) is used as the boot sequence.
  FbosConfig: (i) => {
    const conf = getFbosConfig(i);
    if (conf?.body.boot_sequence_id) {
      const { boot_sequence_id } = conf.body;
      const tracker = i.inUse["Sequence.FbosConfig"];
      const uuid = i.byKindAndId[joinKindAndId("Sequence", boot_sequence_id)];
      if (uuid) {
        tracker[uuid] = tracker[uuid] || {};
        tracker[uuid][conf.uuid] = true;
      }
    } else {
      i.inUse["Sequence.FbosConfig"] = {};
    }
  },
  // Track which sequences are bound to physical pins.
  PinBinding: (i) => {
    i.inUse["Sequence.PinBinding"] = {};
    const tracker = i.inUse["Sequence.PinBinding"];
    selectAllPinBindings(i)
      .map(pinBinding => {
        if (pinBinding.body.binding_type === "standard") {
          const { sequence_id } = pinBinding.body;
          const uuid = i.byKindAndId[joinKindAndId("Sequence", sequence_id)];
          if (uuid) {
            tracker[uuid] = tracker[uuid] || {};
            tracker[uuid][pinBinding.uuid] = true;
          }
        }
      });
  },
  FarmEvent: reindexAllFarmEventUsage,
  Sequence: reindexAllSequences,
  // Track which sequences are referenced by regimen items.
  Regimen: (i) => {
    i.inUse["Sequence.Regimen"] = {};
    const tracker = i.inUse["Sequence.Regimen"];
    selectAllRegimens(i)
      .map(reg => {
        reg.body.regimen_items.map(ri => {
          const sequenceUuid = findUuid(i, "Sequence", ri.sequence_id);
          tracker[sequenceUuid] = tracker[sequenceUuid] || {};
          tracker[sequenceUuid][reg.uuid] = true;
        });
      });
  }
};
// Pre-extracted indexer callbacks: `ups` run in declaration order on insert;
// `downs` run in REVERSE order on removal, tearing down in the opposite order.
const ups = INDEXERS.map(x => x.up);
const downs = INDEXERS.map(x => x.down).reverse();
type UpsertStrategy =
  /** Do not throw away pre-existing resources. */
  | "ongoing"
  /** Replace everything in the index. */
  | "initial";
/** Signature for bulk upserts into the resource index. */
type IndexUpsert = (db: ResourceIndex,
  resources: TaggedResource[],
  strategy: UpsertStrategy) => void;
/**
 * Bulk-insert `resources` (all of one kind) into the index, running the
 * kind-specific before/after hooks around the indexer callbacks.
 */
export const indexUpsert: IndexUpsert = (db, resources, strategy) => {
  if (!resources.length) {
    return;
  }
  const { kind } = arrayUnwrap(resources);
  // Clean up indexes (if needed)
  BEFORE_HOOKS[kind]?.(db, strategy);
  // Run indexers: each callback sees every resource before the next runs.
  for (const callback of ups) {
    for (const resource of resources) {
      callback(resource, db);
    }
  }
  // Finalize indexing (if needed)
  AFTER_HOOKS[kind]?.(db, strategy);
};
/** Remove one resource from every index (tear-down order), then finalize. */
export function indexRemove(db: ResourceIndex, resource: TaggedResource) {
  for (const callback of downs) {
    for (const r of arrayWrap(resource)) {
      callback(r, db);
    }
  }
  // Finalize indexing (if needed)
  AFTER_HOOKS[resource.kind]?.(db, "ongoing");
}
/**
 * Gate that runs before the resource reducer's action handler. When the
 * WebAppConfig has `user_interface_read_only_mode` set, most mutating actions
 * are blocked with a warning; edits to WebAppConfig itself are allowed so the
 * user can turn read-only mode back off.
 */
export const beforeEach = (state: RestResources,
  action: ReduxAction<unknown>,
  handler: ActionHandler<RestResources, unknown>) => {
  const { byKind, references } = state.index;
  // There is at most one WebAppConfig; grab the first (only) uuid.
  const w = references[Object.keys(byKind.WebAppConfig)[0]];
  const readOnly = w &&
    w.kind == "WebAppConfig" &&
    w.body.user_interface_read_only_mode;
  if (!readOnly) {
    return handler(state, action);
  }
  const fail = (place: string) => {
    warning(`(${place}) Can't modify account data when in read-only mode.`);
  };
  // "x.y.z" is a harmless fallback for actions that carry no payload uuid.
  const { kind } = unpackUUID(get(action, "payload.uuid", "x.y.z") as string);
  switch (action.type) {
    case Actions.EDIT_RESOURCE:
      if (kind === "WebAppConfig") {
        // User is trying to exit read-only mode.
        return handler(state, action);
      } else {
        fail("1");
        return state;
      }
    case Actions.SAVE_RESOURCE_START:
    case Actions.DESTROY_RESOURCE_START:
      if (kind !== "WebAppConfig") {
        // User is trying to make HTTP requests.
        // NOTE(review): unlike EDIT_RESOURCE, this warns but still forwards
        // to the handler below — confirm that is intentional.
        fail("3");
      }
      // User is trying to exit read-only mode.
      return handler(state, action);
    case Actions.BATCH_INIT:
    case Actions.INIT_RESOURCE:
    case Actions.OVERWRITE_RESOURCE:
      fail("2");
      return state;
    default:
      return handler(state, action);
  }
};
/// <reference lib="esnext.asynciterable" />
import { OperationOptions } from "@azure/core-client";
import { SpanStatusCode } from "@azure/core-tracing";
import "@azure/core-paging";
import { PageSettings, PagedAsyncIterableIterator } from "@azure/core-paging";
import { GeneratedClient, RepositoryWriteableProperties } from "./generated";
import { createSpan } from "./tracing";
import {
ManifestOrderBy,
ContainerRepositoryProperties,
ArtifactManifestProperties,
ManifestPageResponse,
} from "./models";
import { RegistryArtifact, RegistryArtifactImpl } from "./registryArtifact";
import { toArtifactManifestProperties, toServiceManifestOrderBy } from "./transformations";
import { extractNextLink } from "./utils/helpers";
/**
* Options for delete repository operation.
*/
export interface DeleteRepositoryOptions extends OperationOptions {}
/**
* Options for the `listRegistryArtifacts` method of `ContainerRepository`.
*/
export interface ListManifestPropertiesOptions extends OperationOptions {
/** orderby query parameter */
orderBy?: ManifestOrderBy;
}
/**
* Options for the `getProperties` method of `ContainerRepository`.
*/
export interface GetRepositoryPropertiesOptions extends OperationOptions {}
/**
* Options for the `setProperties` method of `ContainerRepository`.
*/
export interface UpdateRepositoryPropertiesOptions extends OperationOptions {
/** Whether or not this repository can be deleted */
canDelete?: boolean;
/** Whether or not this repository can be written to */
canWrite?: boolean;
/** Whether or not include this repository when listing repositories */
canList?: boolean;
/** Whether or not this repository can be read */
canRead?: boolean;
}
/**
* A `repository` in a container registry is a logical grouping of images or artifacts that share the same name. For example,
* different versions of a `hello-world` application could have tags `v1` and `v2`, and be grouped by the repository `hello-world`.
*
* The {@link ContainerRepository} interface is a helper that groups information and operations about a repository in this
* container registry.
*/
export interface ContainerRepository {
/**
* The Azure Container Registry endpoint.
*/
readonly registryEndpoint: string;
/**
* Repository name.
*/
readonly name: string;
/**
* Deletes this repository and all artifacts that are part of its logical group.
*
* @param options - optional configuration for the operation
*/
delete(options?: DeleteRepositoryOptions): Promise<void>;
/**
* Returns an helper instance of {@link RegistryArtifact} for the given tag or digest.
* @param tagOrDigest - the tag or digest of the artifact
*/
getArtifact(tagOrDigest: string): RegistryArtifact;
/**
* Retrieves the properties of this repository.
* @param options -
*/
getProperties(options?: GetRepositoryPropertiesOptions): Promise<ContainerRepositoryProperties>;
/**
* Updates the properties of this repository.
*
* Example usage:
*
* ```javascript
* const client = new ContainerRegistryClient(url, credential);
* const repository = client.getRepository(repositoryName)
* const updated = await repository.updateProperties({
* canDelete: false,
* canList: false,
* canRead: false,
* canWrite: false
* });
* ```
* @param options -
*/
updateProperties(
options: UpdateRepositoryPropertiesOptions
): Promise<ContainerRepositoryProperties>;
/**
* Returns an async iterable iterator to list manifest properties.
* This is useful for determining the collection of artifacts associated with
* this repository, as each artifact is uniquely identified by its manifest.
*
* Example using `for-await-of` syntax:
*
* ```javascript
* const client = new ContainerRegistryClient(url, credential);
* const repository = client.getRepository(repositoryName)
* for await (const manifest of repository.listManifestProperties()) {
* console.log("manifest: ", manifest);
* }
* ```
*
* Example using `iter.next()`:
*
* ```javascript
* const iter = repository.listManifestProperties();
* let item = await iter.next();
* while (!item.done) {
* console.log("manifest properties: ", item.value);
* item = await iter.next();
* }
* ```
*
* Example using `byPage()`:
*
* ```javascript
* const pages = repository.listManifestProperties().byPage({ maxPageSize: 2 });
* let page = await pages.next();
* let i = 1;
* while (!page.done) {
* if (page.value) {
* console.log(`-- page ${i++}`);
* for (const manifestProperties of page.value) {
* console.log(` manifest properties: ${manifestProperties}`);
* }
* }
* page = await pages.next();
* }
* ```
* @param options -
*/
listManifestProperties(
options?: ListManifestPropertiesOptions
): PagedAsyncIterableIterator<ArtifactManifestProperties>;
}
/**
* The client class used to interact with the Container Registry service.
* @internal
*/
export class ContainerRepositoryImpl {
// Auto-generated client that performs the underlying REST calls.
private readonly client: GeneratedClient;
/**
 * The Azure Container Registry endpoint.
 */
public readonly registryEndpoint: string;
/**
 * Repository name.
 */
public readonly name: string;
/**
 * Creates an instance of a ContainerRepository.
 * @param registryEndpoint - the URL to the Container Registry endpoint
 * @param name - the name of the repository
 * @param client - the generated client that interacts with service
 */
constructor(registryEndpoint: string, name: string, client: GeneratedClient) {
  this.registryEndpoint = registryEndpoint;
  this.name = name;
  this.client = client;
}
/**
 * Deletes this repository and all artifacts that are part of its logical group.
 *
 * @param options - optional configuration for the operation
 * @throws re-throws any service error after recording it on the trace span
 */
public async delete(options: DeleteRepositoryOptions = {}): Promise<void> {
  const { span, updatedOptions } = createSpan("ContainerRepository-delete", options);
  try {
    await this.client.containerRegistry.deleteRepository(this.name, updatedOptions);
  } catch (e) {
    // FIX: guard against non-Error throwables — `e.message` is undefined for
    // them, which would record an empty failure message on the span.
    const message = e instanceof Error ? e.message : String(e);
    span.setStatus({ code: SpanStatusCode.ERROR, message });
    throw e;
  } finally {
    span.end();
  }
}
/**
 * Returns a helper instance of {@link RegistryArtifact} for the given tag or digest.
 * @param tagOrDigest - the tag or digest of the artifact
 * @throws Error when `tagOrDigest` is empty
 */
public getArtifact(tagOrDigest: string): RegistryArtifact {
  if (!tagOrDigest) {
    throw new Error("invalid tagOrDigest");
  }
  const { registryEndpoint, name, client } = this;
  return new RegistryArtifactImpl(registryEndpoint, name, tagOrDigest, client);
}
/**
 * Retrieves the properties of this repository.
 * @param options - optional configuration for the operation
 * @returns the repository's current properties
 * @throws re-throws any service error after recording it on the trace span
 */
public async getProperties(
  options: GetRepositoryPropertiesOptions = {}
): Promise<ContainerRepositoryProperties> {
  const { span, updatedOptions } = createSpan("ContainerRepository-getProperties", options);
  try {
    return await this.client.containerRegistry.getProperties(this.name, updatedOptions);
  } catch (e) {
    // FIX: guard against non-Error throwables (see `delete`).
    const message = e instanceof Error ? e.message : String(e);
    span.setStatus({ code: SpanStatusCode.ERROR, message });
    throw e;
  } finally {
    span.end();
  }
}
/**
 * Updates the writeable properties (canDelete/canWrite/canList/canRead) of
 * this repository.
 *
 * Example usage:
 *
 * ```javascript
 * const client = new ContainerRegistryClient(url, credential);
 * const repository = client.getRepository(repositoryName)
 * const updated = await repository.updateProperties({
 *   canDelete: false,
 *   canList: false,
 *   canRead: false,
 *   canWrite: false
 * });
 * ```
 * @param options - the permission flags to set
 * @returns the repository properties after the update
 * @throws re-throws any service error after recording it on the trace span
 */
public async updateProperties(
  options: UpdateRepositoryPropertiesOptions
): Promise<ContainerRepositoryProperties> {
  const value: RepositoryWriteableProperties = {
    canDelete: options.canDelete,
    canWrite: options.canWrite,
    canList: options.canList,
    canRead: options.canRead,
  };
  // `value` is folded into the span options so it reaches the generated
  // client through `updatedOptions`.
  const { span, updatedOptions } = createSpan("ContainerRepository-updateProperties", {
    ...options,
    value,
  });
  try {
    return await this.client.containerRegistry.updateProperties(this.name, updatedOptions);
  } catch (e) {
    // FIX: guard against non-Error throwables (see `delete`).
    const message = e instanceof Error ? e.message : String(e);
    span.setStatus({ code: SpanStatusCode.ERROR, message });
    throw e;
  } finally {
    span.end();
  }
}
/**
* Returns an async iterable iterator to list manifest properties.
* This is useful for determining the collection of artifacts associated with
* this repository, as each artifact is uniquely identified by its manifest.
*
* Example using `for-await-of` syntax:
*
* ```javascript
* const client = new ContainerRegistryClient(url, credential);
* const repository = client.getRepository(repositoryName)
* for await (const manifest of repository.listManifestProperties()) {
* console.log("manifest: ", manifest);
* }
* ```
*
* Example using `iter.next()`:
*
* ```javascript
* const iter = repository.listManifestProperties();
* let item = await iter.next();
* while (!item.done) {
* console.log("manifest properties: ", item.value);
* item = await iter.next();
* }
* ```
*
* Example using `byPage()`:
*
* ```javascript
* const pages = repository.listManifestProperties().byPage({ maxPageSize: 2 });
* let page = await pages.next();
* let i = 1;
* while (!page.done) {
* if (page.value) {
* console.log(`-- page ${i++}`);
* for (const manifestProperties of page.value) {
* console.log(` manifest properties: ${manifestProperties}`);
* }
* }
* page = await pages.next();
* }
* ```
* @param options -
*/
public listManifestProperties(
options: ListManifestPropertiesOptions = {}
): PagedAsyncIterableIterator<ArtifactManifestProperties, ManifestPageResponse> {
const iter = this.listManifestsItems(options);
return {
next() {
return iter.next();
},
[Symbol.asyncIterator]() {
return this;
},
byPage: (settings: PageSettings = {}) => this.listManifestsPage(settings, options),
};
}
private async *listManifestsItems(
options: ListManifestPropertiesOptions = {}
): AsyncIterableIterator<ArtifactManifestProperties> {
for await (const page of this.listManifestsPage({}, options)) {
yield* page;
}
}
private async *listManifestsPage(
continuationState: PageSettings,
options: ListManifestPropertiesOptions = {}
): AsyncIterableIterator<ManifestPageResponse> {
const orderby = toServiceManifestOrderBy(options.orderBy);
if (!continuationState.continuationToken) {
const optionsComplete = {
...options,
n: continuationState.maxPageSize,
orderby,
};
const currentPage = await this.client.containerRegistry.getManifests(
this.name,
optionsComplete
);
continuationState.continuationToken = extractNextLink(currentPage.link);
if (currentPage.manifests) {
const array = currentPage.manifests.map((t) =>
toArtifactManifestProperties(t, this.name, currentPage.registryLoginServer!)
);
yield Object.defineProperty(array, "continuationToken", {
value: continuationState.continuationToken,
enumerable: true,
});
}
}
while (continuationState.continuationToken) {
const currentPage = await this.client.containerRegistry.getManifestsNext(
this.name,
continuationState.continuationToken,
options
);
continuationState.continuationToken = extractNextLink(currentPage.link);
if (currentPage.manifests) {
const array = currentPage.manifests.map((t) =>
toArtifactManifestProperties(t, this.name, currentPage.registryLoginServer!)
);
yield Object.defineProperty(array, "continuationToken", {
value: continuationState.continuationToken,
enumerable: true,
});
}
}
}
} | the_stack |
// NOTE(review): this route map appears to be auto-generated openapi-typescript
// output (each entry maps an HTTP path + method to its parameter, request-body,
// and response schema types under `components`) — if so, regenerate it from the
// OpenAPI spec rather than editing by hand; confirm with the build setup.
export interface paths {
"/videos/upload": {
/** アップロードしたビデオファイルを追加する */
post: {
parameters: {};
responses: {
/** アップロードしたビデオファイルを追加しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"multipart/form-data": components["schemas"]["UploadVideoFileOption"];
};
};
};
parameters: {};
};
"/version": {
/** バージョン情報を取得する */
get: {
parameters: {};
responses: {
/** バージョン情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Version"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/thumbnails/cleanup": {
/** サムネイルをクリーンアップする */
post: {
parameters: {};
responses: {
/** サムネイルをクリーンアップしました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/thumbnails": {
/** サムネイルの追加で再生成を開始する */
post: {
parameters: {};
responses: {
/** サムネイルの再生成を開始しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/tags": {
/** タグ情報を取得する */
get: {
parameters: {
query: {
/** offset */
offset?: components["parameters"]["Offset"];
/** limit */
limit?: components["parameters"]["Limit"];
/** name */
name?: components["parameters"]["QueryName"];
/** 除外する RecordedTagId */
excludeTagId?: components["parameters"]["QueryExcludeRecordedTagId"];
};
};
responses: {
/** タグ情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["RecordedTags"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** タグを追加する */
post: {
parameters: {};
responses: {
/** タグの追加に成功した */
201: {
content: {
"application/json": components["schemas"]["AddedRecordedTag"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AddRecordedTagOption"];
};
};
};
parameters: {};
};
"/streams": {
/** ストリーム情報を取得する */
get: {
parameters: {
query: {
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
};
};
responses: {
/** ストリーム情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["StreamInfo"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** 全てのストリームを停止する */
delete: {
parameters: {};
responses: {
/** 全てのストリームを停止しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/storages": {
/** ストレージ情報を取得する */
get: {
parameters: {};
responses: {
/** ストレージ情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["StorageInfo"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/schedules/search": {
/** 番組検索結果を取得する */
post: {
parameters: {};
responses: {
/** 番組検索結果を取得しました */
200: {
content: {
"application/json": components["schemas"]["ScheduleProgramItem"][];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["ScheduleSearchOption"];
};
};
};
parameters: {};
};
"/schedules/broadcasting": {
/** 放映中の番組情報を取得する */
get: {
parameters: {
query: {
/** 追加時間 (ms) */
time?: components["parameters"]["AddtionTime"];
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
};
};
responses: {
/** 放映中の番組情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Schedules"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/schedules": {
/** 番組表情報を取得する */
get: {
parameters: {
query: {
/** 開始時刻 */
startAt: components["parameters"]["StartAt"];
/** 終了時刻 */
endAt: components["parameters"]["EndAt"];
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
/** rawExtended が必要か */
needsRawExtended?: components["parameters"]["NeedsRawExtended"];
/** 無料放送のみ取得するか (true: 無料放送, false: 有料放送, 無指定: 全て) */
isFree?: components["parameters"]["IsFreeProgram"];
/** GR */
GR: components["parameters"]["requiredGR"];
/** BS */
BS: components["parameters"]["requiredBS"];
/** CS */
CS: components["parameters"]["requiredCS"];
/** SKY */
SKY: components["parameters"]["requiredSKY"];
};
};
responses: {
/** 番組表情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Schedules"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/rules/keyword": {
/** ルールをキーワード検索する */
get: {
parameters: {
query: {
/** offset */
offset?: components["parameters"]["Offset"];
/** limit */
limit?: components["parameters"]["Limit"];
/** キーワード */
keyword?: components["parameters"]["QueryKeyword"];
};
};
responses: {
/** ルールをキーワード検索結果を取得しました */
200: {
content: {
"application/json": components["schemas"]["RuleKeywordInfo"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** ルールを追加する */
post: {
parameters: {};
responses: {
/** ルールの追加に成功した */
201: {
content: {
"application/json": components["schemas"]["AddedRule"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AddRuleOption"];
};
};
};
parameters: {};
};
"/rules": {
/** ルール情報を取得する */
get: {
parameters: {
query: {
/** offset */
offset?: components["parameters"]["Offset"];
/** limit */
limit?: components["parameters"]["Limit"];
/** 予約情報取得タイプ */
type?: components["parameters"]["GetReserveType"];
/** キーワード */
keyword?: components["parameters"]["QueryKeyword"];
};
};
responses: {
/** ルール情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Rules"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** ルールを追加する */
post: {
parameters: {};
responses: {
/** ルールの追加に成功した */
201: {
content: {
"application/json": components["schemas"]["AddedRule"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AddRuleOption"];
};
};
};
parameters: {};
};
"/reserves/update": {
/** 予約情報の更新を開始する */
post: {
parameters: {};
responses: {
/** 予約情報の更新を開始しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/reserves/lists": {
/** 予約リスト情報を取得する */
get: {
parameters: {
query: {
/** 開始時刻 */
startAt: components["parameters"]["StartAt"];
/** 終了時刻 */
endAt: components["parameters"]["EndAt"];
};
};
responses: {
/** 予約リスト情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["ReserveLists"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/reserves/cnts": {
/** 予約数を取得する */
get: {
parameters: {};
responses: {
/** 予約数を取得しました */
200: {
content: {
"application/json": components["schemas"]["ReserveCnts"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/reserves": {
/** 予約情報を取得する */
get: {
parameters: {
query: {
/** offset */
offset?: components["parameters"]["Offset"];
/** limit */
limit?: components["parameters"]["Limit"];
/** 予約情報取得タイプ */
type?: components["parameters"]["GetReserveType"];
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
/** ルールid */
ruleId?: components["parameters"]["QueryRuleId"];
};
};
responses: {
/** 予約情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Reserves"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** 予約を追加する */
post: {
parameters: {};
responses: {
/** 予約の追加に成功した */
201: {
content: {
"application/json": components["schemas"]["AddedReserve"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["ManualReserveOption"];
};
};
};
parameters: {};
};
"/recording/resettimer": {
/** 予約タイマーを再設定する */
post: {
parameters: {};
responses: {
/** 予約タイマーを再設定しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recording": {
/** 録画中情報を取得する */
get: {
parameters: {
query: {
/** offset */
offset?: components["parameters"]["Offset"];
/** limit */
limit?: components["parameters"]["Limit"];
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
};
};
responses: {
/** 録画中情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Records"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recorded/options": {
/** 録画検索オプションを取得する */
get: {
parameters: {};
responses: {
/** 録画検索オプションを取得しました */
200: {
content: {
"application/json": components["schemas"]["RecordedSearchOptions"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recorded/cleanup": {
/** 録画をクリーンアップする */
post: {
parameters: {};
responses: {
/** 録画をクリーンアップしました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recorded": {
/** 録画情報を取得する */
get: {
parameters: {
query: {
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
/** offset */
offset?: components["parameters"]["Offset"];
/** limit */
limit?: components["parameters"]["Limit"];
/** 逆順で取得するか */
isReverse?: components["parameters"]["IsReverse"];
/** ルールid */
ruleId?: components["parameters"]["QueryRuleId"];
/** 放送局 id */
channelId?: components["parameters"]["QueryChannelId"];
/** ジャンル */
genre?: components["parameters"]["QueryProgramGenre"];
/** キーワード */
keyword?: components["parameters"]["QueryKeyword"];
/** オリジナルファイルを含むか */
hasOriginalFile?: components["parameters"]["QueryHasOriginalFile"];
};
};
responses: {
/** 録画情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Records"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** 録画番組情報を新規作成する */
post: {
parameters: {};
responses: {
/** 録画番組情報の新規作成に成功した */
201: {
content: {
"application/json": components["schemas"]["CreatedNewRecorded"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["CreateNewRecordedOption"];
};
};
};
parameters: {};
};
"/iptv/epg.xml": {
/** IPTV epg を取得する */
get: {
parameters: {
query: {
/** 半角文字で取得するか */
isHalfWidth?: components["parameters"]["IPTVIsHalfWidth"];
/** 取得日数 */
days?: components["parameters"]["IPTVDays"];
};
};
responses: {
/** epg を取得しました */
200: {
content: {
"application/xml": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/iptv/channel.m3u8": {
/** IPTV channel list を取得する */
get: {
parameters: {
query: {
/** 半角文字で取得するか */
isHalfWidth?: components["parameters"]["IPTVIsHalfWidth"];
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** channel list を取得しました */
200: {
content: {
"application/x-mpegURL": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/encode": {
/** エンコード情報を取得する */
get: {
parameters: {
query: {
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
};
};
responses: {
/** エンコード情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["EncodeInfo"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** エンコードを追加する */
post: {
parameters: {};
responses: {
/** エンコードの追加に成功した */
201: {
content: {
"application/json": components["schemas"]["AddedEncode"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AddManualEncodeProgramOption"];
};
};
};
parameters: {};
};
"/config": {
/** config 情報を取得する */
get: {
parameters: {};
responses: {
/** config 情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Config"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/channels": {
/** 放送局情報を取得する */
get: {
parameters: {};
responses: {
/** 放送局情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["ChannelItems"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/videos/{videoFileId}/playlist": {
/** ビデオプレイリストを取得する */
get: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
};
responses: {
/** ビデオプレイリストを取得しました */
200: {
content: {
"application/x-mpegURL": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/videos/{videoFileId}/kodi": {
/** ビデオリンクを kodi へ送信する */
post: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
};
responses: {
/** ビデオリンクを kodi へ送信するしました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["SendVideoLinkToKodiOption"];
};
};
};
parameters: {};
};
"/videos/{videoFileId}/duration": {
/** 動画の長さを取得する */
get: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
};
responses: {
/** 動画の長さを取得しました */
200: {
content: {
"application/json": components["schemas"]["VideoFileDuration"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/videos/{videoFileId}": {
/** ビデオファイルを取得する */
get: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
query: {
/** ファイルをダウンロードするか */
isDownload?: components["parameters"]["IsDownload"];
};
};
responses: {
/** ビデオファイルを取得しました */
200: {
content: {
"video/mp2t": unknown;
"video/mp4": unknown;
"video/x-matroska": unknown;
"video/webm": unknown;
"application/octet-stream": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** ビデオファイルを削除する */
delete: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
};
responses: {
/** ビデオファイルを削除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/thumbnails/videos/{videoFileId}": {
/** サムネイルの生成を開始させる */
post: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
};
responses: {
/** 追加サムネイルの生成を開始しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/thumbnails/{thumbnailId}": {
/** サムネイルを取得する */
get: {
parameters: {
path: {
/** thumbnail id */
thumbnailId: components["parameters"]["PathThumbnailId"];
};
};
responses: {
/** サムネイルを取得しました */
200: {
content: {
"image/jpeg": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** サムネイルを削除する */
delete: {
parameters: {
path: {
/** thumbnail id */
thumbnailId: components["parameters"]["PathThumbnailId"];
};
};
responses: {
/** サムネイルを削除しました */
200: {
content: {
"image/jpeg": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/tags/{tagId}/relate": {
/** 録画番組とタグを関連付けする */
put: {
parameters: {
path: {
/** recorded tag id */
tagId: components["parameters"]["PathRecordedTagId"];
};
};
responses: {
/** 録画番組とタグの関連付けに成功した */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["RelateRecordedTagOption"];
};
};
};
/** 録画番組とタグの関連付けを削除する */
delete: {
parameters: {
path: {
/** recorded tag id */
tagId: components["parameters"]["PathRecordedTagId"];
};
query: {
/** recorded id */
recordedId?: components["parameters"]["QueryRecordedId"];
};
};
responses: {
/** 録画番組とタグの関連付けを削除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/tags/{tagId}": {
/** タグ名を変更する */
put: {
parameters: {
path: {
/** recorded tag id */
tagId: components["parameters"]["PathRecordedTagId"];
};
};
responses: {
/** タグの更新に成功した */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AddRecordedTagOption"];
};
};
};
/** タグを削除する */
delete: {
parameters: {
path: {
/** recorded tag id */
tagId: components["parameters"]["PathRecordedTagId"];
};
};
responses: {
/** タグを削除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/recorded/{videoFileId}/webm": {
/** 録画 WebM ストリームを取得する */
get: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
query: {
/** 再生位置 */
ss: components["parameters"]["StreamPlayPosition"];
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** 録画 WebM ストリーム */
200: {
content: {
"video/webm": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/recorded/{videoFileId}/mp4": {
/** 録画 mp4 ストリームを取得する */
get: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
query: {
/** 再生位置 */
ss: components["parameters"]["StreamPlayPosition"];
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** 録画 mp4 ストリーム */
200: {
content: {
"video/mp4": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/recorded/{videoFileId}/hls": {
/** 録画 HLS ストリームを開始する */
get: {
parameters: {
path: {
/** video file id */
videoFileId: components["parameters"]["PathVideoFileId"];
};
query: {
/** 再生位置 */
ss: components["parameters"]["StreamPlayPosition"];
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** 録画 HLS ストリームを開始しました */
200: {
content: {
"application/json": components["schemas"]["StartStreamInfo"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/live/{channelId}/webm": {
/** ライブ WebM ストリームを取得する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
query: {
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** ライブ WebM ストリーム */
200: {
content: {
"video/webm": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/live/{channelId}/mp4": {
/** ライブ mp4 ストリームを取得する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
query: {
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** ライブ mp4 ストリーム */
200: {
content: {
"video/mp4": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/live/{channelId}/m2tsll": {
/** ライブ M2TS Low Latency ストリームを取得する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
query: {
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** ライブ M2TS Low Latency ストリーム */
200: {
content: {
"video/mp2t": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/live/{channelId}/m2ts/playlist": {
/** ライブ M2TS ストリームプレイリストを取得する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
query: {
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** ライブ M2TS ストリームプレイリストを取得しました */
200: {
content: {
"application/x-mpegURL": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/live/{channelId}/m2ts": {
/** ライブ M2TS ストリームを取得する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
query: {
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** ライブ M2TS ストリーム */
200: {
content: {
"video/mp2t": unknown;
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/live/{channelId}/hls": {
/** ライブ HLS ストリームを開始する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
query: {
/** ストリーミング設定 */
mode: components["parameters"]["StreamMode"];
};
};
responses: {
/** ライブ HLS ストリームを開始しました */
200: {
content: {
"application/json": components["schemas"]["StartStreamInfo"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/{streamId}/keep": {
/** ストリーム停止タイマーを更新する */
put: {
parameters: {
path: {
/** ストリーム id */
streamId: components["parameters"]["PathStreamId"];
};
};
responses: {
/** ストリーム停止タイマーを更新しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/streams/{streamId}": {
/** ストリームを停止する */
delete: {
parameters: {
path: {
/** ストリーム id */
streamId: components["parameters"]["PathStreamId"];
};
};
responses: {
/** ストリームを停止しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/schedules/detail/{programId}": {
/** 指定された番組表情報を取得する */
get: {
parameters: {
path: {
/** program id */
programId: components["parameters"]["PathProgramId"];
};
query: {
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
};
};
responses: {
/** 指定された番組表情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["ScheduleProgramItem"];
};
};
/** Not Found */
404: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/schedules/{channelId}": {
/** 指定された放送局の番組表情報を取得する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
query: {
/** 開始時刻 */
startAt: components["parameters"]["StartAt"];
/** 取得日数 */
days: components["parameters"]["Days"];
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
/** rawExtended が必要か */
needsRawExtended?: components["parameters"]["NeedsRawExtended"];
/** 無料放送のみ取得するか (true: 無料放送, false: 有料放送, 無指定: 全て) */
isFree?: components["parameters"]["IsFreeProgram"];
};
};
responses: {
/** 指定された放送局の番組表情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["Schedules"];
};
};
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/rules/{ruleId}/enable": {
/** ルールを有効化する */
put: {
parameters: {
path: {
/** ルールid */
ruleId: components["parameters"]["PathRuleId"];
};
};
responses: {
/** ルールを有効化しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/rules/{ruleId}/disable": {
/** ルールを無効化する */
put: {
parameters: {
path: {
/** ルールid */
ruleId: components["parameters"]["PathRuleId"];
};
};
responses: {
/** ルールを無効化しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/rules/{ruleId}": {
/** ルールを取得する */
get: {
parameters: {
path: {
/** ルールid */
ruleId: components["parameters"]["PathRuleId"];
};
};
responses: {
/** ルールを削除しました */
200: {
content: {
"application/json": components["schemas"]["Rule"];
};
};
/** 指定された id の rule がない */
404: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** ルールを更新する */
put: {
parameters: {
path: {
/** ルールid */
ruleId: components["parameters"]["PathRuleId"];
};
};
responses: {
/** ルールの更新に成功した */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AddRuleOption"];
};
};
};
/** ルールを削除する */
delete: {
parameters: {
path: {
/** ルールid */
ruleId: components["parameters"]["PathRuleId"];
};
};
responses: {
/** ルールを削除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/reserves/{reserveId}/skip": {
/** 予約の除外状態を解除する */
delete: {
parameters: {
path: {
/** 予約id */
reserveId: components["parameters"]["PathReserveId"];
};
};
responses: {
/** 予約の除外状態を解除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/reserves/{reserveId}/overlap": {
/** 予約の重複状態を解除する */
delete: {
parameters: {
path: {
/** 予約id */
reserveId: components["parameters"]["PathReserveId"];
};
};
responses: {
/** 予約の重複状態を解除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/reserves/{reserveId}": {
/** 指定された予約情報を取得する */
get: {
parameters: {
path: {
/** 予約id */
reserveId: components["parameters"]["PathReserveId"];
};
query: {
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
};
};
responses: {
/** 指定された予約情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["ReserveItem"];
};
};
/** Not Found */
404: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** 手動予約を更新する */
put: {
parameters: {
path: {
/** 予約id */
reserveId: components["parameters"]["PathReserveId"];
};
};
responses: {
/** 手動予約の更新に成功した */
201: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
requestBody: {
content: {
"application/json": components["schemas"]["EditManualReserveOption"];
};
};
};
/** 予約を削除する */
delete: {
parameters: {
path: {
/** 予約id */
reserveId: components["parameters"]["PathReserveId"];
};
};
responses: {
/** 予約を削除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recorded/{recordedId}/unprotect": {
/** 録画を自動削除対象に戻す */
put: {
parameters: {
path: {
/** recorded id */
recordedId: components["parameters"]["PathRecordedId"];
};
};
responses: {
/** 録画を自動削除対象に戻しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recorded/{recordedId}/protect": {
/** 録画を自動削除対象から除外する */
put: {
parameters: {
path: {
/** recorded id */
recordedId: components["parameters"]["PathRecordedId"];
};
};
responses: {
/** 録画を自動削除対象から除外しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recorded/{recordedId}/encode": {
/** エンコードを停止する */
delete: {
parameters: {
path: {
/** recorded id */
recordedId: components["parameters"]["PathRecordedId"];
};
};
responses: {
/** エンコードを停止しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/recorded/{recordedId}": {
/** 録画詳細情報を取得する */
get: {
parameters: {
path: {
/** recorded id */
recordedId: components["parameters"]["PathRecordedId"];
};
query: {
/** 半角文字で取得するか */
isHalfWidth: components["parameters"]["IsHalfWidth"];
};
};
responses: {
/** 録画詳細情報を取得しました */
200: {
content: {
"application/json": components["schemas"]["RecordedItem"];
};
};
/** 指定された id の 録画詳細情報がない */
404: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
/** 録画を削除する */
delete: {
parameters: {
path: {
/** recorded id */
recordedId: components["parameters"]["PathRecordedId"];
};
};
responses: {
/** 録画を削除しました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/encode/{encodeId}": {
/** エンコードをキャンセルする */
delete: {
parameters: {
path: {
/** エンコード id */
encodeId: components["parameters"]["PathEncodeId"];
};
};
responses: {
/** エンコードをキャンセルしました */
200: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/dropLogs/{dropLogFileId}": {
/** ドロップログを取得する */
get: {
parameters: {
path: {
/** drop log file id */
dropLogFileId: components["parameters"]["PathDropLogFileId"];
};
query: {
/** ファイル最大サイズ (kByte) */
maxsize?: components["parameters"]["LogFileMaxSize"];
};
};
responses: {
/** ドロップログを取得しました */
200: {
content: {
"text/plain": unknown;
};
};
/** Not Found */
404: unknown;
/** ファイルサイズが大きすぎる */
416: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
"/channels/{channelId}/logo": {
/** 放送局のロゴを取得する */
get: {
parameters: {
path: {
/** 放送局 id */
channelId: components["parameters"]["PathChannelId"];
};
};
responses: {
/** 放送局のロゴを取得しました */
200: {
content: {
"image/png": unknown;
};
};
/** Not Found */
404: unknown;
/** 予期しないエラー */
default: {
content: {
"application/json": components["schemas"]["Error"];
};
};
};
};
parameters: {};
};
}
export interface components {
schemas: {
/** @description 時刻 (ms) */
UnixtimeMS: number;
/** @description 放送局 id */
ChannelId: number;
/** @description service id */
ServiceId: number;
/** @description network id */
NetworkId: number;
/** @description program id */
ProgramId: number;
/** @description ルール id */
RuleId: number;
/** @description 予約 id */
ReserveId: number;
/** @description 録画済み番組 id */
RecordedId: number;
/** @description 録画済み番組履歴管理 id */
RecordedHistoryId: number;
/** @description ビデオファイル id */
VideoFileId: number;
/**
* @description ビデオファイル形式
* @enum {string}
*/
VideoFileType: "ts" | "encoded";
/** @description サムネイル id */
ThumbnailId: number;
/** @description ドロップログファイル id */
DropLogFileId: number;
/** @description 録画 tag id */
RecordedTagId: number;
/** @description エンコード id */
EncodeId: number;
/**
* @description 放送波タイプ
* @enum {string}
*/
ChannelType: "GR" | "BS" | "CS" | "SKY";
/** @description ジャンル */
ProgramGenreLv1: number;
/** @description サブジャンル */
ProgramGenreLv2: number;
/**
* @description 番組ビデオコーデック
* @enum {string}
*/
ProgramVideoType: "mpeg2" | "h.264" | "h.265";
/**
* @description 番組ビデオ解像度
* @enum {string}
*/
ProgramVideoResolution:
| "240p"
| "480i"
| "480p"
| "720p"
| "1080i"
| "1080p"
| "2160p"
| "4320p";
/**
* @description 番組オーディオサンプリングレート
* @enum {integer}
*/
ProgramAudioSamplingRate: 16000 | 22050 | 24000 | 32000 | 44100 | 48000;
/** @description ストリーム id */
StreamId: number;
/**
* @description ストリームの種類
* @enum {string}
*/
StreamType: "LiveStream" | "LiveHLS" | "RecordedStream" | "RecordedHLS";
/** @description URL Scheme */
URLSchemeInfo: {
ios?: string;
android?: string;
mac?: string;
win?: string;
};
/** @description M2TS形式ストリーミングパラメータ */
M2TSStreamParam: {
/** @description 表示名 */
name: string;
/** @description 無変換か */
isUnconverted: boolean;
};
/** @description コンフィグ */
Config: {
/** @description socket.io 通信で使用するポート */
socketIOPort: number;
/** @description 有効な放送波情報 */
broadcast: {
GR: boolean;
BS: boolean;
CS: boolean;
SKY: boolean;
};
/** @description 指定可能な録画ディレクトリ名 */
recorded: string[];
/** @description エンコードモード */
encode: string[];
/** @description URL Scheme 情報 */
urlscheme: {
m2ts: components["schemas"]["URLSchemeInfo"];
video: components["schemas"]["URLSchemeInfo"];
download: components["schemas"]["URLSchemeInfo"];
};
/** @description ライブ視聴が有効か */
isEnableLiveStream: boolean;
/** @description 録画済みの TS ファイルのストリーミングが有効か */
isEnableTSRecordedStream: boolean;
/** @description 録画済みのエンコード済みファイルのストリーミングが有効か */
isEnableEncodedRecordedStream: boolean;
/** @description ストリーミング設定 */
streamConfig?: {
/** @description ライブストリーミング設定 */
live?: {
/** @description ライブ M2TS ストリーミング設定 */
m2ts?: components["schemas"]["M2TSStreamParam"][];
/** @description ライブ M2TS Low Latency (mpegts.js 用) ストリーミング設定 */
m2tsll?: string[];
/** @description ライブ WebM ストリーミング設定 */
webm?: string[];
/** @description ライブ MP4 ストリーミング設定 */
mp4?: string[];
/** @description ライブ HLS ストリーミング設定 */
hls?: string[];
};
/** @description 録画済みストリーミング設定 */
recorded?: {
/** @description 録画済み TS ファイルのストリーミング設定 */
ts?: {
/** @description 録画済み TS ファイルの WebM ストリーミング設定 */
webm?: string[];
/** @description 録画済み TS ファイルの MP4 ストリーミング設定 */
mp4?: string[];
/** @description 録画済み TS ファイルの HLS ストリーミング設定 */
hls?: string[];
};
/** @description 録画エンコード済みファイルのストリーミング設定 */
encoded?: {
/** @description 録画エンコード済みファイルの WebM ストリーミング設定 */
webm?: string[];
/** @description 録画エンコード済みファイルの MP4 ストリーミング設定 */
mp4?: string[];
/** @description 録画エンコード済みファイルの HLS ストリーミング設定 */
hls?: string[];
};
};
};
/** @description kodi hosts */
kodiHosts?: string[];
};
ChannelItems: components["schemas"]["ChannelItem"][];
/** @description チャンネル情報 */
ChannelItem: {
id: components["schemas"]["ChannelId"];
serviceId: components["schemas"]["ServiceId"];
networkId: components["schemas"]["NetworkId"];
/** @description 放送局名 */
name: string;
/** @description 放送局名(半角) */
halfWidthName: string;
/** @description ロゴデータを持っているか */
hasLogoData: boolean;
channelType: components["schemas"]["ChannelType"];
channel: string;
} & {
remoteControlKeyId: unknown;
};
/** @description 番組表の放送局データ */
ScheduleChannleItem: {
id: components["schemas"]["ChannelId"];
serviceId: components["schemas"]["ServiceId"];
networkId: components["schemas"]["NetworkId"];
/** @description 放送局名 */
name: string;
/** @description リモコン番号 */
remoteControlKeyId?: number;
/** @description ロゴデータを持っているか */
hasLogoData: boolean;
channelType: components["schemas"]["ChannelType"];
};
/** @description 番組表の番組データ */
ScheduleProgramItem: {
id: components["schemas"]["ProgramId"];
channelId: components["schemas"]["ChannelId"];
startAt: components["schemas"]["UnixtimeMS"];
endAt: components["schemas"]["UnixtimeMS"];
/** @description 無料放送か */
isFree: boolean;
/** @description 番組名 */
name: string;
/** @description 番組詳細 */
description?: string;
/** @description 番組拡張 */
extended?: string;
/** @description 番組拡張 (Mirakurun の extended) */
rawExtended?: { [key: string]: unknown };
genre1?: components["schemas"]["ProgramGenreLv1"];
subGenre1?: components["schemas"]["ProgramGenreLv2"];
genre2?: components["schemas"]["ProgramGenreLv1"];
subGenre2?: components["schemas"]["ProgramGenreLv2"];
genre3?: components["schemas"]["ProgramGenreLv1"];
subGenre3?: components["schemas"]["ProgramGenreLv2"];
videoType?: components["schemas"]["ProgramVideoType"];
videoResolution?: components["schemas"]["ProgramVideoResolution"];
videoStreamContent?: number;
videoComponentType?: number;
audioSamplingRate?: components["schemas"]["ProgramAudioSamplingRate"];
audioComponentType?: number;
};
ScheduleProgramItems: components["schemas"]["ScheduleProgramItem"][];
/** @description 番組表データ */
Schedule: {
channel: components["schemas"]["ScheduleChannleItem"];
programs: components["schemas"]["ScheduleProgramItems"];
};
Schedules: components["schemas"]["Schedule"][];
/** @description 番組検索オプション */
ScheduleSearchOption: {
option: components["schemas"]["RuleSearchOption"];
/** @description 半角文字で取得するか */
isHalfWidth: boolean;
/** @description 検索結果取得最大件数 */
limit?: number;
};
/** @description ルール情報 */
Rules: {
rules: components["schemas"]["Rule"][];
/** @description ルール総件数 */
total: number;
};
/** @description ルール */
Rule: components["schemas"]["AddRuleOption"] & {
id: components["schemas"]["RuleId"];
/** @description 予約件数 */
reservesCnt?: number;
};
RuleKeywordItem: {
id: components["schemas"]["RuleId"];
keyword: string;
};
/** @description ルールキーワード検索結果 */
RuleKeywordInfo: {
items: components["schemas"]["RuleKeywordItem"][];
};
/** @description ルール追加プション */
AddRuleOption: {
/** @description 時刻指定予約か */
isTimeSpecification: boolean;
searchOption: components["schemas"]["RuleSearchOption"];
reserveOption: components["schemas"]["RuleReserveOption"];
saveOption?: components["schemas"]["ReserveSaveOption"];
encodeOption?: components["schemas"]["ReserveEncodedOption"];
};
/** @description ルール検索オプション */
RuleSearchOption: {
/** @description 検索キーワード */
keyword?: string;
/** @description 除外検索キーワード */
ignoreKeyword?: string;
/** @description 大文字小文字区別有効化 (検索キーワード) */
keyCS?: boolean;
/** @description 正規表現 (検索キーワード) */
keyRegExp?: boolean;
/** @description 番組名 (検索キーワード) */
name?: boolean;
/** @description 概要 (検索キーワード) */
description?: boolean;
/** @description 詳細 (検索キーワード) */
extended?: boolean;
/** @description 大文字小文字区別有効化 (除外検索キーワード) */
ignoreKeyCS?: boolean;
/** @description 正規表現 (除外検索キーワード) */
ignoreKeyRegExp?: boolean;
/** @description 番組名 (除外検索キーワード) */
ignoreName?: boolean;
/** @description 概要 (除外検索キーワード) */
ignoreDescription?: boolean;
/** @description 詳細 (除外検索キーワード) */
ignoreExtended?: boolean;
/** @description GR */
GR?: boolean;
/** @description BS */
BS?: boolean;
/** @description CS */
CS?: boolean;
/** @description SKY */
SKY?: boolean;
/** @description 放送局 */
channelIds?: components["schemas"]["ChannelId"][];
/** @description ジャンル */
genres?: components["schemas"]["Genre"][];
/** @description 時刻範囲 */
times?: components["schemas"]["SearchTime"][];
/** @description 無料放送か */
isFree?: boolean;
/** @description 番組最小時間 (分) */
durationMin?: number;
/** @description 番組最大時間 (分) */
durationMax?: number;
/** @description 検索対象期間 */
searchPeriods?: components["schemas"]["SearchPeriod"][];
};
/** @description ジャンル設定 */
Genre: {
genre: components["schemas"]["ProgramGenreLv1"];
subGenre?: components["schemas"]["ProgramGenreLv2"];
};
/** @description 時刻範囲指定オプション */
SearchTime: {
/** @description 開始時刻 1 - 23, 時刻予約の場合は 0 時を 0 とした 0 ~ (60 * 50 * 24) - 1 秒までの開始時刻を指定する */
start?: number;
/** @description 開始時刻からの時刻範囲(時) 1 - 23, 時刻予約の場合は秒で時間の長さを指定する 1 ~ 60 * 50 * 24 秒 */
range?: number;
/** @description 曜日指定 0x01, 0x02, 0x04, 0x08, 0x10, 0x20 ,0x40 が日〜土に対応するので and 演算で曜日を指定する */
week: number;
};
/** @description 検索対象期間オプション */
SearchPeriod: {
startAt: components["schemas"]["UnixtimeMS"];
endAt: components["schemas"]["UnixtimeMS"];
};
/** @description ルール予約オプション */
RuleReserveOption: {
/** @description ルールが有効か */
enable: boolean;
/** @description 末尾切れを許可するか */
allowEndLack: boolean;
/** @description 録画済みの重複番組を排除するか */
avoidDuplicate: boolean;
/** @description 重複を避ける期間 */
periodToAvoidDuplicate?: number;
tags?: components["schemas"]["RecordedTagId"][];
};
/** @description 予約保存オプション */
ReserveSaveOption: {
/** @description 親保存ディレクトリ */
parentDirectoryName?: string;
/** @description 保存ディレクトリ */
directory?: string;
/** @description ファイル名フォーマット */
recordedFormat?: string;
};
/** @description 予約エンコードオプション */
ReserveEncodedOption: {
/** @description エンコードモード1 */
mode1?: string;
/** @description エンコードモード1親ディレクトリ */
encodeParentDirectoryName1?: string;
/** @description エンコードモード1ディレクトリ */
directory1?: string;
/** @description エンコードモード2 */
mode2?: string;
/** @description エンコードモード2親ディレクトリ */
encodeParentDirectoryName2?: string;
/** @description エンコードモード2ディレクトリ */
directory2?: string;
/** @description エンコードモード3 */
mode3?: string;
/** @description エンコードモード3親ディレクトリ */
encodeParentDirectoryName3?: string;
/** @description エンコードモード3ディレクトリ */
directory3?: string;
/** @description エンコード後に ts を削除するか */
isDeleteOriginalAfterEncode: boolean;
};
/** @description ルール追加成功応答データ */
AddedRule: {
ruleId: components["schemas"]["RuleId"];
};
/** @description 手動予約編集オプション */
EditManualReserveOption: {
/** @description 末尾切れを許すか */
allowEndLack: boolean;
tags?: components["schemas"]["RecordedTagId"][];
saveOption?: components["schemas"]["ReserveSaveOption"];
encodeOption?: components["schemas"]["ReserveEncodedOption"];
};
/** @description 手動予約オプション */
ManualReserveOption: components["schemas"]["EditManualReserveOption"] & {
programId?: components["schemas"]["ProgramId"];
/** @description 時刻指定オプション */
timeSpecifiedOption?: {
/** @description 番組名 */
name: string;
channelId: components["schemas"]["ChannelId"];
startAt: components["schemas"]["UnixtimeMS"];
endAt: components["schemas"]["UnixtimeMS"];
};
};
/** @description 予約成功応答データ */
AddedReserve: {
reserveId: components["schemas"]["ReserveId"];
};
/** @description 予約番組情報 */
ReserveItem: {
id: components["schemas"]["ReserveId"];
ruleId?: components["schemas"]["RuleId"];
isSkip: boolean;
isConflict: boolean;
isOverlap: boolean;
allowEndLack: boolean;
isTimeSpecified: boolean;
tags?: components["schemas"]["RecordedTagId"][];
parentDirectoryName?: string;
directory?: string;
recordedFormat?: string;
encodeMode1?: number;
encodeParentDirectoryName1?: string;
encodeDirectory1?: string;
encodeMode2?: number;
encodeParentDirectoryName2?: string;
encodeDirectory2?: string;
encodeMode3?: number;
encodeParentDirectoryName3?: string;
encodeDirectory3?: number;
isDeleteOriginalAfterEncode: boolean;
programId?: components["schemas"]["ProgramId"];
channelId: components["schemas"]["ChannelId"];
startAt: components["schemas"]["UnixtimeMS"];
endAt: components["schemas"]["UnixtimeMS"];
name: string;
description?: string;
extended?: string;
rawExtended?: { [key: string]: unknown };
genre1?: components["schemas"]["ProgramGenreLv1"];
subGenre1?: components["schemas"]["ProgramGenreLv2"];
genre2?: components["schemas"]["ProgramGenreLv1"];
subGenre2?: components["schemas"]["ProgramGenreLv2"];
genre3?: components["schemas"]["ProgramGenreLv1"];
subGenre3?: components["schemas"]["ProgramGenreLv2"];
videoType?: components["schemas"]["ProgramVideoType"];
videoResolution?: components["schemas"]["ProgramVideoResolution"];
videoStreamContent?: number;
videoComponentType?: number;
audioSamplingRate?: components["schemas"]["ProgramAudioSamplingRate"];
audioComponentType?: number;
};
/** @description 予約情報 */
Reserves: {
reserves: components["schemas"]["ReserveItem"][];
/** @description 予約総件数 */
total: number;
};
/** @description 予約リストitem */
ReserveListItem: {
reserveId: components["schemas"]["ReserveId"];
programId?: components["schemas"]["ProgramId"];
ruleId?: components["schemas"]["RuleId"];
};
/** @description 予約, 除外, 重複, 競合の reserveId のリスト */
ReserveLists: {
normal: components["schemas"]["ReserveListItem"];
conflicts: components["schemas"]["ReserveListItem"];
skips: components["schemas"]["ReserveListItem"];
overlaps: components["schemas"]["ReserveListItem"];
};
/** @description 予約カウント */
ReserveCnts: {
/** @description 通常予約数 */
normal: number;
/** @description 競合予約数 */
conflicts: number;
/** @description 競合予約数 */
skips: number;
/** @description 重複予約数 */
overlaps: number;
};
/** @description 録画情報 */
Records: {
records: components["schemas"]["RecordedItem"][];
/** @description 録画総件数 */
total: number;
};
/** @description 録画番組情報 */
RecordedItem: {
id: components["schemas"]["RecordedId"];
ruleId?: components["schemas"]["RuleId"];
programId?: components["schemas"]["ProgramId"];
channelId?: components["schemas"]["ChannelId"];
startAt: components["schemas"]["UnixtimeMS"];
endAt: components["schemas"]["UnixtimeMS"];
/** @description 番組名 */
name: string;
/** @description 番組詳細 */
description?: string;
/** @description 番組拡張 */
extended?: string;
/** @description 番組拡張 (Mirakurun の extended) */
rawExtended?: { [key: string]: unknown };
genre1?: components["schemas"]["ProgramGenreLv1"];
subGenre1?: components["schemas"]["ProgramGenreLv2"];
genre2?: components["schemas"]["ProgramGenreLv1"];
subGenre2?: components["schemas"]["ProgramGenreLv2"];
genre3?: components["schemas"]["ProgramGenreLv1"];
subGenre3?: components["schemas"]["ProgramGenreLv2"];
videoType?: components["schemas"]["ProgramVideoType"];
videoResolution?: components["schemas"]["ProgramVideoResolution"];
videoStreamContent?: number;
videoComponentType?: number;
audioSamplingRate?: components["schemas"]["ProgramAudioSamplingRate"];
audioComponentType?: number;
/** @description 録画中か */
isRecording: boolean;
thumbnails?: components["schemas"]["ThumbnailId"][];
videoFiles?: components["schemas"]["VideoFile"][];
dropLog?: components["schemas"]["DropLogFile"];
tags?: components["schemas"]["RecordedTag"][];
/** @description エンコード中か */
isEncoding: boolean;
/** @description 自動録画削除対象外か */
isProtected: boolean;
};
/** @description 新規追加する録画番組情報 */
CreateNewRecordedOption: {
ruleId?: components["schemas"]["RuleId"];
channelId: components["schemas"]["ChannelId"];
startAt: components["schemas"]["UnixtimeMS"];
endAt: components["schemas"]["UnixtimeMS"];
/** @description 番組名 */
name: string;
/** @description 番組詳細 */
description?: string;
/** @description 番組拡張 */
extended?: string;
genre1?: components["schemas"]["ProgramGenreLv1"];
subGenre1?: components["schemas"]["ProgramGenreLv2"];
genre2?: components["schemas"]["ProgramGenreLv1"];
subGenre2?: components["schemas"]["ProgramGenreLv2"];
genre3?: components["schemas"]["ProgramGenreLv1"];
subGenre3?: components["schemas"]["ProgramGenreLv2"];
};
/** @description 録画番組情報の新規作成成功応答データ */
CreatedNewRecorded: {
recordedId: components["schemas"]["RecordedId"];
};
/** @description ビデオファイル情報 */
VideoFile: {
id: components["schemas"]["VideoFileId"];
/** @description ビデオ名 (Web上の表示名) */
name: string;
/** @description ビデオファイル名 */
filename?: string;
type: components["schemas"]["VideoFileType"];
/** @description ファイルサイズ */
size: number;
};
/** @description ビデオファイルの長さ */
VideoFileDuration: {
/** @description 動画長(秒) */
duration: number;
};
/** @description ドロップログファイル情報 */
DropLogFile: {
id: components["schemas"]["DropLogFileId"];
/** @description エラーカウント */
errorCnt: number;
/** @description ドロップカウント */
dropCnt: number;
/** @description スクランブルカウント */
scramblingCnt: number;
};
/** @description タグ情報 */
RecordedTag: {
id: components["schemas"]["RecordedTagId"];
/** @description タグ名 */
name: string;
/** @description 色 */
color: string;
};
/** @description 録画タグ情報 */
RecordedTags: {
tags: components["schemas"]["RecordedTag"][];
/** @description 予約総件数 */
total: number;
};
/** @description 録画タグ追加プション */
AddRecordedTagOption: {
/** @description タグ名 */
name: string;
/** @description 色 */
color: string;
};
/** @description 録画番組とタグの関連付けオプション */
RelateRecordedTagOption: {
recordedId: components["schemas"]["RecordedId"];
};
/** @description タグ追加成功応答データ */
AddedRecordedTag: {
tagId: components["schemas"]["RecordedTagId"];
};
/** @description recorded が持つ channelId のリスト */
RecordedChannelListItem: {
/** @description 録画数 */
cnt: number;
channelId: components["schemas"]["ChannelId"];
};
/** @description recorded が持つ genre のリスト */
RecordedGenreListItem: {
/** @description 録画数 */
cnt: number;
genre: components["schemas"]["ProgramGenreLv1"];
};
/** @description recorded が持つ検索オプション情報 */
RecordedSearchOptions: {
channels: components["schemas"]["RecordedChannelListItem"][];
genres: components["schemas"]["RecordedGenreListItem"][];
};
/** @description kodiへビデオリンクを送信するときのオプション */
SendVideoLinkToKodiOption: {
/** @description config の kodi の name */
kodiName: string;
};
/** @description ビデオファイルをアップロード */
UploadVideoFileOption: {
recordedId: components["schemas"]["RecordedId"];
/** @description 親保存ディレクトリ */
parentDirectoryName: string;
/** @description 保存ディレクトリ */
subDirectory?: string;
/** @description 表示名 */
viewName: string;
fileType: components["schemas"]["VideoFileType"];
/** Format: binary */
file: string;
};
/** @description バージョン情報 */
Version: {
version: string;
};
/** @description エンコードプログラム情報 */
EncodeProgramItem: {
id: components["schemas"]["EncodeId"];
/** @description エンコード名 */
mode: string;
recorded: components["schemas"]["RecordedItem"];
/** @description 進捗 */
percent?: number;
/** @description ログ */
log?: string;
};
/** @description エンコード情報 */
EncodeInfo: {
runningItems: components["schemas"]["EncodeProgramItem"][];
waitItems: components["schemas"]["EncodeProgramItem"][];
};
/** @description エンコード追加時のオプション */
AddEncodeProgramOption: {
recordedId: components["schemas"]["RecordedId"];
sourceVideoFileId: components["schemas"]["VideoFileId"];
/** @description 親ディレクトリ名 config recorded の name */
parentDir: string;
/** @description 親ディレクトリ以下のディレクトリ設定 */
directory?: string;
/** @description エンコードプリセット名 config encode の name */
mode: string;
/** @description 元ファイルを削除するか */
removeOriginal: boolean;
};
/** @description エンコード手動追加時のオプション */
AddManualEncodeProgramOption: {
recordedId: components["schemas"]["RecordedId"];
sourceVideoFileId: components["schemas"]["VideoFileId"];
/** @description 親ディレクトリ名 config recorded の name, isSaveSameDirectory が false の場合は必須 */
parentDir?: string;
/** @description 親ディレクトリ以下のディレクトリ設定 */
directory?: string;
/** @description ソースビデオファイルと同じ場所に保存するか */
isSaveSameDirectory?: boolean;
/** @description エンコードプリセット名 config encode の name */
mode: string;
/** @description 元ファイルを削除するか */
removeOriginal: boolean;
};
/** @description エンコード追加成功応答データ */
AddedEncode: {
encodeId: components["schemas"]["EncodeId"];
};
/** @description ストリーム情報 */
StreamInfoItem: {
streamId: components["schemas"]["StreamId"];
type: components["schemas"]["StreamType"];
/** @description ストリーミング設定 */
mode: number;
/** @description 放送波が有効か (HLS 形式の場合有効) */
isEnable: boolean;
channelId: components["schemas"]["ChannelId"];
videoFileId?: components["schemas"]["VideoFileId"];
recordedId?: components["schemas"]["RecordedId"];
/** @description ストリーミング番組名 */
name: string;
startAt: components["schemas"]["UnixtimeMS"];
endAt: components["schemas"]["UnixtimeMS"];
/** @description ストリーミング番組詳細 */
description?: string;
/** @description ストリーミング番組拡張 */
extended?: string;
};
StreamInfo: {
items: components["schemas"]["StreamInfoItem"][];
};
/** @description ストリーム開始情報 */
StartStreamInfo: {
streamId: components["schemas"]["StreamId"];
};
/** @description ストレージ使用状況 */
StorageItem: {
/** @description ディスク名 */
name: string;
/** @description 空き容量 (byte) */
available: number;
/** @description 使用量 (byte) */
used: number;
/** @description 総容量 (byte) */
total: number;
};
/** @description ストレージ情報 */
StorageInfo: {
items: components["schemas"]["StorageItem"][];
};
Error: {
/**
* Format: int32
* @description HTTPステータスコード
*/
code: number;
/** @description エラーメッセージ */
message: string;
/** @description Error */
errors?: string;
};
};
parameters: {
/** @description offset */
Offset: number;
/** @description limit */
Limit: number;
/** @description 開始時刻 */
StartAt: components["schemas"]["UnixtimeMS"];
/** @description 終了時刻 */
EndAt: components["schemas"]["UnixtimeMS"];
/** @description 取得日数 */
Days: number;
/** @description 追加時間 (ms) */
AddtionTime: components["schemas"]["UnixtimeMS"];
/** @description 半角文字で取得するか */
IsHalfWidth: boolean;
/** @description rawExtended が必要か */
NeedsRawExtended: boolean;
/** @description 無料放送のみ取得するか (true: 無料放送, false: 有料放送, 無指定: 全て) */
IsFreeProgram: boolean;
/** @description 放送局 id */
QueryChannelId: components["schemas"]["ChannelId"];
/** @description recorded id */
QueryRecordedId: components["schemas"]["RecordedId"];
/** @description ルールid */
QueryRuleId: components["schemas"]["RuleId"];
/** @description ジャンル */
QueryProgramGenre: components["schemas"]["ProgramGenreLv1"];
/** @description キーワード */
QueryKeyword: string;
/** @description オリジナルファイルを含むか */
QueryHasOriginalFile: boolean;
/** @description name */
QueryName: string;
/** @description 除外する RecordedTagId */
QueryExcludeRecordedTagId: string[];
/** @description GR */
requiredGR: boolean;
/** @description BS */
requiredBS: boolean;
/** @description CS */
requiredCS: boolean;
/** @description SKY */
requiredSKY: boolean;
/** @description 予約情報取得タイプ */
GetReserveType: "all" | "normal" | "conflict" | "skip" | "overlap";
/** @description ファイルをダウンロードするか */
IsDownload: boolean;
/** @description 逆順で取得するか */
IsReverse: boolean;
/** @description ファイル最大サイズ (kByte) */
LogFileMaxSize: number;
/** @description 放送局 id */
PathChannelId: components["schemas"]["ChannelId"];
/** @description program id */
PathProgramId: components["schemas"]["ProgramId"];
/** @description 予約id */
PathReserveId: number;
/** @description ルールid */
PathRuleId: number;
/** @description thumbnail id */
PathThumbnailId: number;
/** @description recorded id */
PathRecordedId: number;
/** @description recorded tag id */
PathRecordedTagId: number;
/** @description video file id */
PathVideoFileId: number;
/** @description drop log file id */
PathDropLogFileId: number;
/** @description エンコード id */
PathEncodeId: number;
/** @description ストリーム id */
PathStreamId: components["schemas"]["StreamId"];
/** @description ストリーミング設定 */
StreamMode: number;
/** @description 再生位置 */
StreamPlayPosition: number;
/** @description 半角文字で取得するか */
IPTVIsHalfWidth: boolean;
/** @description 取得日数 */
IPTVDays: number;
};
}
export interface operations {}
export interface external {} | the_stack |
import CountPerDayContract from '@DataContracts/Aggregate/CountPerDayContract';
import ArtistContract from '@DataContracts/Artist/ArtistContract';
import CommentContract from '@DataContracts/CommentContract';
import NewSongCheckResultContract from '@DataContracts/NewSongCheckResultContract';
import PagingProperties from '@DataContracts/PagingPropertiesContract';
import PartialFindResultContract from '@DataContracts/PartialFindResultContract';
import LyricsForSongContract from '@DataContracts/Song/LyricsForSongContract';
import SongApiContract from '@DataContracts/Song/SongApiContract';
import SongContract from '@DataContracts/Song/SongContract';
import SongForEditContract from '@DataContracts/Song/SongForEditContract';
import SongWithPVPlayerAndVoteContract from '@DataContracts/Song/SongWithPVPlayerAndVoteContract';
import SongListBaseContract from '@DataContracts/SongListBaseContract';
import TagUsageForApiContract from '@DataContracts/Tag/TagUsageForApiContract';
import RatedSongForUserForApiContract from '@DataContracts/User/RatedSongForUserForApiContract';
import AjaxHelper from '@Helpers/AjaxHelper';
import TimeUnit from '@Models/Aggregate/TimeUnit';
import ContentLanguagePreference from '@Models/Globalization/ContentLanguagePreference';
import PVService from '@Models/PVs/PVService';
import SongVoteRating from '@Models/SongVoteRating';
import SongType from '@Models/Songs/SongType';
import functions from '@Shared/GlobalFunctions';
import HttpClient, { HeaderNames, MediaTypes } from '@Shared/HttpClient';
import UrlMapper from '@Shared/UrlMapper';
import AdvancedSearchFilter from '@ViewModels/Search/AdvancedSearchFilter';
import BaseRepository from './BaseRepository';
import { CommonQueryParams } from './BaseRepository';
import ICommentRepository from './ICommentRepository';
// Repository for managing songs and related objects.
// Corresponds to the SongController class.
export default class SongRepository
extends BaseRepository
implements ICommentRepository {
private readonly urlMapper: UrlMapper;
public constructor(private readonly httpClient: HttpClient, baseUrl: string) {
super(baseUrl);
this.urlMapper = new UrlMapper(baseUrl);
this.get = <T>(relative: string, params: any): Promise<T> => {
return this.httpClient.get<T>(this.mapUrl(relative), params);
};
this.getJSON = <T>(relative: string, params: any): Promise<T> => {
return this.httpClient.get<T>(this.mapUrl(relative), params);
};
this.mapUrl = (relative: string): string => {
return `${functions.mergeUrls(baseUrl, '/Song')}${relative}`;
};
this.post = <T>(relative: string, params: any): Promise<T> => {
return this.httpClient.post<T>(
this.mapUrl(relative),
AjaxHelper.stringify(params),
{
headers: {
[HeaderNames.ContentType]: MediaTypes.APPLICATION_FORM_URLENCODED,
},
},
);
};
this.pvForSongAndService = ({
songId,
pvService,
}: {
songId: number;
pvService: PVService;
}): Promise<string> => {
return this.get<string>('/PVForSongAndService', {
songId: songId,
service: PVService[pvService],
});
};
this.pvPlayerWithRating = ({
songId,
}: {
songId: number;
}): Promise<SongWithPVPlayerAndVoteContract> => {
return this.getJSON<SongWithPVPlayerAndVoteContract>(
'/PVPlayerWithRating',
{ songId: songId },
);
};
this.songListsForSong = ({
songId,
}: {
songId: number;
}): Promise<string> => {
return this.get<string>('/SongListsForSong', { songId: songId });
};
this.songListsForUser = ({
ignoreSongId,
}: {
ignoreSongId: number;
}): Promise<SongListBaseContract[]> => {
return this.post<SongListBaseContract[]>('/SongListsForUser', {
ignoreSongId: ignoreSongId,
});
};
}
public addSongToList = ({
listId,
songId,
notes,
newListName,
}: {
listId: number;
songId: number;
notes: string;
newListName: string;
}): Promise<void> => {
return this.post<void>('/AddSongToList', {
listId: listId,
songId: songId,
notes: notes,
newListName: newListName,
});
};
public createComment = ({
entryId: songId,
contract,
}: {
entryId: number;
contract: CommentContract;
}): Promise<CommentContract> => {
return this.httpClient.post<CommentContract>(
this.urlMapper.mapRelative(`/api/songs/${songId}/comments`),
contract,
);
};
public createReport = ({
songId,
reportType,
notes,
versionNumber,
}: {
songId: number;
reportType: string;
notes: string;
versionNumber?: number;
}): Promise<void> => {
return this.httpClient.post<void>(
this.urlMapper.mapRelative('/Song/CreateReport'),
AjaxHelper.stringify({
reportType: reportType,
notes: notes,
songId: songId,
versionNumber: versionNumber,
}),
{
headers: {
[HeaderNames.ContentType]: MediaTypes.APPLICATION_FORM_URLENCODED,
},
},
);
};
public deleteComment = ({
commentId,
}: {
commentId: number;
}): Promise<void> => {
return this.httpClient.delete<void>(
this.urlMapper.mapRelative(`/api/songs/comments/${commentId}`),
);
};
public findDuplicate = ({
params,
}: {
params: {
term: string[];
pv: string[];
artistIds: number[];
getPVInfo: boolean;
};
}): Promise<NewSongCheckResultContract> => {
return this.httpClient.get<NewSongCheckResultContract>(
this.urlMapper.mapRelative('/api/songs/findDuplicate'),
params,
);
};
private get: <T>(relative: string, params: any) => Promise<T>;
public getByNames({
names,
ignoreIds,
lang,
songTypes,
}: {
names: string[];
ignoreIds: number[];
lang: ContentLanguagePreference;
songTypes?: SongType[];
}): Promise<SongApiContract[]> {
const url = functions.mergeUrls(this.baseUrl, '/api/songs/by-names');
return this.httpClient.get<SongApiContract[]>(url, {
names: names,
songTypes: songTypes,
lang: lang,
ignoreIds: ignoreIds,
});
}
public getComments = ({
entryId: songId,
}: {
entryId: number;
}): Promise<CommentContract[]> => {
return this.httpClient.get<CommentContract[]>(
this.urlMapper.mapRelative(`/api/songs/${songId}/comments`),
);
};
public getForEdit = ({
id,
}: {
id: number;
}): Promise<SongForEditContract> => {
var url = functions.mergeUrls(this.baseUrl, `/api/songs/${id}/for-edit`);
return this.httpClient.get<SongForEditContract>(url);
};
public getLyrics = ({
lyricsId,
songVersion,
}: {
lyricsId: number;
songVersion: number;
}): Promise<LyricsForSongContract> => {
return this.httpClient.get<LyricsForSongContract>(
this.urlMapper.mapRelative(
`/api/songs/lyrics/${lyricsId}?v=${songVersion}`,
),
);
};
private getJSON: <T>(relative: string, params: any) => Promise<T>;
public getOneWithComponents = ({
id,
fields,
lang,
}: {
id: number;
fields: string;
lang: ContentLanguagePreference;
}): Promise<SongApiContract> => {
var url = functions.mergeUrls(this.baseUrl, `/api/songs/${id}`);
return this.httpClient.get<SongApiContract>(url, {
fields: fields,
lang: lang,
});
};
public getOne = ({
id,
lang,
}: {
id: number;
lang: ContentLanguagePreference;
}): Promise<SongContract> => {
var url = functions.mergeUrls(this.baseUrl, `/api/songs/${id}`);
return this.httpClient.get<SongContract>(url, {
fields: 'AdditionalNames',
lang: lang,
});
};
public getListByParams({
params,
}: {
params: SongQueryParams;
}): Promise<PartialFindResultContract<SongApiContract>> {
const url = functions.mergeUrls(this.baseUrl, '/api/songs');
return this.httpClient.get<PartialFindResultContract<SongApiContract>>(
url,
params,
);
}
/**
 * Finds songs matching the given filter criteria.
 * Some falsy filter values (artistParticipationStatus, childVoicebanks,
 * includeMembers, unifyTypesAndTags, parentSongId) are coerced to undefined
 * so they are omitted from the query string instead of being sent as empty values.
 * @returns A page of matching songs with paging information.
 */
public getList = ({
	paging,
	lang,
	query,
	sort,
	songTypes,
	afterDate,
	beforeDate,
	tagIds,
	childTags,
	unifyTypesAndTags,
	artistIds,
	artistParticipationStatus,
	childVoicebanks,
	includeMembers,
	eventId,
	onlyWithPvs,
	pvServices,
	since,
	minScore,
	userCollectionId,
	parentSongId,
	fields,
	status,
	advancedFilters,
	minMilliBpm,
	maxMilliBpm,
	minLength,
	maxLength,
}: {
	paging: PagingProperties;
	lang: ContentLanguagePreference;
	query: string;
	sort: string;
	songTypes?: string;
	afterDate?: Date;
	beforeDate?: Date;
	tagIds: number[];
	childTags: boolean;
	unifyTypesAndTags: boolean;
	artistIds: number[];
	artistParticipationStatus: string;
	childVoicebanks: boolean;
	includeMembers: boolean;
	eventId?: number;
	onlyWithPvs: boolean;
	pvServices?: string;
	since?: number;
	minScore?: number;
	userCollectionId?: number;
	parentSongId?: number;
	fields: string;
	status?: string;
	advancedFilters?: AdvancedSearchFilter[];
	minMilliBpm?: number;
	maxMilliBpm?: number;
	minLength?: number;
	maxLength?: number;
}): Promise<PartialFindResultContract<SongContract>> => {
	// `const` + property shorthand instead of the legacy `var`/longhand form.
	const url = functions.mergeUrls(this.baseUrl, '/api/songs');
	const data = {
		start: paging.start,
		getTotalCount: paging.getTotalCount,
		maxResults: paging.maxEntries,
		query,
		fields,
		lang,
		nameMatchMode: 'Auto',
		sort,
		songTypes,
		afterDate: this.getDate(afterDate),
		beforeDate: this.getDate(beforeDate),
		tagId: tagIds,
		childTags,
		unifyTypesAndTags: unifyTypesAndTags || undefined,
		artistId: artistIds,
		artistParticipationStatus: artistParticipationStatus || undefined,
		childVoicebanks: childVoicebanks || undefined,
		includeMembers: includeMembers || undefined,
		releaseEventId: eventId,
		onlyWithPvs,
		pvServices,
		since,
		minScore,
		userCollectionId,
		parentSongId: parentSongId || undefined,
		status,
		advancedFilters,
		minMilliBpm,
		maxMilliBpm,
		minLength,
		maxLength,
	};
	return this.httpClient.get<PartialFindResultContract<SongContract>>(
		url,
		data,
	);
};
/**
 * Gets per-time-unit song counts, optionally scoped to one artist.
 * The TimeUnit enum's name string (reverse mapping) is sent to the API.
 */
public getOverTime = ({
	timeUnit,
	artistId,
}: {
	timeUnit: TimeUnit;
	artistId: number;
}): Promise<CountPerDayContract[]> => {
	// `const` instead of the legacy `var`.
	const url = this.urlMapper.mapRelative('/api/songs/over-time');
	return this.httpClient.get<CountPerDayContract[]>(url, {
		timeUnit: TimeUnit[timeUnit],
		artistId,
	});
};
/** Gets the PV ID for a song on the given PV service. */
public getPvId = ({
	songId,
	pvService,
}: {
	songId: number;
	pvService: PVService;
}): Promise<string> => {
	const url = this.urlMapper.mapRelative(`/api/songs/${songId}/pvs`);
	const queryParams = { service: PVService[pvService] };
	return this.httpClient.get<string>(url, queryParams);
};
/** Gets the per-user ratings for a song, including each user's main picture. */
public getRatings = ({
	songId,
}: {
	songId: number;
}): Promise<RatedSongForUserForApiContract[]> => {
	const url = this.urlMapper.mapRelative(`/api/songs/${songId}/ratings`);
	return this.httpClient.get<RatedSongForUserForApiContract[]>(url, {
		userFields: 'MainPicture',
	});
};
/** Gets suggested tags for the given song. */
public getTagSuggestions = ({
	songId,
}: {
	songId: number;
}): Promise<TagUsageForApiContract[]> => {
	const url = this.urlMapper.mapRelative(`/api/songs/${songId}/tagSuggestions`);
	return this.httpClient.get<TagUsageForApiContract[]>(url);
};
// Maps a relative URL to an absolute one.
private mapUrl: (relative: string) => string;
// Performs a POST request and parses the JSON response — implementation presumably assigned elsewhere; confirm against constructor.
private post: <T>(relative: string, params: any) => Promise<T>;
// Resolves a PV (string payload) for the given song on the given service — implementation assigned elsewhere.
public pvForSongAndService: ({
songId,
pvService,
}: {
songId: number;
pvService: PVService;
}) => Promise<string>;
/** Gets the embeddable PV player (with vote state) for the given song. */
public pvPlayer = ({
	songId,
	params,
}: {
	songId: number;
	params: PVEmbedParams;
}): Promise<SongWithPVPlayerAndVoteContract> => {
	const relative = `/PVPlayer/${songId}`;
	return this.getJSON<SongWithPVPlayerAndVoteContract>(relative, params);
};
// Gets the PV player together with the current user's vote — implementation assigned elsewhere.
public pvPlayerWithRating: ({
songId,
}: {
songId: number;
}) => Promise<SongWithPVPlayerAndVoteContract>;
//public songListsForSong: (songId: number, callback: (result: SongListContract[]) => void) => void;
// Gets the song lists that contain the given song — presumably; confirm against the implementation.
public songListsForSong: ({ songId }: { songId: number }) => Promise<string>;
// Gets song lists for the current user; ignoreSongId is presumably used for filtering — confirm against the implementation.
public songListsForUser: ({
ignoreSongId,
}: {
ignoreSongId: number;
}) => Promise<SongListBaseContract[]>;
/** Posts an updated comment contract for the given comment. */
public updateComment = ({
	commentId,
	contract,
}: {
	commentId: number;
	contract: CommentContract;
}): Promise<void> => {
	const url = this.urlMapper.mapRelative(`/api/songs/comments/${commentId}`);
	return this.httpClient.post<void>(url, contract);
};
/** Updates a song's personal description text and its credited author. */
public updatePersonalDescription = ({
	songId,
	text,
	author,
}: {
	songId: number;
	text: string;
	author: ArtistContract;
}): Promise<void> => {
	const url = this.urlMapper.mapRelative(
		`/api/songs/${songId}/personal-description/`,
	);
	const payload = {
		personalDescriptionText: text,
		personalDescriptionAuthor: author || undefined,
	};
	return this.httpClient.post<void>(url, payload);
};
/**
 * Rates the given song for the current user.
 * The SongVoteRating enum's name string (reverse mapping) is sent, not its numeric value.
 */
public updateSongRating = ({
	songId,
	rating,
}: {
	songId: number;
	rating: SongVoteRating;
}): Promise<void> => {
	// `const` instead of the legacy `var`.
	const url = this.urlMapper.mapRelative(`/api/songs/${songId}/ratings`);
	return this.httpClient.post<void>(url, { rating: SongVoteRating[rating] });
};
}
// Parameters for embedding a PV player.
export interface PVEmbedParams {
// Whether script access is enabled for the embedded player — presumably a Flash-era option; confirm current relevance.
enableScriptAccess?: boolean;
// DOM element ID the player is rendered into.
elementId?: string;
// PV service filter — TODO confirm expected format (single name vs. comma-separated list).
pvServices?: string;
}
// Query parameters for song searches, extending the shared entry query parameters.
export interface SongQueryParams extends CommonQueryParams {
// Sort rule name understood by the songs API.
sort?: string;
// Song type filter — TODO confirm expected format (single name vs. comma-separated list).
songTypes?: string;
}
import { Quaternion } from './quaternion'
import { Matrix } from './matrix'
const f32 = { min: Number.MIN_VALUE, max: Number.MAX_VALUE }
export class Vector2 {
/** Returns a vector with values set to their maximum values. */
public static MAX_VALUE: Vector2 = new Vector2(f32.max, f32.max)
/** Returns a vector with values set to their minimum values. */
public static MIN_VALUE: Vector2 = new Vector2(f32.min, f32.min)
/** The internal elements for this type. */
public v: Float32Array
/** Creates a new Vector2. */
constructor(x: number, y: number) {
this.v = new Float32Array(2)
this.v[0] = x
this.v[1] = y
}
/** Returns the string representation of this object. */
public toString(): string {
return `[${this.v[0]}, ${this.v[1]}]`
}
/** Returns the type kind of this object. */
public kind(): string {
return 'Vector2'
}
/** Returns a clone of this vector. */
public clone(): Vector2 {
return Vector2.clone(this)
}
public get x(): number {
return this.v[0];
}
public get y(): number {
return this.v[1]
}
public set x(value: number) {
this.v[0] = value;
}
public set y(value: number) {
this.v[1] = value
}
/** Returns the length of this vector. */
public length(): number {
return Vector2.getLength(this)
}
/** Returns the length of this vector. */
public lengthSq(): number {
return Vector2.getLengthSq(this)
}
/** Returns this vector normalized. */
public normalize(): Vector2 {
return Vector2.normalize(this)
}
/** Returns the dot product between this and the given vector. */
public dot(v0: Vector2): number {
return Vector2.dot(this, v0)
}
/** Returns the addition between this and the given vector. */
public add(v0: Vector2): Vector2 {
return Vector2.add(this, v0)
}
/** Returns the addition between this and the given vector. */
public sub(v0: Vector2): Vector2 {
return Vector2.sub(this, v0)
}
/** Returns the multiplication between this and the given vector. */
public mul(v0: Vector2): Vector2 {
return Vector2.mul(this, v0)
}
/** Returns the division between this and the given vector. */
public div(v0: Vector2): Vector2 {
return Vector2.div(this, v0)
}
/** Returns a new scaled vector from the given scalar value. */
public scale(s0: number): Vector2 {
return Vector2.scale(this, s0)
}
/** Returns a new negated vector from this vector. */
public negate(): Vector2 {
return Vector2.negate(this)
}
/** Returns a new vector whose values are initialized to zero. */
public static zero(): Vector2 {
return new Vector2(0.0, 0.0)
}
/** Returns a new vector whose values are initialized to one. */
public static one(): Vector2 {
return new Vector2(1.0, 1.0)
}
/** Returns a new unit x vector. */
public static unitX(): Vector2 {
return new Vector2(1.0, 0.0)
}
/** Returns a new unit y vector. */
public static unitY(): Vector2 {
return new Vector2(0.0, 1.0)
}
/** Returns a new left vector. */
public static left(): Vector2 {
return new Vector2(-1.0, 0.0)
}
/** Returns a new right vector. */
public static right(): Vector2 {
return new Vector2(1.0, 0.0)
}
/** Returns a new up vector. */
public static up(): Vector2 {
return new Vector2(0.0, 1.0)
}
/** Returns a new up vector. */
public static down(): Vector2 {
return new Vector2(0.0, -1.0)
}
/** Compares the left and right vectors for equality. */
public static equals(v0: Vector2, v1: Vector2): boolean {
return (
v0.v[0] === v1.v[0] &&
v0.v[1] === v1.v[1]
)
}
/** Returns the length of the given vector. */
public static getLength(v0: Vector2): number {
return Math.sqrt(
(v0.v[0] * v0.v[0]) +
(v0.v[1] * v0.v[1])
)
}
/** Returns the square length of the given vector. */
public static getLengthSq(v0: Vector2): number {
return (
(v0.v[0] * v0.v[0]) +
(v0.v[1] * v0.v[1])
)
}
/** Returns the distance between the left and right vectors. */
public static distance(v0: Vector2, v1: Vector2): number {
const x = v0.v[0] - v1.v[0]
const y = v0.v[1] - v1.v[1]
return Math.sqrt((x * x) + (y * y))
}
/** Returns the squared distance between the left and right vectors. */
public static distanceSq(v0: Vector2, v1: Vector2): number {
const x = v0.v[0] - v1.v[0]
const y = v0.v[1] - v1.v[1]
return ((x * x) + (y * y))
}
/** Returns the dot product between the given two vectors. */
public static dot(v0: Vector2, v1: Vector2): number {
return (
(v0.v[0] * v1.v[0]) +
(v0.v[1] * v1.v[1])
)
}
/** Returns a normalized vector from the given vector. */
public static normalize(v0: Vector2): Vector2 {
const len = 1.0 / Math.sqrt(
(v0.v[0] * v0.v[0]) +
(v0.v[1] * v0.v[1])
)
return new Vector2(
v0.v[0] * len,
v0.v[1] * len
)
}
/** Returns the reflected vector about the given vector and normal. */
public static reflect(v0: Vector2, n0: Vector2): Vector2 {
const dot = (
(v0.v[0] * n0.v[0]) +
(v0.v[1] * n0.v[1])
)
return new Vector2(
v0.v[0] - ((2.0 * dot) * n0.v[0]),
v0.v[1] - ((2.0 * dot) * n0.v[1])
)
}
/** Returns a vectors whose values are absoluted from the given vector. */
public static abs(v0: Vector2): Vector2 {
return new Vector2(
Math.abs(v0.v[0]),
Math.abs(v0.v[1])
)
}
/** Returns the minimum components from the given to vectors. */
public static min(v0: Vector2, v1: Vector2): Vector2 {
return new Vector2(
(v0.v[0] < v1.v[0]) ? v0.v[0] : v1.v[0],
(v0.v[1] < v1.v[1]) ? v0.v[1] : v1.v[1]
)
}
/** Returns the maximum components from the given to vectors. */
public static max(v0: Vector2, v1: Vector2): Vector2 {
return new Vector2(
(v0.v[0] > v1.v[0]) ? v0.v[0] : v1.v[0],
(v0.v[1] > v1.v[1]) ? v0.v[1] : v1.v[1]
)
}
/** Returns a clamped vector within the given min and max range. */
public static clamp(v0: Vector2, min: Vector2, max: Vector2): Vector2 {
let x = v0.v[0]
let y = v0.v[1]
x = (x > max.v[0]) ? max.v[0] : x
x = (x < min.v[0]) ? min.v[0] : x
y = (y > max.v[1]) ? max.v[1] : y
y = (y < min.v[1]) ? min.v[1] : y
return new Vector2(x, y)
}
/** Returns the linear interpolation vector between the given two vectors and amount. */
public static lerp(v0: Vector2, v1: Vector2, amount: number): Vector2 {
return new Vector2(
v0.v[0] + ((v1.v[0] - v0.v[0]) * amount),
v0.v[1] + ((v1.v[1] - v0.v[1]) * amount)
)
}
/** Returns the barycentric coordinate between the given 3 vectors and amounts. */
public static barycentric(v0: Vector2, v1: Vector2, v2: Vector2, amount0: number, amount1: number): Vector2 {
return new Vector2(
(v0.v[0] + (amount0 * (v1.v[0] - v0.v[0]))) + (amount1 * (v2.v[0] - v0.v[0])),
(v0.v[1] + (amount0 * (v1.v[1] - v0.v[1]))) + (amount1 * (v2.v[1] - v0.v[1]))
)
}
/** Returns the smooth step interpolation between the given two vectors and amount. */
public static smoothstep(v0: Vector2, v1: Vector2, amount: number): Vector2 {
amount = (amount > 1.0) ? 1.0 : ((amount < 0.0) ? 0.0 : amount)
amount = (amount * amount) * (3.0 - (2.0 * amount))
return new Vector2(
v0.v[0] + ((v1.v[0] - v0.v[0]) * amount),
v0.v[1] + ((v1.v[1] - v0.v[1]) * amount)
)
}
/** Returns the catmull rom interpolation between the given vectors and amount. */
public static catmullrom(v0: Vector2, v1: Vector2, v2: Vector2, v3: Vector2, amount: number): Vector2 {
const n0 = amount * amount
const n1 = amount * n0
return new Vector2(
0.5 * ((((2.0 * v1.v[0])
+ ((-v0.v[0] + v2.v[0]) * amount))
+ (((((2.0 * v0.v[0]) - (5.0 * v1.v[0]))
+ (4.0 * v2.v[0])) - v3.v[0]) * n0))
+ ((((-v0.v[0] + (3.0 * v1.v[0]))
- (3.0 * v2.v[0])) + v3.v[0]) * n1)),
0.5 * ((((2.0 * v1.v[1])
+ ((-v0.v[1] + v2.v[1]) * amount))
+ (((((2.0 * v0.v[1]) - (5.0 * v1.v[1]))
+ (4.0 * v2.v[1])) - v3.v[1]) * n0))
+ ((((-v0.v[1] + (3.0 * v1.v[1]))
- (3.0 * v2.v[1])) + v3.v[1]) * n1))
)
}
/** Returns the hermite interpolation between the given vectors and amount. */
public static hermite(v0: Vector2, t0: Vector2, v1: Vector2, t1: Vector2, amount: number): Vector2 {
const n0 = amount * amount
const n1 = amount * n0
const n2 = ((2.0 * n1) - (3.0 * n0)) + 1.0
const n3 = (-2.0 * n1) + (3.0 * n0)
const n4 = (n1 - (2.0 * n0)) + amount
const n5 = n1 - n0
return new Vector2(
(((v0.v[0] * n2) + (v1.v[0] * n3)) + (t0.v[0] * n4)) + (t1.v[0] * n5),
(((v0.v[1] * n2) + (v1.v[1] * n3)) + (t0.v[1] * n4)) + (t1.v[1] * n5)
)
}
/** Returns the transformed vector from the given vector and Matrix. */
public static transform(v0: Vector2, m0: Matrix): Vector2 {
return new Vector2(
((v0.v[0] * m0.v[0]) + (v0.v[1] * m0.v[4])) + m0.v[12],
((v0.v[0] * m0.v[1]) + (v0.v[1] * m0.v[5])) + m0.v[13]
)
}
/** Returns the transformed normal (2 component) vector from the given normal and Matrix. */
public static transformNormal(n0: Vector2, m0: Matrix): Vector2 {
return new Vector2(
(n0.v[0] * m0.v[0]) + (n0.v[1] * m0.v[4]),
(n0.v[0] * m0.v[1]) + (n0.v[1] * m0.v[5])
)
}
/** Returns the transformed vector from the given normal and quaternion. */
public static transformQuaternion(v0: Vector2, q0: Quaternion): Vector2 {
const n0 = q0.v[0] + q0.v[0]
const n1 = q0.v[1] + q0.v[1]
const n2 = q0.v[2] + q0.v[2]
const n3 = q0.v[3] * n2
const n4 = q0.v[0] * n0
const n5 = q0.v[0] * n1
const n6 = q0.v[1] * n1
const n7 = q0.v[2] * n2
return new Vector2(
(v0.v[0] * ((1.0 - n6) - n7)) + (v0.v[1] * (n5 - n3)),
(v0.v[0] * (n5 + n3)) + (v0.v[1] * ((1.0 - n4) - n7))
)
}
/** Returns the addition of the given vectors. */
public static add(v0: Vector2, v1: Vector2): Vector2 {
return new Vector2(
v0.v[0] + v1.v[0],
v0.v[1] + v1.v[1]
)
}
/** Returns the subtraction of the given vectors. */
public static sub(v0: Vector2, v1: Vector2): Vector2 {
return new Vector2(
v0.v[0] - v1.v[0],
v0.v[1] - v1.v[1]
)
}
/** Multiplies the given two vectors. */
public static mul(v0: Vector2, v1: Vector2): Vector2 {
return new Vector2(
v0.v[0] * v1.v[0],
v0.v[1] * v1.v[1]
)
}
/** Divides the given two vectors. */
public static div(v0: Vector2, v1: Vector2): Vector2 {
return new Vector2(
v0.v[0] / v1.v[0],
v0.v[1] / v1.v[1]
)
}
/** Multiplies the given vector with the scalar. */
public static scale(v0: Vector2, scalar: number): Vector2 {
return new Vector2(
v0.v[0] * scalar,
v0.v[1] * scalar
)
}
/** Negates the given vector. */
public static negate(v0: Vector2): Vector2 {
return new Vector2(
-v0.v[0],
-v0.v[1]
)
}
/** Returns a clone of the given vector. */
public static clone(v0: Vector2): Vector2 {
return new Vector2(
v0.v[0],
v0.v[1]
)
}
/** Creates a new Vector2. */
public static create(x: number, y: number): Vector2 {
return new Vector2(x, y)
}
} | the_stack |
import 'reflect-metadata';
import * as ts from 'typescript';
import { WrappedAst, BooleanCompilerOptions, isCompilerOptionEnabled } from 'tsutils';
import * as path from 'path';
// Error subclass — presumably raised for configuration problems; callers can catch it by type.
export class ConfigurationError extends Error {}
// Raw, unvalidated global options keyed by option name.
export abstract class GlobalOptions {
readonly [key: string]: {} | null | undefined;
}
// Lint results: pairs of file name and that file's summary.
export type LintResult = Iterable<[string, FileSummary]>;
export type FileSummary = LintAndFixFileResult;
// Outcome of linting (and possibly autofixing) one file.
export interface LintAndFixFileResult {
// File content after any fixes were applied.
content: string;
findings: ReadonlyArray<Finding>;
// Number of fixes that were applied.
fixes: number;
}
/** A single text edit: the range from `start` to `end` is replaced with `text`. */
export interface Replacement {
	readonly start: number;
	readonly end: number;
	readonly text: string;
}
/** Factory helpers for building `Replacement` objects. */
export const Replacement = {
	/** Replaces the text between `start` and `end` with `text`. */
	replace(start: number, end: number, text: string): Replacement {
		return {start, end, text};
	},
	/** Inserts `text` at `pos` without removing anything. */
	append(pos: number, text: string): Replacement {
		return Replacement.replace(pos, pos, text);
	},
	/** Removes the text between `start` and `end`. */
	delete(start: number, end: number): Replacement {
		return Replacement.replace(start, end, '');
	},
};
/** A set of replacements that together form one autofix. */
export interface Fix {
	readonly replacements: ReadonlyArray<Replacement>;
}
/** A single finding reported by a rule for a source file. */
export interface Finding {
	readonly start: FindingPosition;
	readonly end: FindingPosition;
	readonly message: string;
	readonly ruleName: string;
	readonly severity: Severity;
	readonly fix: Fix | undefined;
}
export const Finding = {
	/** Compare two Findings. Intended to be used in `Array.prototype.sort`. */
	compare(a: Finding, b: Finding): number {
		const byStart = a.start.position - b.start.position;
		if (byStart !== 0)
			return byStart;
		const byEnd = a.end.position - b.end.position;
		if (byEnd !== 0)
			return byEnd;
		const byRule = compareStrings(a.ruleName, b.ruleName);
		if (byRule !== 0)
			return byRule;
		return compareStrings(a.message, b.message);
	},
};
/** Lexicographic three-way comparison of two strings. */
function compareStrings(a: string, b: string): number {
	if (a < b)
		return -1;
	if (a > b)
		return 1;
	return 0;
}
/** Location of a finding boundary within a file (line/character plus absolute offset). */
export interface FindingPosition {
	readonly line: number;
	readonly character: number;
	readonly position: number;
}
export type Severity = 'error' | 'warning' | 'suggestion';
// Static shape of a rule class (the constructor function itself).
export interface RuleConstructor<T extends RuleContext = RuleContext> {
readonly requiresTypeInformation: boolean;
// `true`, or a string describing the deprecation.
readonly deprecated?: boolean | string;
// Predicate deciding whether the rule applies to a file; installed/combined via the decorators below.
supports?: RulePredicate;
new(context: T): AbstractRule;
}
// Context available to `supports` predicates; program/compilerOptions may be absent.
export interface RulePredicateContext {
readonly program?: ts.Program;
readonly compilerOptions?: ts.CompilerOptions;
readonly settings: Settings;
readonly options: {} | null | undefined;
}
// Context a rule instance runs with.
export interface RuleContext extends RulePredicateContext {
readonly sourceFile: ts.SourceFile;
addFinding(start: number, end: number, message: string, fix?: Replacement | ReadonlyArray<Replacement>): void;
getFlatAst(): ReadonlyArray<ts.Node>;
getWrappedAst(): WrappedAst;
}
// Rule context where type information is guaranteed to be present.
export interface TypedRuleContext extends RuleContext {
readonly program: ts.Program;
readonly compilerOptions: ts.CompilerOptions;
}
export type Settings = ReadonlyMap<string, {} | null | undefined>;
/**
 * Class decorator factory that adds `check` to the rule's `supports` predicate.
 * The newly added predicate is evaluated before any previously installed one.
 */
export function predicate(check: RulePredicate) {
	return (target: typeof AbstractRule) => {
		target.supports = combinePredicates(target.supports, check);
	};
}
/**
 * Chains two predicates: `added` runs first; only when it yields `true` does
 * the previously installed predicate get to veto.
 */
function combinePredicates(previous: RulePredicate | undefined, added: RulePredicate): RulePredicate {
	if (previous === undefined)
		return added;
	return (sourceFile, context) => {
		const verdict = added(sourceFile, context);
		if (verdict !== true)
			return verdict;
		return previous(sourceFile, context);
	};
}
/** Class decorator restricting a rule to '.ts' / '.tsx' files. */
export function typescriptOnly(target: typeof AbstractRule) {
	target.supports = combinePredicates(target.supports, (sourceFile) => /\.tsx?$/.test(sourceFile.fileName) || 'TypeScript only');
}
/** Class decorator excluding declaration files from a rule. */
export function excludeDeclarationFiles(target: typeof AbstractRule) {
	target.supports = combinePredicates(target.supports, (sourceFile) => !sourceFile.isDeclarationFile || 'excludes declaration files');
}
export function requireLibraryFile(fileName: string) {
return (target: typeof TypedRule) => {
target.supports = combinePredicates(
target.supports,
(_, context) => programContainsLibraryFile(context.program!, fileName) || `requires library file '${fileName}'`,
);
};
}
function programContainsLibraryFile(program: ts.Program, fileName: string) {
const libFileDir = path.dirname(ts.getDefaultLibFilePath(program.getCompilerOptions()));
return program.getSourceFile(path.join(libFileDir, fileName)) !== undefined;
}
export function requiresCompilerOption(option: BooleanCompilerOptions) {
return (target: typeof TypedRule) => {
target.supports = combinePredicates(
target.supports,
(_, context) => isCompilerOptionEnabled(context.compilerOptions!, option) || `requires compilerOption '${option}'`,
);
};
}
/** @returns `true`, `false` or a reason */
export type RulePredicate = (sourceFile: ts.SourceFile, context: RulePredicateContext) => boolean | string;
// Base class all rules derive from; subclasses implement `apply`.
export abstract class AbstractRule {
public static readonly requiresTypeInformation: boolean = false;
public static deprecated: boolean | string = false;
// Installed/combined by the decorator helpers above.
public static supports?: RulePredicate = undefined;
public static validateConfig?(config: any): string[] | string | undefined;
public readonly sourceFile: ts.SourceFile;
public get program() {
return this.context.program;
}
constructor(public readonly context: RuleContext) {
this.sourceFile = context.sourceFile;
}
public abstract apply(): void;
// Convenience forwarding to the context.
public addFinding(start: number, end: number, message: string, fix?: Replacement | ReadonlyArray<Replacement>) {
return this.context.addFinding(start, end, message, fix);
}
// Reports a finding spanning the given node.
public addFindingAtNode(node: ts.Node, message: string, fix?: Replacement | ReadonlyArray<Replacement>) {
return this.addFinding(node.getStart(this.sourceFile), node.end, message, fix);
}
}
// Rule with parsed options; `parseOptions` runs once in the constructor.
export abstract class ConfigurableRule<T> extends AbstractRule {
public options: T;
constructor(context: RuleContext) {
super(context);
this.options = this.parseOptions(context.options);
}
protected abstract parseOptions(options: {} | null | undefined): T;
}
// Rule that requires type information; `context` is re-declared with the typed context.
export abstract class TypedRule extends AbstractRule {
public static readonly requiresTypeInformation = true;
declare public readonly context: TypedRuleContext;
// Re-declared so `program` is non-optional here, unlike on AbstractRule.
public get program() {
return this.context.program;
}
/** Lazily evaluated getter for TypeChecker. Use this instead of `this.program.getTypeChecker()` to avoid wasting CPU cycles. */
public get checker() {
const checker = this.program.getTypeChecker();
// Replaces this accessor with a constant property on first access, caching the checker.
Object.defineProperty(this, 'checker', {value: checker, writable: false});
return checker;
}
// Constructor only narrows the parameter type; behavior is inherited.
constructor(context: TypedRuleContext) {
super(context);
}
}
// Typed rule with parsed options; `parseOptions` runs once in the constructor.
export abstract class ConfigurableTypedRule<T> extends TypedRule {
public options: T;
constructor(context: TypedRuleContext) {
super(context);
this.options = this.parseOptions(context.options);
}
protected abstract parseOptions(options: {} | null | undefined): T;
}
// Base class for output formatters; `format` is called per file, `flush` once at the end.
export abstract class AbstractFormatter {
// Optional text emitted before any per-file output — presumably; confirm against the runner.
public prefix?: string;
public abstract format(filename: string, summary: FileSummary): string | undefined;
public flush?(): string | undefined;
}
export interface FormatterConstructor {
new(): AbstractFormatter;
}
// A parsed configuration file, including everything it extends.
export interface Configuration {
readonly aliases?: ReadonlyMap<string, Configuration.Alias>;
readonly rules?: ReadonlyMap<string, Configuration.RuleConfig>;
readonly settings?: Settings;
// Path of the configuration file this was loaded from.
readonly filename: string;
readonly overrides?: ReadonlyArray<Configuration.Override>;
readonly extends: ReadonlyArray<Configuration>;
readonly rulesDirectories?: Configuration.RulesDirectoryMap;
readonly processor?: string | null | false;
readonly exclude?: ReadonlyArray<string>;
}
export namespace Configuration {
export type RulesDirectoryMap = ReadonlyMap<string, ReadonlyArray<string>>;
export type RuleSeverity = 'off' | 'warning' | 'error' | 'suggestion';
// Configuration of a single rule within a configuration file.
export interface RuleConfig {
readonly severity?: RuleSeverity;
readonly options?: any;
readonly rulesDirectories: ReadonlyArray<string> | undefined;
readonly rule: string;
}
// Per-glob override applied on top of the base configuration.
export interface Override {
readonly rules?: ReadonlyMap<string, RuleConfig>;
readonly settings?: ReadonlyMap<string, any>;
readonly files: ReadonlyArray<string>;
readonly processor?: string | null | false;
}
// A named shorthand for a rule plus options.
export interface Alias {
readonly rule: string;
readonly options?: any;
readonly rulesDirectories: ReadonlyArray<string> | undefined;
}
}
// Configuration after merging all extends/overrides for a file.
export interface EffectiveConfiguration {
rules: Map<string, EffectiveConfiguration.RuleConfig>;
settings: Map<string, any>;
}
export namespace EffectiveConfiguration {
export interface RuleConfig {
severity: Configuration.RuleSeverity;
options: any;
rulesDirectories: ReadonlyArray<string> | undefined;
rule: string;
}
}
// Effective configuration plus the resolved processor for a file.
export interface ReducedConfiguration extends EffectiveConfiguration {
processor: string | undefined;
}
// Locates, resolves and loads configuration files.
export interface ConfigurationProvider {
find(fileToLint: string): string | undefined;
resolve(name: string, basedir: string): string;
load(fileName: string, context: LoadConfigurationContext): Configuration;
}
// Empty abstract class merged with the interface above so the symbol can serve as a runtime (DI) token — presumably; the 'reflect-metadata' import supports this.
export abstract class ConfigurationProvider {}
export interface LoadConfigurationContext {
// Chain of configuration files currently being loaded (for cycle detection).
readonly stack: ReadonlyArray<string>;
/**
 * Resolves the given name relative to the current configuration file and returns the parsed Configuration.
 * This function detects cycles and caches already loaded configurations.
 */
load(name: string): Configuration;
}
// Supported configuration file formats.
export const enum Format {
Yaml = 'yaml',
Json = 'json',
Json5 = 'json5',
}
// Static shape of a processor class.
export interface ProcessorConstructor {
getSuffixForFile(context: ProcessorSuffixContext): string;
new(context: ProcessorContext): AbstractProcessor;
}
export interface ProcessorSuffixContext {
fileName: string;
getSettings(): Settings;
readFile(): string;
}
export interface ProcessorContext {
source: string;
sourceFileName: string;
targetFileName: string;
settings: Settings;
}
export interface ProcessorUpdateResult {
transformed: string;
// Optional incremental change range for faster reparsing.
changeRange?: ts.TextChangeRange;
}
// Base class for processors that transform sources before linting and map findings back afterwards.
export abstract class AbstractProcessor {
/**
 * Returns a new primary extension that is appended to the file name, e.g. '.ts'.
 * If the file should not get a new extension, just return an empty string.
 */
public static getSuffixForFile(_context: ProcessorSuffixContext): string {
return '';
}
protected source: string;
protected sourceFileName: string;
protected targetFileName: string;
protected settings: Settings;
constructor(context: ProcessorContext) {
this.source = context.source;
this.sourceFileName = context.sourceFileName;
this.targetFileName = context.targetFileName;
this.settings = context.settings;
}
public abstract preprocess(): string;
public abstract postprocess(findings: ReadonlyArray<Finding>): ReadonlyArray<Finding>;
public abstract updateSource(newSource: string, changeRange: ts.TextChangeRange): ProcessorUpdateResult;
}
// Sink for log/warning/error output. The empty abstract classes below each merge
// with their interface so the symbol doubles as a runtime (DI) token — presumably;
// the 'reflect-metadata' import supports this.
export interface MessageHandler {
log(message: string): void;
warn(message: string): void;
error(e: Error): void;
}
export abstract class MessageHandler {}
// Receives notifications about usage of deprecated rules/processors/formatters.
export interface DeprecationHandler {
handle(target: DeprecationTarget, name: string, text?: string): void;
}
export abstract class DeprecationHandler {}
export const enum DeprecationTarget {
Rule = 'rule',
Processor = 'processor',
Formatter = 'formatter',
}
/**
 * Low level file system access. All methods are supposed to throw an error on failure.
 */
export interface FileSystem {
/** Normalizes the path to enable reliable caching in consuming services. */
normalizePath(path: string): string;
/** Reads the given file. Tries to infer and convert encoding. */
readFile(file: string): string;
/** Reads directory entries. Returns only the basenames optionally with file type information. */
readDirectory(dir: string): Array<string | Dirent>;
/** Gets the status of a file or directory. */
stat(path: string): Stats;
/** Gets the realpath of a given file or directory. */
realpath?(path: string): string;
/** Writes content to the file, overwriting the existing content. Creates the file if necessary. */
writeFile(file: string, content: string): void;
/** Deletes a given file. Is not supposed to delete or clear a directory. */
deleteFile(path: string): void;
/** Creates a single directory and fails on error. Is not supposed to create multiple directories. */
createDirectory(dir: string): void;
}
export abstract class FileSystem {}
// Subset of fs.Stats used by this library.
export interface Stats {
isDirectory(): boolean;
isFile(): boolean;
}
// Directory entry with type information.
export interface Dirent extends Stats {
name: string;
isSymbolicLink(): boolean;
}
// Loads rule constructors from the core package or custom directories.
export interface RuleLoaderHost {
loadCoreRule(name: string): RuleConstructor | undefined;
loadCustomRule(name: string, directory: string): RuleConstructor | undefined;
}
export abstract class RuleLoaderHost {}
// Loads formatter constructors from the core package or custom locations.
export interface FormatterLoaderHost {
loadCoreFormatter(name: string): FormatterConstructor | undefined;
loadCustomFormatter(name: string, basedir: string): FormatterConstructor | undefined;
}
export abstract class FormatterLoaderHost {}
// wotan-disable no-misused-generics
export interface CacheFactory {
/** Creates a new cache instance. */
create<K extends object, V = any>(weak: true): Cache<K, V>;
create<K = any, V = any>(weak?: false): Cache<K, V>;
}
// wotan-enable no-misused-generics
export abstract class CacheFactory {}
// Minimal Map-like cache contract.
export interface Cache<K, V> {
get(key: K): V | undefined;
set(key: K, value: V): void;
delete(key: K): void;
has(key: K): boolean;
clear(): void;
}
// Module resolution and loading abstraction.
export interface Resolver {
getDefaultExtensions(): ReadonlyArray<string>;
resolve(id: string, basedir?: string, extensions?: ReadonlyArray<string>, paths?: ReadonlyArray<string>): string;
require(id: string, options?: {cache?: boolean}): any;
}
export abstract class Resolver {}
// Resolves names of builtin configs, rules and formatters to file paths.
export interface BuiltinResolver {
resolveConfig(name: string): string;
resolveRule(name: string): string;
resolveFormatter(name: string): string;
}
export abstract class BuiltinResolver {}
// Provides the process's current (and optionally home) directory.
export interface DirectoryService {
getCurrentDirectory(): string;
getHomeDirectory?(): string;
}
export abstract class DirectoryService {}
// Creates per-file finding filters (e.g. for disable-comment handling). The empty
// abstract classes below merge with their interfaces as runtime (DI) tokens — presumably.
export interface FindingFilterFactory {
create(context: FindingFilterContext): FindingFilter;
}
export abstract class FindingFilterFactory {}
export interface FindingFilterContext {
sourceFile: ts.SourceFile;
ruleNames: ReadonlyArray<string>;
getWrappedAst(): WrappedAst;
}
export interface FindingFilter {
/** @returns `true` if the finding should be used, false if it should be filtered out. Intended for use in `Array.prototype.filter`. */
filter(finding: Finding): boolean;
/**
 * @returns Findings to report redundant or unused filter directives.
 * This is called after calling `filter` for all findings in the file.
 */
reportUseless(severity: Severity): ReadonlyArray<Finding>;
}
// Parses enable/disable line switches from a source file.
export interface LineSwitchParser {
parse(context: LineSwitchParserContext): ReadonlyArray<RawLineSwitch>;
}
export abstract class LineSwitchParser {}
export interface LineSwitchParserContext {
sourceFile: ts.SourceFile;
getCommentAtPosition(pos: number): ts.CommentRange | undefined;
}
// A parsed enable/disable switch covering a range of the file.
export interface RawLineSwitch {
readonly rules: ReadonlyArray<RawLineSwitchRule>;
readonly enable: boolean;
readonly pos: number;
readonly end?: number;
readonly location: Readonly<ts.TextRange>;
}
// A rule selector within a line switch; the predicate matches rule names.
export interface RawLineSwitchRule {
readonly predicate: string | RegExp | ((ruleName: string) => boolean);
readonly location?: Readonly<ts.TextRange>;
readonly fixLocation?: Readonly<ts.TextRange>;
}
export interface FileFilterContext {
program: ts.Program;
host: Required<Pick<ts.CompilerHost, 'directoryExists'>>;
}
// Creates filters that decide which program files get linted.
export interface FileFilterFactory {
create(context: FileFilterContext): FileFilter;
}
export abstract class FileFilterFactory {}
export interface FileFilter {
/** @returns `true` if the file should be linted, false if it should be filtered out. Intended for use in `Array.prototype.filter`. */
filter(file: ts.SourceFile): boolean;
}
export type ContentIdHost = Pick<ts.CompilerHost, 'readFile'>;
// Computes a stable identifier (typically a hash) for a file's content.
export interface ContentId {
forFile(fileName: string, host: ContentIdHost): string;
}
export abstract class ContentId {}
// Persists incremental lint state between runs, keyed by project.
export interface StatePersistence {
loadState(project: string): StaticProgramState | undefined;
saveState(project: string, state: StaticProgramState): void;
}
export abstract class StatePersistence {}
// Serialized snapshot of a program used for incremental linting; invalidated when
// the format version, TypeScript version or compilerOptions hash changes.
export interface StaticProgramState {
/** Version of the cache format */
readonly v: number;
/** TypeScript version */
readonly ts: string;
/** Whether the state was created using case-sensitive file names */
readonly cs: boolean;
/** Hash of compilerOptions */
readonly options: string;
/** Maps filename to index in 'files' array */
readonly lookup: Readonly<Record<string, number>>;
/** Index of files that affect global scope */
readonly global: readonly number[];
/** Information about all files in the program */
readonly files: readonly StaticProgramState.FileState[];
}
export namespace StaticProgramState {
// Per-file entry; indices refer into the parent state's 'files' array.
export interface FileState {
/** ID of file contents (typically a hash) */
readonly id: string;
/**
 * Key: module specifier as referenced in the file, order may be random
 * Value: - `null` if dependency could not be resolved
 *        - List of files (or rather their index) that the module specifier resolves to.
 *          That is the actual file at that path and/or files containing `declare module "..."` for that module specifier.
 *          May contain the current file.
 *          This list is ordered by the ID of the files ascending,
 */
readonly dependencies?: Readonly<Record<string, null | readonly number[]>>;
/** The list of findings if this file has up-to-date results */
readonly result?: readonly Finding[];
/** Hash of the configuration used to produce `result` for this file */
readonly config?: string;
}
}
import type {Fiber} from 'react-reconciler';
import type {ReactWrapper} from 'enzyme';
import React, {createRef} from 'react';
import {mount} from 'enzyme';
import {getChildrenIds, getFibersIndices, getFibersKeys} from '../__shared__';
import {addChild, getFiberFromElementInstance, removeChild} from '../../src';
import {invariant, Invariant} from '../../src/invariant';
import {warning} from '../../src/warning';
// Ref attached to the parent element so its fiber can be retrieved after mounting.
const parentRef = createRef<HTMLDivElement>();
// Enzyme wrapper around the mounted parent; re-created in beforeEach.
let parentWrapper: ReactWrapper;
// Fiber of the parent element; re-resolved in beforeEach.
let parent: Fiber;
// Fresh mount before every test so each case starts from two children with keys '1' and '2'.
beforeEach(() => {
// Mount the components.
parentWrapper = mount(
<div ref={parentRef}>
<div key="1" id="1" />
<div key="2" id="2" />
</div>
);
// The ref is populated synchronously by mount; invariant narrows it for TypeScript.
// (type fixing).
invariant(parentRef.current !== null);
parent = getFiberFromElementInstance(parentRef.current);
// Clear the mock — `warning` is presumably jest-mocked in test setup; confirm against jest.mock config.
// Clear the mock.
(warning as jest.Mock).mockClear();
});
describe('How removeChild( ) works', () => {
  // Removal by numeric index: the fiber at that position is detached and
  // the remaining siblings are re-indexed from 0.
  test('Remove the first child', () => {
    const child = removeChild(parent, 0);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The key is correct.
    expect(child.key).toBe('1');
    // Warning calls.
    expect(warning).not.toHaveBeenCalled();
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['2']);
  });
  test('Remove the second child', () => {
    const child = removeChild(parent, 1);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The key is correct.
    expect(child.key).toBe('2');
    // Warning calls.
    expect(warning).not.toHaveBeenCalled();
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['1']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1']);
  });
test('Remove the child with the key "1"', () => {
const child = removeChild(parent, '1');
// The child is found.
expect(child).not.toBe(null);
// (type fixing).
invariant(child !== null);
// The key is correct.
expect(child.key).toBe('1');
// Warning calls.
expect(warning).not.toHaveBeenCalled();
// The indices are updated.
expect(getFibersIndices(parent)).toEqual([0]);
// The keys are in the correct order.
expect(getFibersKeys(parent)).toEqual(['2']);
// The children are correct.
expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['2']);
});
test('Remove the child with the key "2"', () => {
const child = removeChild(parent, '2');
// The child is found.
expect(child).not.toBe(null);
// (type fixing).
invariant(child !== null);
// The key is correct.
expect(child.key).toBe('2');
// Warning calls.
expect(warning).not.toHaveBeenCalled();
// The indices are updated.
expect(getFibersIndices(parent)).toEqual([0]);
// The keys are in the correct order.
expect(getFibersKeys(parent)).toEqual(['1']);
// The children are correct.
expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1']);
});
  // When the target cannot be found, `removeChild` returns null, warns
  // once, and leaves the tree untouched.
  test('(Provide a index bigger than the number of children) Not remove the child', () => {
    const child = removeChild(parent, 5);
    // The child is not found.
    expect(child).toBe(null);
    // Warning calls.
    expect(warning).toHaveBeenCalledTimes(1);
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0, 1]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['1', '2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1', '2']);
  });
  test('(Provide a not valid key) Not remove the child', () => {
    const child = removeChild(parent, '5');
    // The child is not found.
    expect(child).toBe(null);
    // Warning calls.
    expect(warning).toHaveBeenCalledTimes(1);
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0, 1]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['1', '2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1', '2']);
  });
  // The parent has an alternate but the (freshly added) child does not:
  // removal must work on the current tree while the alternate keeps the
  // original two children.
  test('(With only parent alternate) Remove the first child', () => {
    // Generate the parent alternate.
    parentWrapper.setProps({});
    invariant(parent.alternate !== null);
    const ref = createRef<HTMLDivElement>();
    // Generate a fiber without alternate.
    mount(<div id="3" ref={ref} />);
    // (type fixing).
    invariant(ref.current !== null);
    // Add the fiber.
    addChild(parent, getFiberFromElementInstance(ref.current), 0);
    const child = removeChild(parent, 0);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The child has no alternate (it was mounted without one).
    expect(child.alternate).toBe(null);
    // Warning calls.
    expect(warning).not.toHaveBeenCalled();
    // The indices are updated.
    expect(getFibersIndices(parent.alternate)).toEqual([0, 1]);
    expect(getFibersIndices(parent)).toEqual([0, 1]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent.alternate)).toEqual(['1', '2']);
    expect(getFibersKeys(parent)).toEqual(['1', '2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1', '2']);
  });
  // Only the child carries an alternate (created via `setProps` on its own
  // mount); the parent does not.
  test('(With only child alternate) Remove the first child', () => {
    const ref = createRef<HTMLDivElement>();
    // Generate a fiber without alternate.
    mount(<div id="3" ref={ref} />).setProps({});
    // (type fixing).
    invariant(ref.current !== null);
    // Add the fiber.
    addChild(parent, getFiberFromElementInstance(ref.current), 0);
    const child = removeChild(parent, 0);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The child has an alternate.
    expect(child.alternate).not.toBe(null);
    // Warning calls.
    expect(warning).not.toHaveBeenCalled();
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0, 1]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['1', '2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1', '2']);
  });
  // Both trees exist: the removal must be mirrored in the alternate so the
  // current and work-in-progress trees stay consistent.
  test('(With parent and child alternates) Remove the first child', () => {
    // Generate the parent alternate.
    parentWrapper.setProps({});
    invariant(parent.alternate !== null);
    const child = removeChild(parent, 0);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The child has an alternate.
    expect(child.alternate).not.toBe(null);
    // The key is correct.
    expect(child.key).toBe('1');
    // Warning calls.
    expect(warning).not.toHaveBeenCalled();
    // The indices are updated.
    expect(getFibersIndices(parent.alternate)).toEqual([0]);
    expect(getFibersIndices(parent)).toEqual([0]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent.alternate)).toEqual(['2']);
    expect(getFibersKeys(parent)).toEqual(['2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['2']);
  });
  test('(With parent and child alternates) Remove the child with the key "2"', () => {
    parentWrapper.setProps({});
    invariant(parent.alternate !== null);
    const child = removeChild(parent, '2');
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The child has an alternate.
    expect(child.alternate).not.toBe(null);
    // The key is correct.
    expect(child.key).toBe('2');
    // Warning calls.
    expect(warning).not.toHaveBeenCalled();
    // The indices are updated.
    expect(getFibersIndices(parent.alternate)).toEqual([0]);
    expect(getFibersIndices(parent)).toEqual([0]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent.alternate)).toEqual(['1']);
    expect(getFibersKeys(parent)).toEqual(['1']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1']);
  });
  // In the following cases the fiber tree is mutated but the DOM is not:
  // either because `skipUpdate` is requested, or because the DOM nodes
  // cannot be located (missing stateNode) — the latter also warns.
  test('(Enable skipUpdate option) Send a child but not update the DOM', () => {
    const child = removeChild(parent, 0, true);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The key is correct.
    expect(child.key).toBe('1');
    // Warning calls.
    expect(warning).not.toHaveBeenCalled();
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1', '2']);
  });
  test('(The child element is not found) Send a child but not update the DOM', () => {
    // (type fixing).
    invariant(parent.child !== null);
    parent.child.stateNode = null;
    const child = removeChild(parent, 0);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The key is correct.
    expect(child.key).toBe('1');
    // Warning calls.
    expect(warning).toHaveBeenCalledTimes(1);
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1', '2']);
  });
  test('(The container element is not found) Send a child but not update the DOM', () => {
    parent.stateNode = null;
    const child = removeChild(parent, 0);
    // The child is found.
    expect(child).not.toBe(null);
    // (type fixing).
    invariant(child !== null);
    // The key is correct.
    expect(child.key).toBe('1');
    // Warning calls.
    expect(warning).toHaveBeenCalledTimes(1);
    // The indices are updated.
    expect(getFibersIndices(parent)).toEqual([0]);
    // The keys are in the correct order.
    expect(getFibersKeys(parent)).toEqual(['2']);
    // The children are correct.
    expect(getChildrenIds(parentWrapper.getDOMNode())).toEqual(['1', '2']);
  });
  // Negative indices are a programming error and must throw, not warn.
  test('(Provide an index less than 0) Throw an Invariant', () => {
    expect(() => {
      removeChild(parent, -1);
    }).toThrow(Invariant);
  });
});
import os from 'os';
import url from 'url';
import fs from 'fs-extra';
import { join } from 'path';
import listen from 'async-listen';
import { createServer } from 'http';
// `utils.js` is untyped, hence `require` instead of a typed `import`.
const {
  exec,
  fetch,
  fixture,
  testFixture,
  testFixtureStdio,
  validateResponseHeaders,
} = require('./utils.js');
// Edge functions are invoked through a shim, so each request property
// must be forwarded explicitly; this checks that they all round-trip.
test('[vercel dev] should support edge functions', async () => {
  const dir = fixture('edge-function');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    const body = { hello: 'world' };
    let res = await fetch(`http://localhost:${port}/api/edge-function`, {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
      },
      body: JSON.stringify(body),
    });
    validateResponseHeaders(res);
    // support for edge functions has to manually ensure that these properties
    // are set up; so, we test that they are all passed through properly
    expect(await res.json()).toMatchObject({
      headerContentType: 'application/json',
      url: `http://localhost:${port}/api/edge-function`,
      method: 'POST',
      body: '{"hello":"world"}',
      decamelized: 'some_camel_case_thing',
      uppercase: 'SOMETHING',
      optionalChaining: 'fallback',
    });
  } finally {
    await dev.kill('SIGTERM');
  }
});
// Verifies the two ways a Node serverless function can consume a request
// body in dev: the parsed `req.body` helper, and raw `req` "data" events.
test('[vercel dev] should support request body', async () => {
  const dir = fixture('node-request-body');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    const body = { hello: 'world' };
    // Test that `req.body` works in dev
    let res = await fetch(`http://localhost:${port}/api/req-body`, {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
      },
      body: JSON.stringify(body),
    });
    validateResponseHeaders(res);
    expect(await res.json()).toMatchObject(body);
    // Test that `req` "data" events work in dev
    res = await fetch(`http://localhost:${port}/api/data-events`, {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
      },
      body: JSON.stringify(body),
    });
    // The response was previously assigned but never checked, so a broken
    // "data" events endpoint went undetected — validate it like the first.
    validateResponseHeaders(res);
  } finally {
    dev.kill('SIGTERM');
  }
});
// The route adds `route-param=b`; the request's own `url-param=a` must
// survive the rewrite into the serverless function.
test('[vercel dev] should maintain query when invoking serverless function', async () => {
  const dir = fixture('node-query-invoke');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    const res = await fetch(`http://localhost:${port}/something?url-param=a`);
    validateResponseHeaders(res);
    const text = await res.text();
    const parsed = url.parse(text, true);
    expect(parsed.pathname).toEqual('/something');
    expect(parsed.query['url-param']).toEqual('a');
    expect(parsed.query['route-param']).toEqual('b');
  } finally {
    dev.kill('SIGTERM');
  }
});
// Same guarantee when the route proxies to an external HTTP server
// (spawned locally here; its port is encoded in the request path).
test('[vercel dev] should maintain query when proxy passing', async () => {
  const dir = fixture('query-proxy');
  const { dev, port, readyResolver } = await testFixture(dir);
  const dest = createServer((req, res) => {
    res.end(req.url);
  });
  try {
    await Promise.all([readyResolver, listen(dest, 0)]);
    const destAddr = dest.address();
    if (!destAddr || typeof destAddr === 'string') {
      throw new Error('Unexpected HTTP address');
    }
    const res = await fetch(
      `http://localhost:${port}/${destAddr.port}?url-param=a`
    );
    validateResponseHeaders(res);
    const text = await res.text();
    const parsed = url.parse(text, true);
    expect(parsed.pathname).toEqual('/something');
    expect(parsed.query['url-param']).toEqual('a');
    expect(parsed.query['route-param']).toEqual('b');
  } finally {
    dest.close();
    dev.kill('SIGTERM');
  }
});
// Same query-preservation guarantee when the frontend dev server (Next.js
// here) defines its own routes.
test('[vercel dev] should maintain query when dev server defines routes', async () => {
  const dir = fixture('dev-server-query');
  const { dev, port, readyResolver } = await testFixture(dir, {
    env: {
      VERCEL_DEV_COMMAND: 'next dev --port $PORT',
    },
  });
  try {
    await readyResolver;
    const res = await fetch(`http://localhost:${port}/test?url-param=a`);
    validateResponseHeaders(res);
    const text = await res.text();
    // Hacky way of getting the page payload from the response
    // HTML since we don't have a HTML parser handy.
    const json = text
      .match(/<pre>(.*)<\/pre>/)![1]
      .replace('</pre>', '')
      .replace('<!-- -->', '')
      .replace(/&amp;/g, '&')
      .replace(/&quot;/g, '"');
    const parsed = JSON.parse(json);
    const query = url.parse(parsed.url, true).query;
    expect(query['url-param']).toEqual('a');
    expect(query['route-param']).toEqual('b');
  } finally {
    dev.kill('SIGTERM');
  }
});
test('[vercel dev] should allow `cache-control` to be overwritten', async () => {
  const dir = fixture('headers');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    // The fixture echoes the requested header name/value back as a
    // response header, overriding the default `cache-control`.
    const response = await fetch(
      `http://localhost:${port}/?name=cache-control&value=immutable`
    );
    expect(response.headers.get('cache-control')).toEqual('immutable');
  } finally {
    dev.kill('SIGTERM');
  }
});
test('[vercel dev] should send `etag` header for static files', async () => {
  const dir = fixture('headers');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    const response = await fetch(`http://localhost:${port}/foo.txt`);
    // Pre-computed etag value for the fixture's `foo.txt` contents.
    const expected = 'd263af8ab880c0b97eb6c5c125b5d44f9e5addd9';
    expect(response.headers.get('etag')).toEqual(`"${expected}"`);
    expect((await response.text()).trim()).toEqual('hi');
  } finally {
    dev.kill('SIGTERM');
  }
});
// Frontend dev server (Next.js) and `api/` routes must be served by the
// same `vercel dev` instance; the pod ID from the first response is used
// to assert all later responses came from that same instance.
test('[vercel dev] should frontend dev server and routes', async () => {
  const dir = fixture('dev-server-and-routes');
  const { dev, port, readyResolver } = await testFixture(dir, {
    env: {
      VERCEL_DEV_COMMAND: 'next dev --port $PORT',
    },
  });
  try {
    await readyResolver;
    let res = await fetch(`http://localhost:${port}/`);
    validateResponseHeaders(res);
    // Extract the pod ID once; it is never reassigned, so `const`
    // (previously a mutable `let podId: string;` pre-declaration).
    const podId = res.headers.get('x-vercel-id')!.match(/:(\w+)-/)![1];
    let body = await res.text();
    expect(body.includes('hello, this is the frontend')).toBeTruthy();
    res = await fetch(`http://localhost:${port}/api/users`);
    validateResponseHeaders(res, podId);
    body = await res.text();
    expect(body).toEqual('users');
    res = await fetch(`http://localhost:${port}/api/users/1`);
    validateResponseHeaders(res, podId);
    body = await res.text();
    expect(body).toEqual('users/1');
    res = await fetch(`http://localhost:${port}/api/welcome`);
    validateResponseHeaders(res, podId);
    body = await res.text();
    expect(body).toEqual('hello and welcome');
  } finally {
    dev.kill('SIGTERM');
  }
});
// Routing through the `@vercel/static` and `@vercel/static-build`
// builders must work in dev mode.
test('[vercel dev] should support `@vercel/static` routing', async () => {
  const dir = fixture('static-routes');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    const res = await fetch(`http://localhost:${port}/`);
    expect(res.status).toEqual(200);
    const body = await res.text();
    expect(body.trim()).toEqual('<body>Hello!</body>');
  } finally {
    dev.kill('SIGTERM');
  }
});
test('[vercel dev] should support `@vercel/static-build` routing', async () => {
  const dir = fixture('static-build-routing');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    const res = await fetch(`http://localhost:${port}/api/date`);
    expect(res.status).toEqual(200);
    const body = await res.text();
    expect(body.startsWith('The current date:')).toBeTruthy();
  } finally {
    dev.kill('SIGTERM');
  }
});
// Directory listing, static files, lambdas, and 404s all served from the
// same fixture.
test('[vercel dev] should support directory listing', async () => {
  const dir = fixture('directory-listing');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    // Get directory listing
    let res = await fetch(`http://localhost:${port}/`);
    let body = await res.text();
    expect(res.status).toEqual(200);
    expect(body.includes('Index of')).toBeTruthy();
    // Get a file
    res = await fetch(`http://localhost:${port}/file.txt`);
    body = await res.text();
    expect(res.status).toEqual(200);
    expect(body.trim()).toEqual('Hello from file!');
    // Invoke a lambda
    res = await fetch(`http://localhost:${port}/lambda.js`);
    body = await res.text();
    expect(res.status).toEqual(200);
    expect(body).toEqual('Hello from Lambda!');
    // Trigger a 404
    res = await fetch(`http://localhost:${port}/does-not-exist`);
    expect(res.status).toEqual(404);
  } finally {
    dev.kill('SIGTERM');
  }
});
// The 404 body must be content-negotiated via the `Accept` header:
// HTML, JSON, or plain text.
test('[vercel dev] should respond with 404 listing with Accept header support', async () => {
  const dir = fixture('directory-listing');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    // HTML response
    let res = await fetch(`http://localhost:${port}/does-not-exist`, {
      headers: {
        Accept: 'text/html',
      },
    });
    expect(res.status).toEqual(404);
    expect(res.headers.get('content-type')).toEqual('text/html; charset=utf-8');
    let body = await res.text();
    expect(body.startsWith('<!DOCTYPE html>')).toBeTruthy();
    // JSON response
    res = await fetch(`http://localhost:${port}/does-not-exist`, {
      headers: {
        Accept: 'application/json',
      },
    });
    expect(res.status).toEqual(404);
    expect(res.headers.get('content-type')).toEqual('application/json');
    body = await res.text();
    expect(body).toEqual(
      '{"error":{"code":404,"message":"The page could not be found."}}\n'
    );
    // Plain text response
    res = await fetch(`http://localhost:${port}/does-not-exist`);
    expect(res.status).toEqual(404);
    body = await res.text();
    expect(res.headers.get('content-type')).toEqual(
      'text/plain; charset=utf-8'
    );
    expect(body).toEqual('The page could not be found.\n\nNOT_FOUND\n');
  } finally {
    dev.kill('SIGTERM');
  }
});
// Zero-config projects: API functions plus either a `public` directory or
// root-level static files should both be served.
test('[vercel dev] should support `public` directory with zero config', async () => {
  const dir = fixture('api-with-public');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    let res = await fetch(`http://localhost:${port}/api/user`);
    let body = await res.text();
    expect(body).toEqual('hello:user');
    res = await fetch(`http://localhost:${port}/`);
    body = await res.text();
    expect(body.startsWith('<h1>hello world</h1>')).toBeTruthy();
  } finally {
    dev.kill('SIGTERM');
  }
});
test('[vercel dev] should support static files with zero config', async () => {
  const dir = fixture('api-with-static');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    let res = await fetch(`http://localhost:${port}/api/user`);
    let body = await res.text();
    expect(body).toEqual('bye:user');
    res = await fetch(`http://localhost:${port}/`);
    body = await res.text();
    expect(body.startsWith('<h1>goodbye world</h1>')).toBeTruthy();
  } finally {
    dev.kill('SIGTERM');
  }
});
test('[vercel dev] should support custom 404 routes', async () => {
  const dir = fixture('custom-404');
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    // Test custom 404 with static dest
    let res = await fetch(`http://localhost:${port}/error.html`);
    expect(res.status).toEqual(404);
    let body = await res.text();
    expect(body.trim()).toEqual('<div>Custom 404 page</div>');
    // Test custom 404 with lambda dest
    res = await fetch(`http://localhost:${port}/error.js`);
    expect(res.status).toEqual(404);
    body = await res.text();
    expect(body).toEqual('Custom 404 Lambda\n');
    // Test regular 404 still works
    res = await fetch(`http://localhost:${port}/does-not-exist`);
    expect(res.status).toEqual(404);
    body = await res.text();
    expect(body).toEqual('The page could not be found.\n\nNOT_FOUND\n');
  } finally {
    dev.kill('SIGTERM');
  }
});
// A fixture referencing a nonexistent runtime must surface the underlying
// npm failure and a help link on stderr.
test('[vercel dev] prints `npm install` errors', async () => {
  const dir = fixture('runtime-not-installed');
  const result = await exec(dir);
  expect(result.stderr.includes('npm ERR! 404')).toBeTruthy();
  expect(
    result.stderr.includes('Failed to install `vercel dev` dependencies')
  ).toBeTruthy();
  expect(
    result.stderr.includes('https://vercel.link/npm-install-failed-dev')
  ).toBeTruthy();
});
// Deleting `vercel.json` while the dev server is running must drop the
// config-provided env vars without a restart. The original config is
// restored in `finally` so later tests see an unmodified fixture.
test('[vercel dev] `vercel.json` should be invalidated if deleted', async () => {
  const dir = fixture('invalidate-vercel-config');
  const configPath = join(dir, 'vercel.json');
  const originalConfig = await fs.readJSON(configPath);
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    {
      // Env var should be set from `vercel.json`
      const res = await fetch(`http://localhost:${port}/api`);
      const body = await res.json();
      expect(body.FOO).toBe('bar');
    }
    {
      // Env var should not be set after `vercel.json` is deleted
      await fs.remove(configPath);
      const res = await fetch(`http://localhost:${port}/api`);
      const body = await res.json();
      expect(body.FOO).toBe(undefined);
    }
  } finally {
    dev.kill('SIGTERM');
    await fs.writeJSON(configPath, originalConfig);
  }
});
// The Node.js helpers can be toggled two ways — `builds[].config.helpers`
// and the `NODEJS_HELPERS` build env var. Each rewrite of `vercel.json`
// must take effect on the very next request, with no restart.
test('[vercel dev] reflects changes to config and env without restart', async () => {
  const dir = fixture('node-helpers');
  const configPath = join(dir, 'vercel.json');
  const originalConfig = await fs.readJSON(configPath);
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    {
      // Node.js helpers should be available by default
      const res = await fetch(`http://localhost:${port}/?foo=bar`);
      const body = await res.json();
      expect(body.hasHelpers).toBe(true);
      expect(body.query.foo).toBe('bar');
    }
    {
      // Disable the helpers via `config.helpers = false`
      const config = {
        ...originalConfig,
        builds: [
          {
            ...originalConfig.builds[0],
            config: {
              helpers: false,
            },
          },
        ],
      };
      await fs.writeJSON(configPath, config);
      const res = await fetch(`http://localhost:${port}/?foo=bar`);
      const body = await res.json();
      expect(body.hasHelpers).toBe(false);
      expect(body.query).toBe(undefined);
    }
    {
      // Enable the helpers via `config.helpers = true`
      const config = {
        ...originalConfig,
        builds: [
          {
            ...originalConfig.builds[0],
            config: {
              helpers: true,
            },
          },
        ],
      };
      await fs.writeJSON(configPath, config);
      const res = await fetch(`http://localhost:${port}/?foo=baz`);
      const body = await res.json();
      expect(body.hasHelpers).toBe(true);
      expect(body.query.foo).toBe('baz');
    }
    {
      // Disable the helpers via `NODEJS_HELPERS = '0'`
      const config = {
        ...originalConfig,
        build: {
          env: {
            NODEJS_HELPERS: '0',
          },
        },
      };
      await fs.writeJSON(configPath, config);
      const res = await fetch(`http://localhost:${port}/?foo=baz`);
      const body = await res.json();
      expect(body.hasHelpers).toBe(false);
      expect(body.query).toBe(undefined);
    }
    {
      // Enable the helpers via `NODEJS_HELPERS = '1'`
      const config = {
        ...originalConfig,
        build: {
          env: {
            NODEJS_HELPERS: '1',
          },
        },
      };
      await fs.writeJSON(configPath, config);
      const res = await fetch(`http://localhost:${port}/?foo=boo`);
      const body = await res.json();
      expect(body.hasHelpers).toBe(true);
      expect(body.query.foo).toBe('boo');
    }
  } finally {
    dev.kill('SIGTERM');
    await fs.writeJSON(configPath, originalConfig);
  }
});
test('[vercel dev] `@vercel/node` TypeScript should be resolved by default', async () => {
  // The purpose of this test is to test that `@vercel/node` can properly
  // resolve the default "typescript" module when the project doesn't include
  // its own version. To properly test for this, a fixture needs to be created
  // *outside* of the `vercel` repo, since otherwise the root-level
  // "node_modules/typescript" is resolved as relative to the project, and
  // not relative to `@vercel/node` which is what we are testing for here.
  const dir = join(os.tmpdir(), 'vercel-node-typescript-resolve-test');
  const apiDir = join(dir, 'api');
  await fs.mkdirp(apiDir);
  await fs.writeFile(
    join(apiDir, 'hello.js'),
    'export default (req, res) => { res.end("world"); }'
  );
  const { dev, port, readyResolver } = await testFixture(dir);
  try {
    await readyResolver;
    const res = await fetch(`http://localhost:${port}/api/hello`);
    const body = await res.text();
    expect(body).toBe('world');
  } finally {
    dev.kill('SIGTERM');
    // Clean up the temp fixture created outside the repo.
    await fs.remove(dir);
  }
});
// The remaining tests use `testFixtureStdio`, which boots the fixture and
// hands the callback a `testPath(status, path, expectedBody?, headers?)`
// helper for compact request/response assertions.
test(
  '[vercel dev] validate routes that use `check: true`',
  testFixtureStdio('routes-check-true', async (testPath: any) => {
    await testPath(200, '/blog/post', 'Blog Home');
  })
);
test(
  '[vercel dev] validate routes that use `check: true` and `status` code',
  testFixtureStdio('routes-check-true-status', async (testPath: any) => {
    await testPath(403, '/secret');
    await testPath(200, '/post', 'This is a post.');
    await testPath(200, '/post.html', 'This is a post.');
  })
);
test(
  '[vercel dev] validate routes that use custom 404 page',
  testFixtureStdio('routes-custom-404', async (testPath: any) => {
    await testPath(200, '/', 'Home Page');
    await testPath(404, '/nothing', 'Custom User 404');
    await testPath(404, '/exact', 'Exact Custom 404');
    await testPath(200, '/api/hello', 'Hello');
    await testPath(404, '/api/nothing', 'Custom User 404');
  })
);
test(
  '[vercel dev] handles miss after route',
  testFixtureStdio('handle-miss-after-route', async (testPath: any) => {
    await testPath(200, '/post', 'Blog Post Page', {
      test: '1',
      override: 'one',
    });
  })
);
test(
  '[vercel dev] handles miss after rewrite',
  testFixtureStdio('handle-miss-after-rewrite', async (testPath: any) => {
    await testPath(200, '/post', 'Blog Post Page', {
      test: '1',
      override: 'one',
    });
    await testPath(200, '/blog/post', 'Blog Post Page', {
      test: '1',
      override: 'two',
    });
    await testPath(404, '/blog/about.html', undefined, {
      test: '1',
      override: 'two',
    });
  })
);
test(
  '[vercel dev] does not display directory listing after 404',
  testFixtureStdio('handle-miss-hide-dir-list', async (testPath: any) => {
    await testPath(404, '/post');
    await testPath(200, '/post/one.html', 'First Post');
  })
);
test(
  '[vercel dev] should preserve query string even after miss phase',
  testFixtureStdio('handle-miss-querystring', async (testPath: any) => {
    await testPath(200, '/', 'Index Page');
    if (process.env.CI && process.platform === 'darwin') {
      console.log('Skipping since GH Actions hangs for some reason');
    } else {
      await testPath(200, '/echo/first/second', 'a=first,b=second');
      await testPath(200, '/functions/echo.js?a=one&b=two', 'a=one,b=two');
    }
  })
);
test(
  '[vercel dev] handles hit after handle: filesystem',
  testFixtureStdio('handle-hit-after-fs', async (testPath: any) => {
    await testPath(200, '/blog.html', 'Blog Page', { test: '1' });
  })
);
test(
  '[vercel dev] handles hit after dest',
  testFixtureStdio('handle-hit-after-dest', async (testPath: any) => {
    await testPath(200, '/post', 'Blog Post', { test: '1', override: 'one' });
  })
);
test(
  '[vercel dev] handles hit after rewrite',
  testFixtureStdio('handle-hit-after-rewrite', async (testPath: any) => {
    await testPath(200, '/post', 'Blog Post', { test: '1', override: 'one' });
  })
);
test(
  '[vercel dev] should serve the public directory and api functions',
  testFixtureStdio('public-and-api', async (testPath: any) => {
    await testPath(200, '/', 'This is the home page');
    await testPath(200, '/about.html', 'This is the about page');
    await testPath(200, '/.well-known/humans.txt', 'We come in peace');
    await testPath(200, '/api/date', /current date/);
    await testPath(200, '/api/rand', /random number/);
    await testPath(200, '/api/rand.js', /random number/);
    await testPath(404, '/api/api', /NOT_FOUND/m);
    await testPath(404, '/nothing', /Custom 404 Page/);
  })
);
test(
  '[vercel dev] should allow user rewrites for path segment files',
  testFixtureStdio('test-zero-config-rewrite', async (testPath: any) => {
    await testPath(404, '/');
    await testPath(200, '/echo/1', '{"id":"1"}', {
      'Access-Control-Allow-Origin': '*',
    });
    await testPath(200, '/echo/2', '{"id":"2"}', {
      'Access-Control-Allow-Headers': '*',
    });
  })
);
test('[vercel dev] validate builds', async () => {
const directory = fixture('invalid-builds');
const output = await exec(directory);
expect(output.exitCode).toBe(1);
expect(output.stderr).toMatch(
/Invalid vercel\.json - `builds\[0\].src` should be string/m
);
});
test('[vercel dev] validate routes', async () => {
const directory = fixture('invalid-routes');
const output = await exec(directory);
expect(output.exitCode).toBe(1);
expect(output.stderr).toMatch(
/Invalid vercel\.json - `routes\[0\].src` should be string/m
);
});
test('[vercel dev] validate cleanUrls', async () => {
const directory = fixture('invalid-clean-urls');
const output = await exec(directory);
expect(output.exitCode).toBe(1);
expect(output.stderr).toMatch(
/Invalid vercel\.json - `cleanUrls` should be boolean/m
);
});
test('[vercel dev] validate trailingSlash', async () => {
const directory = fixture('invalid-trailing-slash');
const output = await exec(directory);
expect(output.exitCode).toBe(1);
expect(output.stderr).toMatch(
/Invalid vercel\.json - `trailingSlash` should be boolean/m
);
});
test('[vercel dev] validate rewrites', async () => {
const directory = fixture('invalid-rewrites');
const output = await exec(directory);
expect(output.exitCode).toBe(1);
expect(output.stderr).toMatch(
/Invalid vercel\.json - `rewrites\[0\].destination` should be string/m
);
}); | the_stack |
import { Project } from "./core/project";
import * as activeProject from "./activeProject";
// Resolves the active project for a file path, erroring out otherwise.
let getProject = activeProject.GetProject.ifCurrentOrErrorOut;
import { Types } from "../../../socket/socketContract";
import * as types from "../../../common/types";
import * as utils from "../../../common/utils";
// Promise-wrapping helper used by all exported query functions below.
let { resolve } = utils;
import * as fsu from "../../utils/fsu";
import { errorsCache } from "./cache/tsErrorsCache";
import { getPathCompletionsForAutocomplete } from "./modules/getPathCompletions";
/**
 * Returns the autocomplete entries for `position` in `filePath`.
 *
 * Three sources are combined, in increasing priority:
 *  - plain language-service completions (capped at `maxSuggestions`),
 *  - when the prefix is `(`: one snippet completion per overload signature
 *    (prepended),
 *  - file-path completions, which are prepended when present.
 */
export function getCompletionsAtPosition(query: Types.GetCompletionsAtPositionQuery): Promise<Types.GetCompletionsAtPositionResponse> {
    const { filePath, position, prefix } = query;
    const project = getProject(query.filePath);
    const service = project.languageService;
    const languageServiceHost = project.languageServiceHost;

    // May legitimately be `undefined` (e.g. no completions at this position).
    const completions: ts.CompletionInfo = service.getCompletionsAtPosition(filePath, position, undefined);
    let completionList = completions ? completions.entries.filter(x => !!x) : [];
    const endsInPunctuation = utils.prefixEndsInPunctuation(prefix);

    /** Doing too many suggestions is slowing us down in some cases */
    const maxSuggestions = 10000;
    // limit to maxSuggestions
    if (completionList.length > maxSuggestions) completionList = completionList.slice(0, maxSuggestions);

    let completionsToReturn: Types.Completion[] = completionList.map(c => {
        return {
            name: c.name,
            kind: c.kind,
            comment: '',
            display: ''
        };
    });

    /**
     * Add function signature help
     */
    if (query.prefix === '(') {
        const signatures = service.getSignatureHelpItems(query.filePath, query.position);
        if (signatures && signatures.items) {
            signatures.items.forEach((item, index) => {
                // Each parameter becomes a numbered snippet placeholder,
                // e.g. `{{1:foo: string}}`, joined by the separator (", ").
                const template: string = item.parameters.map((p, i) => {
                    const display = '{{' + (i + 1) + ':' + ts.displayPartsToString(p.displayParts) + '}}';
                    return display;
                }).join(ts.displayPartsToString(item.separatorDisplayParts));
                const name: string = item.parameters.map((p) => ts.displayPartsToString(p.displayParts))
                    .join(ts.displayPartsToString(item.separatorDisplayParts));
                // e.g. test(something:string):any;
                // prefix: test(
                // template: {{something}}
                // suffix: ): any;
                // (A full `description` string built from prefix + template +
                // suffix used to be computed here but was never used.)
                completionsToReturn.unshift({
                    kind: types.completionKindSnippet,
                    /** We use `(sig)` prefix to make sure its sorted by monaco to be at the top */
                    name: '(sig) ' + name,
                    insertText: template,
                    display: 'function signature',
                    comment: `Overload ${index + 1} of ${signatures.items.length}`
                });
            });
        }
    }

    /**
     * Add file path completions
     */
    const pathCompletions = getPathCompletionsForAutocomplete({
        position,
        project,
        filePath,
        prefix
    });
    if (pathCompletions.length) {
        completionsToReturn = pathCompletions.map(f => {
            const result: types.Completion = {
                kind: types.completionKindPath,
                name: f.relativePath,
                display: f.fileName,
                comment: f.fullPath,
                // Replace the partially-typed path string with the selection.
                textEdit: {
                    from: languageServiceHost.getLineAndCharacterOfPosition(filePath, f.pathStringRange.from),
                    to: languageServiceHost.getLineAndCharacterOfPosition(filePath, f.pathStringRange.to),
                    newText: f.relativePath
                }
            };
            return result;
        }).concat(completionsToReturn);
    }

    return resolve({
        completions: completionsToReturn,
        endsInPunctuation: endsInPunctuation
    });
}
/**
 * Resolves the full details (display string + documentation comment) for a
 * single completion entry previously returned by the completion list.
 *
 * Fix: dropped the unused `service` local and reused the destructured query
 * fields instead of re-reading `query.*`.
 */
export function getCompletionEntryDetails(query: Types.GetCompletionEntryDetailsQuery): Promise<Types.GetCompletionEntryDetailsResponse> {
    const { filePath, position, label } = query;
    const project = getProject(filePath);
    const completionDetails = project.languageService.getCompletionEntryDetails(filePath, position, label, undefined, undefined);
    /**
     * For JS Projects, TS will add all sorts of globals as members (because it cannot know for sure)
     * However if you try to `getCompletionEntryDetails` for them, you will get `undefined`.
     */
    if (!completionDetails) return resolve({ display: label, comment: '' });
    const comment = ts.displayPartsToString(completionDetails.documentation || []);
    const display = ts.displayPartsToString(completionDetails.displayParts || []);
    return resolve({ display, comment });
}
/**
 * Quick info (hover) for a position: the symbol's display string, its doc
 * comment, the text range it covers, plus any errors touching the position.
 */
export function quickInfo(query: Types.QuickInfoQuery): Promise<Types.QuickInfoResponse> {
    const project = getProject(query.filePath);
    const { languageServiceHost } = project;
    const errors = positionErrors({ filePath: query.filePath, position: query.position });
    const info = project.languageService.getQuickInfoAtPosition(query.filePath, query.position);
    // Nothing to show and nothing wrong here => not a valid hover target.
    if (!info && !errors.length) {
        return Promise.resolve({ valid: false });
    }
    // `info && {...}` keeps the payload undefined when TS has no quick info.
    const hover = info && {
        name: ts.displayPartsToString(info.displayParts || []),
        comment: ts.displayPartsToString(info.documentation || []),
        range: {
            from: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, info.textSpan.start),
            to: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, info.textSpan.start + info.textSpan.length),
        }
    };
    return resolve({ valid: true, info: hover, errors });
}
/** Utility */
/** Utility: cached errors for the file that touch the given byte position. */
function positionErrors(query: Types.FilePathPositionQuery): types.CodeError[] {
    const project = getProject(query.filePath);
    if (!project.includesSourceFile(query.filePath)) {
        return [];
    }
    const editorPos = project.languageServiceHost.getLineAndCharacterOfPosition(query.filePath, query.position);
    const touchesPosition = (e: types.CodeError) => {
        // Multiline error whose interior strictly spans this line
        if (e.from.line < editorPos.line && e.to.line > editorPos.line) return true;
        // Single-line error on this exact line whose character range covers the cursor
        return e.from.line == e.to.line
            && e.from.line == editorPos.line
            && e.from.ch <= editorPos.ch
            && e.to.ch >= editorPos.ch;
    };
    return errorsCache.getErrorsForFilePath(query.filePath).filter(touchesPosition);
}
/**
 * Rename support: checks whether the symbol at the position can be renamed
 * and, if so, collects every rename location grouped by file path.
 *
 * Fix: `languageService.findRenameLocations` is documented to return
 * `RenameLocation[] | undefined`; the original called `.forEach` on it
 * unguarded and could crash. Guard with `|| []`. Also `var` -> `const`.
 */
export function getRenameInfo(query: Types.GetRenameInfoQuery): Promise<Types.GetRenameInfoResponse> {
    const project = getProject(query.filePath);
    const findInStrings = false, findInComments = false;
    const info = project.languageService.getRenameInfo(query.filePath, query.position);
    if (info && info.canRename) {
        const locations: { [filePath: string]: ts.TextSpan[] } = {};
        const renameLocations = project.languageService.findRenameLocations(query.filePath, query.position, findInStrings, findInComments) || [];
        renameLocations.forEach(loc => {
            if (!locations[loc.fileName]) locations[loc.fileName] = [];
            // Using unshift makes them with maximum value on top ;)
            locations[loc.fileName].unshift(loc.textSpan);
        });
        return resolve({
            canRename: true,
            localizedErrorMessage: info.localizedErrorMessage,
            displayName: info.displayName,
            fullDisplayName: info.fullDisplayName,
            kind: info.kind,
            kindModifiers: info.kindModifiers,
            triggerSpan: info.triggerSpan,
            locations: locations
        });
    }
    else {
        return resolve({
            canRename: false
        });
    }
}
/**
 * Go-to-definition: every definition of the symbol at the position, each
 * mapped to an editor (line/ch) location.
 */
export function getDefinitionsAtPosition(query: Types.GetDefinitionsAtPositionQuery): Promise<Types.GetDefinitionsAtPositionResponse> {
    const project = getProject(query.filePath);
    const projectFileDirectory = project.configFile.projectFileDirectory;
    const definitions = project.languageService.getDefinitionAtPosition(query.filePath, query.position);
    if (!definitions || !definitions.length) {
        return resolve({ projectFileDirectory: projectFileDirectory, definitions: [] });
    }
    const mapped = definitions.map(d => ({
        filePath: d.fileName,
        // If we can get the filename *we are in the same program :P*
        position: project.languageServiceHost.getLineAndCharacterOfPosition(d.fileName, d.textSpan.start),
        span: d.textSpan,
    }));
    return resolve({ projectFileDirectory: projectFileDirectory, definitions: mapped });
}
import { getLangHelp } from "./modules/langHelp";
/**
 * "Doctor" panel aggregator: for the token under the editor cursor, gathers
 * language help, definitions, quick info and references into one response.
 * The response is `valid` if any of those sources produced something.
 */
export function getDoctorInfo(query: Types.GetDoctorInfoQuery): Promise<Types.GetDoctorInfoResponse> {
let project = getProject(query.filePath);
let filePath = query.filePath;
// Translate the editor (line/ch) position into a TS byte offset
let position = project.languageServiceHost.getPositionOfLineAndCharacter(query.filePath, query.editorPosition.line, query.editorPosition.ch);
// Get langHelp
const program = project.languageService.getProgram();
const sourceFile = program.getSourceFile(query.filePath);
const positionNode = ts.getTokenAtPosition(sourceFile, position, true);
const langHelp = getLangHelp(positionNode)
// Just collect other responses (definitions/quickInfo start eagerly, in parallel)
let defPromised = getDefinitionsAtPosition({ filePath, position });
let quickInfoPromised = quickInfo({ filePath, position: position });
return defPromised.then((defRes) => {
return quickInfoPromised.then((infoRes) => {
return getReferences({ filePath, position }).then(refRes => {
// Valid if *any* collected source had something to say
const valid = !!defRes.definitions.length || infoRes.valid || !!refRes.references.length || !!langHelp;
return {
valid,
definitions: defRes.definitions,
quickInfo: infoRes.valid && infoRes.info.name ? {
name: infoRes.info.name,
comment: infoRes.info.comment
} : null,
langHelp,
references: refRes.references
}
});
});
});
}
/** Find-all-references for the symbol at the given position. */
export function getReferences(query: Types.GetReferencesQuery): Promise<Types.GetReferencesResponse> {
    const project = getProject(query.filePath);
    const { languageService, languageServiceHost } = project;
    const refs = languageService.getReferencesAtPosition(query.filePath, query.position) || [];
    const references: ReferenceDetails[] = refs.map(r => ({
        filePath: r.fileName,
        position: languageServiceHost.getLineAndCharacterOfPosition(r.fileName, r.textSpan.start),
        span: r.textSpan
    }));
    return resolve({
        references
    })
}
/**
* Formatting
*/
import * as formatting from "./modules/formatting";
/** Format the whole document; returns editor-space (line/ch) text edits. */
export function formatDocument(query: Types.FormatDocumentQuery): Promise<Types.FormatDocumentResponse> {
    const project = getProject(query.filePath);
    const { languageServiceHost } = project;
    const changes = formatting.formatDocument(project, query.filePath, query.editorOptions);
    const edits: Types.FormattingEdit[] = changes.map(res => ({
        from: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, res.span.start),
        to: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, res.span.start + res.span.length),
        newText: res.newText
    }));
    return resolve({ edits });
}
/** Format only the given range; returns editor-space (line/ch) text edits. */
export function formatDocumentRange(query: Types.FormatDocumentRangeQuery): Promise<Types.FormatDocumentRangeResponse> {
    const project = getProject(query.filePath);
    const { languageServiceHost } = project;
    const changes = formatting.formatDocumentRange(project, query.filePath, query.from, query.to, query.editorOptions);
    const edits: Types.FormattingEdit[] = changes.map(res => ({
        from: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, res.span.start),
        to: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, res.span.start + res.span.length),
        newText: res.newText
    }));
    return resolve({ edits });
}
/**
 * Formatting edits triggered by typing a key (e.g. `;`, `}` or newline),
 * merged with the project's configured format-code options.
 */
export function getFormattingEditsAfterKeystroke(query: Types.FormattingEditsAfterKeystrokeQuery): Promise<Types.FormattingEditsAfterKeystrokeResponse> {
    const project = getProject(query.filePath);
    const { languageServiceHost, languageService } = project;
    const position = languageServiceHost.getPositionOfLineAndCharacter(query.filePath, query.editorPosition.line, query.editorPosition.ch);
    const options = formatting.completeFormatCodeOptions(query.editorOptions, project.configFile.project.formatCodeOptions);
    const changes = languageService.getFormattingEditsAfterKeystroke(query.filePath, position, query.key, options) || [];
    const edits: Types.FormattingEdit[] = changes.map(res => ({
        from: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, res.span.start),
        to: languageServiceHost.getLineAndCharacterOfPosition(query.filePath, res.span.start + res.span.length),
        newText: res.newText
    }));
    return resolve({ edits });
}
import { removeUnusedImports as removeUnusedImportsCore } from './modules/removeUnusedImports';
export function removeUnusedImports(query: Types.FilePathQuery): Promise<types.RefactoringsByFilePath> {
let project = getProject(query.filePath);
const { languageServiceHost, languageService } = project;
return resolve(removeUnusedImportsCore(query.filePath, languageService));
}
/**
* Symbol search
*/
//--------------------------------------------------------------------------
// getNavigateToItems
//--------------------------------------------------------------------------
// Look at
// https://github.com/Microsoft/TypeScript/blob/master/src/services/navigateTo.ts
// for inspiration
// Reason for forking:
// didn't give all results
// gave results from lib.d.ts
// I wanted the practice
/**
 * Collects every named declaration in a source file as a flat list of
 * NavigateToItems (name, kind, file, and editor position).
 *
 * Fix: removed the dead local helper `getTextOfIdentifierOrLiteral`, which
 * was never called anywhere in this function.
 */
function getSymbolsForFile(project: Project, sourceFile: ts.SourceFile): types.NavigateToItem[] {
    // Declaration name as written in source, or '' for anonymous declarations.
    const getDeclarationName = (declaration: ts.Declaration): string => {
        const result = ts.getNameOfDeclaration(declaration);
        return result === undefined ? '' : result.getText();
    };
    const items: types.NavigateToItem[] = [];
    const declarations = sourceFile.getNamedDeclarations();
    declarations.forEach((value: ts.Declaration[], key: string) => {
        value.forEach(declaration => {
            items.push({
                name: getDeclarationName(declaration),
                kind: ts.getNodeKind(declaration),
                filePath: sourceFile.fileName,
                fileName: utils.getFileName(sourceFile.fileName),
                position: project.languageServiceHost.getLineAndCharacterOfPosition(sourceFile.fileName, declaration.getStart())
            });
        });
    });
    return items;
}
/**
 * Project-wide symbol search: named declarations across all project files.
 * (Dropped the unused `languageService` local.)
 */
export function getNavigateToItems(query: {}): Promise<types.GetNavigateToItemsResponse> {
    const project = activeProject.GetProject.getCurrentIfAny();
    const items: types.NavigateToItem[] = [];
    for (const file of project.getProjectSourceFiles()) {
        items.push(...getSymbolsForFile(project, file));
    }
    return utils.resolve({ items });
}
/**
 * Symbol search scoped to a single file.
 * (Dropped the unused `languageService` local.)
 */
export function getNavigateToItemsForFilePath(query: { filePath: string }): Promise<types.GetNavigateToItemsResponse> {
    const project = activeProject.GetProject.getCurrentIfAny();
    const file = project.getSourceFile(query.filePath);
    const items = getSymbolsForFile(project, file);
    return utils.resolve({ items });
}
/**
* Dependency View
*/
import { getProgramDependencies } from "./modules/programDependencies";
/** Dependency-view data: import links between the program's files. */
export function getDependencies(query: {}): Promise<Types.GetDependenciesResponse> {
    const project = activeProject.GetProject.getCurrentIfAny();
    const links = getProgramDependencies(project.configFile, project.languageService.getProgram());
    return resolve({ links });
}
/**
* AST View
*/
import { astToText, astToTextFull } from "./modules/astToText";
/**
 * AST view: serializes the file's syntax tree — visitor-visible nodes or the
 * full tree depending on `query.mode`.
 *
 * Fix: the original had `if (!files.length) resolve({});` without `return`,
 * so it fell through, read `files[0]` (undefined) and crashed in
 * astToText/astToTextFull. Now returns early.
 */
export function getAST(query: Types.GetASTQuery): Promise<Types.GetASTResponse> {
    const project = getProject(query.filePath);
    const service = project.languageService;
    const files = service.getProgram().getSourceFiles().filter(x => x.fileName == query.filePath);
    if (!files.length) return resolve({});
    const sourceFile = files[0];
    const root = query.mode === Types.ASTMode.visitor
        ? astToText(sourceFile)
        : astToTextFull(sourceFile);
    return resolve({ root });
}
/**
* JS Ouput
*/
import { getRawJsOutput } from "./modules/building";
/**
 * Reports whether the emitted JS for a file is missing / stale / up to date,
 * and (when `autoEmit` is true and compileOnSave is enabled) writes the fresh
 * output to disk as a side effect to bring it up to date.
 */
export function getJSOutputStatus(query: Types.FilePathQuery, autoEmit = true): types.GetJSOutputStatusResponse {
const project = activeProject.GetProject.ifCurrent(query.filePath);
if (!project) {
return {
inActiveProject: false
}
}
const jsFile = getRawJsOutput(project, query.filePath);
/**
* We just read/write from disk for now
* Would be better if it interacted with master
*/
const getContents = (filePath: string) => fsu.existsSync(filePath) ? fsu.readFile(filePath) : '';
const setContents = fsu.writeFile;
/**
* Note: If we have compileOnSave as false then the output status isn't relevant
*/
const noJsFile = (project.configFile.project.compileOnSave === false)
|| (project.configFile.inMemory === true)
|| !jsFile;
// NoJSFile -> nothing expected; otherwise compare on-disk contents with the
// freshly generated output to decide up-to-date vs out-of-date.
let state
= noJsFile
? types.JSOutputState.NoJSFile
: getContents(jsFile.filePath) === jsFile.contents
? types.JSOutputState.JSUpToDate
: types.JSOutputState.JSOutOfDate;
/**
* If the state is JSOutOfDate we can easily fix that to bring it up to date for `compileOnSave`
*/
if (autoEmit && state === types.JSOutputState.JSOutOfDate && project.configFile.project.compileOnSave !== false) {
setContents(jsFile.filePath, jsFile.contents);
state = types.JSOutputState.JSUpToDate;
}
const outputStatus: types.JSOutputStatus = {
inputFilePath: query.filePath,
state,
// jsFile may be falsy in the NoJSFile case; guard the property read
outputFilePath: jsFile && jsFile.filePath
};
return {
inActiveProject: true,
outputStatus
};
}
/**
* Get Quick Fix
*/
import { QuickFix, QuickFixQueryInformation } from "./quickFix/quickFix";
import * as qf from "./quickFix/quickFix";
import { allQuickFixes } from "./quickFix/quickFixRegistry";
/**
 * Diagnostics for a file: syntactic first; only when the file parses cleanly
 * do we fall back to the (more expensive) semantic diagnostics.
 * NOTE: the name keeps its historical misspelling — callers use it as-is.
 */
function getDiagnositcsByFilePath(query: Types.FilePathQuery) {
    const project = getProject(query.filePath);
    const syntactic = project.languageService.getSyntacticDiagnostics(query.filePath);
    return syntactic.length !== 0
        ? syntactic
        : project.languageService.getSemanticDiagnostics(query.filePath);
}
/**
 * Bundles everything a quick-fix provider needs to analyse a position:
 * program, source text, diagnostics touching the position, the token node,
 * the type checker and format options. Files outside the project get empty
 * analysis inputs.
 */
function getInfoForQuickFixAnalysis(query: Types.GetQuickFixesQuery): QuickFixQueryInformation {
    const project = getProject(query.filePath);
    const program = project.languageService.getProgram();
    const sourceFile = program.getSourceFile(query.filePath);
    // Defaults for files that are not part of the project
    let sourceFileText: string = "";
    let fileErrors: ts.Diagnostic[] = [];
    let positionErrors: ts.Diagnostic[] = [];
    let positionErrorMessages: string[] = [];
    let positionNode: ts.Node = undefined;
    if (project.includesSourceFile(query.filePath)) {
        sourceFileText = sourceFile.getFullText();
        fileErrors = getDiagnositcsByFilePath(query);
        /** We want errors that are *touching* and therefore expand the query position by one */
        positionErrors = fileErrors.filter(e => ((e.start - 1) < query.position) && (e.start + e.length + 1) > query.position);
        positionErrorMessages = positionErrors.map(e => ts.flattenDiagnosticMessageText(e.messageText, '\n'));
        positionNode = ts.getTokenAtPosition(sourceFile, query.position, true);
    }
    return {
        project,
        program,
        sourceFile,
        sourceFileText,
        fileErrors,
        positionErrors,
        positionErrorMessages,
        position: query.position,
        positionNode,
        service: project.languageService,
        typeChecker: program.getTypeChecker(),
        filePath: query.filePath,
        formatOptions: {
            indentSize: query.indentSize
        }
    };
}
/** Key prefix distinguishing TypeScript's own code fixes (addressed by index) from our registered quick fixes. */
const tsCodefixPrefix = 'CodeFix:';
/**
 * Lists the quick fixes available at a position: our registered fixes plus
 * TypeScript's own code fixes (keyed `CodeFix:<index>`, placed at the top).
 */
export function getQuickFixes(query: Types.GetQuickFixesQuery): Promise<Types.GetQuickFixesResponse> {
    const project = getProject(query.filePath);
    const info = getInfoForQuickFixAnalysis(query);
    // We let each quickFix decide whether it wants to provide a fix for this file
    const fixes: { key: string; display: string }[] = [];
    for (const quickFix of allQuickFixes) {
        const canProvide = quickFix.canProvideFix(info);
        if (canProvide) {
            fixes.push({ key: quickFix.key, display: canProvide.display });
        }
    }
    /**
     * TS Code fixes
     * They come with the `changes` on query. So we use that on `get` as well as `apply`
     */
    const tsCodeFixes = project.languageService.getCodeFixesAtPosition(query.filePath, query.position, query.position, info.positionErrors.map(e => e.code), info.formatOptions);
    if (tsCodeFixes.length) {
        // unshift (in forEach order) keeps TS fixes ahead of ours
        tsCodeFixes.forEach((fix, i) => {
            fixes.unshift({
                key: `${tsCodefixPrefix}${i}`, display: fix.description
            });
        });
    }
    return resolve({ fixes });
}
/**
 * Applies a quick fix by key, returning refactorings grouped by file path.
 * Keys starting with `CodeFix:` select one of TypeScript's own code fixes by
 * index; any other key selects one of our registered quick fixes.
 */
export function applyQuickFix(query: Types.ApplyQuickFixQuery): Promise<Types.ApplyQuickFixResponse> {
    const info = getInfoForQuickFixAnalysis(query);
    /**
     * If TS Code fix
     */
    if (query.key.startsWith(tsCodefixPrefix)) {
        /** Find the code fix */
        const project = getProject(query.filePath);
        const tsCodeFixes = project.languageService.getCodeFixesAtPosition(query.filePath, query.position, query.position, info.positionErrors.map(e => e.code), info.formatOptions);
        const index = +query.key.substr(tsCodefixPrefix.length);
        const tsCodeFix = tsCodeFixes[index];
        /** Map code fix to refactoring */
        const refactorings: types.Refactoring[] = [];
        for (const change of tsCodeFix.changes) {
            for (const tc of change.textChanges) {
                refactorings.push({
                    filePath: change.fileName,
                    newText: tc.newText,
                    span: tc.span
                });
            }
        }
        return resolve({ refactorings: qf.getRefactoringsByFilePath(refactorings) });
    }
    const fix = allQuickFixes.find(x => x.key == query.key);
    const refactorings = qf.getRefactoringsByFilePath(fix.provideFix(info));
    return resolve({ refactorings });
}
/**
* Semantic Tree
*/
/**
 * Recursively sorts nav-bar items (and all of their children) in place by
 * the start offset of each item's first span.
 */
function sortNavbarItemsBySpan(items: ts.NavigationBarItem[]) {
    items.sort((left, right) => left.spans[0].start - right.spans[0].start);
    // sort children recursively
    items.forEach(item => {
        if (item.childItems) {
            sortNavbarItemsBySpan(item.childItems);
        }
    });
}
/**
 * The language service returns nav-bar items duplicated at several nesting
 * levels. This rebuilds a single-level tree: deduplicate items by
 * (span.start + text) keys, remember each item's best parent, then reattach
 * children via that parent map. Mutates the items (deletes/rebuilds
 * `childItems`) and returns the children of the root item.
 */
function flattenNavBarItems(items: ts.NavigationBarItem[]): ts.NavigationBarItem[] {
if (!items.length) return [];
const root = items[0];
/**
* Where we store the final good ones
*/
const results: ts.NavigationBarItem[] = [];
/** Just to remove the dupes for different keys */
const resultMapBig: { [key: string]: ts.NavigationBarItem } = Object.create(null);
/**
* The same items appear with different indent, and different `spans`
* But at least one span seems to match, hence this key(s) function
*/
const getKeys = (item: ts.NavigationBarItem): string[] => {
return item.spans.map(span => {
return `${span.start}-${item.text}`
});
};
/** This is used to unflatten the resulting map */
const getParentMapKey = (item: ts.NavigationBarItem): string => {
return `${item.spans[0].start}-${item.text}`
};
const parentMap: { [key: string]: ts.NavigationBarItem } = {};
/**
* First create a map of everything
*/
const addToMap = (item: ts.NavigationBarItem, parent: ts.NavigationBarItem) => {
const keys = getKeys(item);
const previous = keys.some(key => !!resultMapBig[key]);
// If we already have it no need to add it.
// This is because the first time it gets added the parent then is the best one
if (!previous) {
keys.forEach(key => resultMapBig[key] = item);
results.push(item);
if (item !== root) {
const parentMapKey = getParentMapKey(item)
parentMap[parentMapKey] = parent;
}
}
// Whatever is in the final map is the version we want to use for parent pointers
const itemToUseAsParent = resultMapBig[getParentMapKey(item)];
// Also visit all children
item.childItems && item.childItems.forEach((child) => addToMap(child, itemToUseAsParent));
// Now delete the childItems as they are supposed to be restored by `parentMap`
delete item.childItems;
}
// Flatten into the map
items.forEach(item => addToMap(item, root));
// Now restore based on child pointers
results.forEach(item => {
if (item == root) return;
const key = getParentMapKey(item);
const parent = parentMap[key];
if (!parent.childItems) parent.childItems = [];
parent.childItems.push(item);
});
// Now we only need the children of the root :)
return results[0].childItems || [];
}
/**
 * Converts a TS NavigationBarItem (byte-offset spans) into our
 * SemanticTreeNode (editor line/ch positions), recursing into child items.
 */
function navigationBarItemToSemanticTreeNode(item: ts.NavigationBarItem, project: Project, query: Types.FilePathQuery): Types.SemanticTreeNode {
    const span = item.spans[0];
    const subNodes = item.childItems
        ? item.childItems.map(child => navigationBarItemToSemanticTreeNode(child, project, query))
        : [];
    return {
        text: item.text,
        kind: item.kind,
        kindModifiers: item.kindModifiers,
        start: project.languageServiceHost.getLineAndCharacterOfPosition(query.filePath, span.start),
        end: project.languageServiceHost.getLineAndCharacterOfPosition(query.filePath, span.start + span.length),
        subNodes: subNodes
    };
}
/**
 * Semantic tree for the outline view: flattens and span-sorts the language
 * service's nav-bar items, then converts them into SemanticTreeNodes.
 */
export function getSemanticTree(query: Types.GetSemanticTreeQuery): Promise<Types.GetSemanticTreeReponse> {
    const project = getProject(query.filePath);
    // The nav bar from the language service has nodes at various levels (with duplication).
    // We want a flat version ...
    const flattened = flattenNavBarItems(project.languageService.getNavigationBarItems(query.filePath));
    // ... sorted by first spans ...
    sortNavbarItemsBySpan(flattened);
    // ... converted to SemanticTreeNodes
    const nodes = flattened.map(item => navigationBarItemToSemanticTreeNode(item, project, query));
    return resolve({ nodes });
}
/**
* Document highlights
*/
export function getOccurrencesAtPosition(query: Types.GetOccurancesAtPositionQuery): Promise<Types.GetOccurancesAtPositionResponse> {
let project = getProject(query.filePath);
const { languageServiceHost } = project;
const position = languageServiceHost.getPositionOfLineAndCharacter(query.filePath, query.editorPosition.line, query.editorPosition.ch);
const tsresults = project.languageService.getOccurrencesAtPosition(query.filePath, position) || [];
const results: Types.GetOccurancesAtPositionResult[] = tsresults.map(res => {
const result: Types.GetOccurancesAtPositionResult = {
filePath: res.fileName,
isWriteAccess: res.isWriteAccess,
start: project.languageServiceHost.getLineAndCharacterOfPosition(res.fileName, res.textSpan.start),
end: project.languageServiceHost.getLineAndCharacterOfPosition(res.fileName, res.textSpan.start + res.textSpan.length),
}
return result;
});
return resolve({ results });
} | the_stack |
// NOTE(review): this and the following `declare module com` blocks look like
// auto-generated NativeScript typings for the Twitter Android SDK
// (com.twitter.sdk.android.core) — presumably regenerate rather than hand-edit.
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Abstract base for auth tokens; exposes creation time and an expiry check.
export abstract class AuthToken {
public static class: java.lang.Class<com.twitter.sdk.android.core.AuthToken>;
public createdAt: number;
public isExpired(): boolean;
public constructor();
public constructor(param0: number);
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Gson adapter: (de)serializes AuthToken to/from JsonElement.
export class AuthTokenAdapter extends java.lang.Object {
public static class: java.lang.Class<com.twitter.sdk.android.core.AuthTokenAdapter>;
public deserialize(param0: com.google.gson.JsonElement, param1: java.lang.reflect.Type, param2: com.google.gson.JsonDeserializationContext): com.twitter.sdk.android.core.AuthToken;
public constructor();
public serialize(param0: com.twitter.sdk.android.core.AuthToken, param1: java.lang.reflect.Type, param2: com.google.gson.JsonSerializationContext): com.google.gson.JsonElement;
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Build-time constants for the SDK artifact (presumably the Gradle-generated BuildConfig).
export class BuildConfig {
public static class: java.lang.Class<com.twitter.sdk.android.core.BuildConfig>;
public static DEBUG: boolean;
public static APPLICATION_ID: string;
public static BUILD_TYPE: string;
public static FLAVOR: string;
public static VERSION_CODE: number;
public static VERSION_NAME: string;
public static ARTIFACT_ID: string;
public static BUILD_NUMBER: string;
public static BUILD_TIME: number;
public static GROUP: string;
public constructor();
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// retrofit2.Callback adapter exposing Twitter-style success/failure hooks
// alongside the raw onResponse/onFailure callbacks.
export abstract class Callback<T> extends retrofit2.Callback<any> {
public static class: java.lang.Class<com.twitter.sdk.android.core.Callback<any>>;
public constructor();
public onFailure(param0: retrofit2.Call<any>, param1: java.lang.Throwable): void;
public onResponse(param0: retrofit2.Call<any>, param1: retrofit2.Response<any>): void;
public success(param0: com.twitter.sdk.android.core.Result<any>): void;
public failure(param0: com.twitter.sdk.android.core.TwitterException): void;
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Default Logger implementation with a configurable log level
// (settable via the constructor or setLogLevel).
export class DefaultLogger extends com.twitter.sdk.android.core.Logger {
public static class: java.lang.Class<com.twitter.sdk.android.core.DefaultLogger>;
public e(param0: string, param1: string, param2: java.lang.Throwable): void;
public constructor();
public w(param0: string, param1: string): void;
public v(param0: string, param1: string): void;
public log(param0: number, param1: string, param2: string): void;
public getLogLevel(): number;
public w(param0: string, param1: string, param2: java.lang.Throwable): void;
public log(param0: number, param1: string, param2: string, param3: boolean): void;
public e(param0: string, param1: string): void;
public isLoggable(param0: string, param1: number): boolean;
public d(param0: string, param1: string): void;
public i(param0: string, param1: string, param2: java.lang.Throwable): void;
public d(param0: string, param1: string, param2: java.lang.Throwable): void;
public constructor(param0: number);
public v(param0: string, param1: string, param2: java.lang.Throwable): void;
public setLogLevel(param0: number): void;
public i(param0: string, param1: string): void;
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Session specialized to guest (unauthenticated) auth tokens.
export class GuestSession extends com.twitter.sdk.android.core.Session<com.twitter.sdk.android.core.internal.oauth.GuestAuthToken> {
public static class: java.lang.Class<com.twitter.sdk.android.core.GuestSession>;
public static LOGGED_OUT_USER_ID: number;
public constructor(param0: com.twitter.sdk.android.core.internal.oauth.GuestAuthToken);
public constructor(param0: any, param1: number);
}
export module GuestSession {
// String (de)serialization strategy used to persist GuestSession instances.
export class Serializer extends com.twitter.sdk.android.core.internal.persistence.SerializationStrategy<com.twitter.sdk.android.core.GuestSession> {
public static class: java.lang.Class<com.twitter.sdk.android.core.GuestSession.Serializer>;
public constructor();
public deserialize(param0: string): com.twitter.sdk.android.core.GuestSession;
public deserialize(param0: string): any;
public serialize(param0: any): string;
public serialize(param0: com.twitter.sdk.android.core.GuestSession): string;
}
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Supplies and refreshes the current guest session; constructed from an
// OAuth2Service and a GuestSession session manager.
export class GuestSessionProvider {
public static class: java.lang.Class<com.twitter.sdk.android.core.GuestSessionProvider>;
public constructor(param0: com.twitter.sdk.android.core.internal.oauth.OAuth2Service, param1: com.twitter.sdk.android.core.SessionManager<com.twitter.sdk.android.core.GuestSession>);
public getCurrentSession(): com.twitter.sdk.android.core.GuestSession;
public refreshCurrentSession(param0: com.twitter.sdk.android.core.GuestSession): com.twitter.sdk.android.core.GuestSession;
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Static helpers for checking and safely starting Android intents.
export class IntentUtils {
public static class: java.lang.Class<com.twitter.sdk.android.core.IntentUtils>;
public constructor();
public static safeStartActivity(param0: globalAndroid.content.Context, param1: globalAndroid.content.Intent): boolean;
public static isActivityAvailable(param0: globalAndroid.content.Context, param1: globalAndroid.content.Intent): boolean;
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Java Logger interface, surfaced by NativeScript as a class whose
// constructor accepts an implementation object.
export class Logger {
public static class: java.lang.Class<com.twitter.sdk.android.core.Logger>;
/**
* Constructs a new instance of the com.twitter.sdk.android.core.Logger interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
*/
public constructor(implementation: {
isLoggable(param0: string, param1: number): boolean;
getLogLevel(): number;
setLogLevel(param0: number): void;
d(param0: string, param1: string, param2: java.lang.Throwable): void;
v(param0: string, param1: string, param2: java.lang.Throwable): void;
i(param0: string, param1: string, param2: java.lang.Throwable): void;
w(param0: string, param1: string, param2: java.lang.Throwable): void;
e(param0: string, param1: string, param2: java.lang.Throwable): void;
d(param0: string, param1: string): void;
v(param0: string, param1: string): void;
i(param0: string, param1: string): void;
w(param0: string, param1: string): void;
e(param0: string, param1: string): void;
log(param0: number, param1: string, param2: string): void;
log(param0: number, param1: string, param2: string, param3: boolean): void;
});
public constructor();
public e(param0: string, param1: string, param2: java.lang.Throwable): void;
public w(param0: string, param1: string): void;
public v(param0: string, param1: string): void;
public log(param0: number, param1: string, param2: string): void;
public getLogLevel(): number;
public w(param0: string, param1: string, param2: java.lang.Throwable): void;
public log(param0: number, param1: string, param2: string, param3: boolean): void;
public e(param0: string, param1: string): void;
public isLoggable(param0: string, param1: number): boolean;
public d(param0: string, param1: string): void;
public i(param0: string, param1: string, param2: java.lang.Throwable): void;
public d(param0: string, param1: string, param2: java.lang.Throwable): void;
public v(param0: string, param1: string, param2: java.lang.Throwable): void;
public setLogLevel(param0: number): void;
public i(param0: string, param1: string): void;
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Produces OAuth authorization and OAuth Echo headers from an auth
// config plus an auth token.
export class OAuthSigning {
public static class: java.lang.Class<com.twitter.sdk.android.core.OAuthSigning>;
public getAuthorizationHeader(param0: string, param1: string, param2: java.util.Map<string,string>): string;
public getOAuthEchoHeaders(param0: string, param1: string, param2: java.util.Map<string,string>): java.util.Map<string,string>;
public getOAuthEchoHeadersForVerifyCredentials(): java.util.Map<string,string>;
public constructor(param0: com.twitter.sdk.android.core.TwitterAuthConfig, param1: com.twitter.sdk.android.core.TwitterAuthToken);
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// SessionManager backed by a PreferenceStore, using a
// SerializationStrategy to persist sessions.
export class PersistedSessionManager<T> extends com.twitter.sdk.android.core.SessionManager<any> {
public static class: java.lang.Class<com.twitter.sdk.android.core.PersistedSessionManager<any>>;
public setSession(param0: number, param1: any): void;
public clearActiveSession(): void;
public clearSession(param0: number): void;
public constructor(param0: com.twitter.sdk.android.core.internal.persistence.PreferenceStore, param1: com.twitter.sdk.android.core.internal.persistence.SerializationStrategy<any>, param2: string, param3: string);
public getSessionMap(): java.util.Map<java.lang.Long,any>;
public setActiveSession(param0: any): void;
public getActiveSession(): any;
public getSession(param0: number): any;
}
}
}
}
}
}
declare module com {
export module twitter {
export module sdk {
export module android {
export module core {
// Pairs a typed result value with the raw retrofit2 Response it came from.
export class Result<T> extends java.lang.Object {
public static class: java.lang.Class<com.twitter.sdk.android.core.Result<any>>;
public data: T;
public response: retrofit2.Response;
public constructor(param0: T, param1: retrofit2.Response);
}
}
}
}
}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.Session, parameterized on its auth-token type T. */
	export class Session<T> extends java.lang.Object {
		public static class: java.lang.Class<com.twitter.sdk.android.core.Session<any>>;
		public constructor(param0: T, param1: number);
		public getAuthToken(): T;
		public getId(): number;
		public equals(param0: any): boolean;
		public hashCode(): number;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native interface com.twitter.sdk.android.core.SessionManager. */
	export class SessionManager<T> extends java.lang.Object {
		public static class: java.lang.Class<com.twitter.sdk.android.core.SessionManager<any>>;
		/**
		 * Constructs a new instance of the com.twitter.sdk.android.core.SessionManager<any>
		 * interface with the provided implementation. An empty constructor exists calling
		 * super() when extending the interface class.
		 */
		public constructor(implementation: {
			getActiveSession(): T;
			setActiveSession(param0: T): void;
			clearActiveSession(): void;
			getSession(param0: number): T;
			setSession(param0: number, param1: T): void;
			clearSession(param0: number): void;
			getSessionMap(): java.util.Map<java.lang.Long,T>;
		});
		public constructor();
		public getActiveSession(): T;
		public setActiveSession(param0: T): void;
		public clearActiveSession(): void;
		public getSession(param0: number): T;
		public setSession(param0: number, param1: T): void;
		public clearSession(param0: number): void;
		public getSessionMap(): java.util.Map<java.lang.Long,T>;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.Twitter (SDK singleton entry point). */
	export class Twitter {
		public static class: java.lang.Class<com.twitter.sdk.android.core.Twitter>;
		public static TAG: string;
		public static initialize(param0: globalAndroid.content.Context): void;
		public static initialize(param0: com.twitter.sdk.android.core.TwitterConfig): void;
		public static getInstance(): com.twitter.sdk.android.core.Twitter;
		public static getLogger(): com.twitter.sdk.android.core.Logger;
		public static isDebug(): boolean;
		public getContext(param0: string): globalAndroid.content.Context;
		public getTwitterAuthConfig(): com.twitter.sdk.android.core.TwitterAuthConfig;
		public getActivityLifecycleManager(): com.twitter.sdk.android.core.internal.ActivityLifecycleManager;
		public getExecutorService(): java.util.concurrent.ExecutorService;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterApiClient (factory for the REST service endpoints). */
	export class TwitterApiClient {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterApiClient>;
		public constructor();
		public constructor(param0: okhttp3.OkHttpClient);
		public constructor(param0: com.twitter.sdk.android.core.TwitterSession);
		public constructor(param0: com.twitter.sdk.android.core.TwitterSession, param1: okhttp3.OkHttpClient);
		public getAccountService(): com.twitter.sdk.android.core.services.AccountService;
		public getCollectionService(): com.twitter.sdk.android.core.services.CollectionService;
		public getConfigurationService(): com.twitter.sdk.android.core.services.ConfigurationService;
		public getFavoriteService(): com.twitter.sdk.android.core.services.FavoriteService;
		public getListService(): com.twitter.sdk.android.core.services.ListService;
		public getMediaService(): com.twitter.sdk.android.core.services.MediaService;
		public getSearchService(): com.twitter.sdk.android.core.services.SearchService;
		public getStatusesService(): com.twitter.sdk.android.core.services.StatusesService;
		public getService(param0: java.lang.Class): any;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterApiErrorConstants (numeric API error codes). */
	export class TwitterApiErrorConstants {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterApiErrorConstants>;
		public static CLIENT_NOT_PRIVILEGED: number;
		public static COULD_NOT_AUTHENTICATE: number;
		public static PAGE_NOT_EXIST: number;
		public static RATE_LIMIT_EXCEEDED: number;
		public static UNKNOWN_ERROR: number;
		public constructor();
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterApiException (TwitterException carrying API error details). */
	export class TwitterApiException extends com.twitter.sdk.android.core.TwitterException {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterApiException>;
		public static DEFAULT_ERROR_CODE: number;
		public static readApiError(param0: retrofit2.Response): com.twitter.sdk.android.core.models.ApiError;
		public static readApiRateLimit(param0: retrofit2.Response): com.twitter.sdk.android.core.TwitterRateLimit;
		public constructor(param0: retrofit2.Response);
		public constructor(param0: string, param1: java.lang.Throwable);
		public constructor(param0: string);
		public getErrorCode(): number;
		public getErrorMessage(): string;
		public getStatusCode(): number;
		public getResponse(): retrofit2.Response;
		public getTwitterRateLimit(): com.twitter.sdk.android.core.TwitterRateLimit;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterAuthConfig (parcelable consumer key/secret pair). */
	export class TwitterAuthConfig {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterAuthConfig>;
		public static CREATOR: globalAndroid.os.Parcelable.Creator<com.twitter.sdk.android.core.TwitterAuthConfig>;
		public static DEFAULT_AUTH_REQUEST_CODE: number;
		public constructor(param0: string, param1: string);
		public getConsumerKey(): string;
		public getConsumerSecret(): string;
		public getRequestCode(): number;
		public describeContents(): number;
		public writeToParcel(param0: globalAndroid.os.Parcel, param1: number): void;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterAuthException. */
	export class TwitterAuthException extends com.twitter.sdk.android.core.TwitterException {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterAuthException>;
		public constructor(param0: string);
		public constructor(param0: string, param1: java.lang.Throwable);
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterAuthToken (parcelable OAuth token/secret pair). */
	export class TwitterAuthToken extends com.twitter.sdk.android.core.AuthToken {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterAuthToken>;
		public static CREATOR: globalAndroid.os.Parcelable.Creator<com.twitter.sdk.android.core.TwitterAuthToken>;
		public token: string;
		public secret: string;
		public constructor();
		public constructor(param0: number);
		public constructor(param0: string, param1: string);
		public isExpired(): boolean;
		public describeContents(): number;
		public writeToParcel(param0: globalAndroid.os.Parcel, param1: number): void;
		public equals(param0: any): boolean;
		public hashCode(): number;
		public toString(): string;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterConfig. */
	export class TwitterConfig {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterConfig>;
	}
	export namespace TwitterConfig {
		/** Builder for TwitterConfig; each setter returns the builder for chaining. */
		export class Builder {
			public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterConfig.Builder>;
			public constructor(param0: globalAndroid.content.Context);
			public debug(param0: boolean): com.twitter.sdk.android.core.TwitterConfig.Builder;
			public executorService(param0: java.util.concurrent.ExecutorService): com.twitter.sdk.android.core.TwitterConfig.Builder;
			public logger(param0: com.twitter.sdk.android.core.Logger): com.twitter.sdk.android.core.TwitterConfig.Builder;
			public twitterAuthConfig(param0: com.twitter.sdk.android.core.TwitterAuthConfig): com.twitter.sdk.android.core.TwitterConfig.Builder;
			public build(): com.twitter.sdk.android.core.TwitterConfig;
		}
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterContext (Context exposing file/db/prefs accessors). */
	export class TwitterContext {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterContext>;
		public getFilesDir(): java.io.File;
		public getExternalFilesDir(param0: string): java.io.File;
		public getCacheDir(): java.io.File;
		public getExternalCacheDir(): java.io.File;
		public getDatabasePath(param0: string): java.io.File;
		public getSharedPreferences(param0: string, param1: number): globalAndroid.content.SharedPreferences;
		public openOrCreateDatabase(param0: string, param1: number, param2: globalAndroid.database.sqlite.SQLiteDatabase.CursorFactory): globalAndroid.database.sqlite.SQLiteDatabase;
		public openOrCreateDatabase(param0: string, param1: number, param2: globalAndroid.database.sqlite.SQLiteDatabase.CursorFactory, param3: globalAndroid.database.DatabaseErrorHandler): globalAndroid.database.sqlite.SQLiteDatabase;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterCore (singleton access to sessions and API clients). */
	export class TwitterCore {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterCore>;
		public static TAG: string;
		public static getInstance(): com.twitter.sdk.android.core.TwitterCore;
		public getIdentifier(): string;
		public getVersion(): string;
		public getAuthConfig(): com.twitter.sdk.android.core.TwitterAuthConfig;
		public getSessionManager(): com.twitter.sdk.android.core.SessionManager<com.twitter.sdk.android.core.TwitterSession>;
		public getGuestSessionProvider(): com.twitter.sdk.android.core.GuestSessionProvider;
		public getApiClient(): com.twitter.sdk.android.core.TwitterApiClient;
		public getApiClient(param0: com.twitter.sdk.android.core.TwitterSession): com.twitter.sdk.android.core.TwitterApiClient;
		public addApiClient(param0: com.twitter.sdk.android.core.TwitterSession, param1: com.twitter.sdk.android.core.TwitterApiClient): void;
		public getGuestApiClient(): com.twitter.sdk.android.core.TwitterApiClient;
		public addGuestApiClient(param0: com.twitter.sdk.android.core.TwitterApiClient): void;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterException (base exception of the SDK typings here). */
	export class TwitterException {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterException>;
		public constructor(param0: string);
		public constructor(param0: string, param1: java.lang.Throwable);
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterRateLimit (limit/remaining/reset accessors). */
	export class TwitterRateLimit {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterRateLimit>;
		public getLimit(): number;
		public getRemaining(): number;
		public getReset(): number;
	}
}
declare namespace com.twitter.sdk.android.core {
	/** Typing for the native class com.twitter.sdk.android.core.TwitterSession (Session keyed by a TwitterAuthToken). */
	export class TwitterSession extends com.twitter.sdk.android.core.Session<com.twitter.sdk.android.core.TwitterAuthToken> {
		public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterSession>;
		public static UNKNOWN_USER_ID: number;
		public static UNKNOWN_USER_NAME: string;
		public constructor(param0: com.twitter.sdk.android.core.TwitterAuthToken, param1: number, param2: string);
		public constructor(param0: any, param1: number);
		public getUserId(): number;
		public getUserName(): string;
		public equals(param0: any): boolean;
		public hashCode(): number;
	}
	export namespace TwitterSession {
		/** String (de)serialization strategy for TwitterSession persistence. */
		export class Serializer extends com.twitter.sdk.android.core.internal.persistence.SerializationStrategy<com.twitter.sdk.android.core.TwitterSession> {
			public static class: java.lang.Class<com.twitter.sdk.android.core.TwitterSession.Serializer>;
			public serialize(param0: com.twitter.sdk.android.core.TwitterSession): string;
			public serialize(param0: any): string;
			public deserialize(param0: string): com.twitter.sdk.android.core.TwitterSession;
			public deserialize(param0: string): any;
		}
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the abstract native class com.twitter.sdk.android.core.identity.AuthHandler. */
	export abstract class AuthHandler {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.AuthHandler>;
		public requestCode: number;
		public authorize(param0: globalAndroid.app.Activity): boolean;
		public handleOnActivityResult(param0: number, param1: number, param2: globalAndroid.content.Intent): boolean;
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.AuthState (tracks an in-flight authorize flow). */
	export class AuthState {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.AuthState>;
		public beginAuthorize(param0: globalAndroid.app.Activity, param1: com.twitter.sdk.android.core.identity.AuthHandler): boolean;
		public endAuthorize(): void;
		public isAuthorizeInProgress(): boolean;
		public getAuthHandler(): com.twitter.sdk.android.core.identity.AuthHandler;
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.OAuthActivity (hosts the OAuth web flow; implements OAuthController.Listener). */
	export class OAuthActivity implements com.twitter.sdk.android.core.identity.OAuthController.Listener {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.OAuthActivity>;
		public constructor();
		public onCreate(param0: globalAndroid.os.Bundle): void;
		public onSaveInstanceState(param0: globalAndroid.os.Bundle): void;
		public onBackPressed(): void;
		public onComplete(param0: number, param1: globalAndroid.content.Intent): void;
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.OAuthController (drives the web-based OAuth flow). */
	export class OAuthController extends com.twitter.sdk.android.core.identity.OAuthWebViewClient.Listener {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.OAuthController>;
		public onPageFinished(param0: globalAndroid.webkit.WebView, param1: string): void;
		public onSuccess(param0: globalAndroid.os.Bundle): void;
		public onError(param0: com.twitter.sdk.android.core.identity.WebViewException): void;
		public handleAuthError(param0: number, param1: com.twitter.sdk.android.core.TwitterAuthException): void;
	}
	export namespace OAuthController {
		/** Typing for the native interface com.twitter.sdk.android.core.identity.OAuthController$Listener. */
		export class Listener {
			public static class: java.lang.Class<com.twitter.sdk.android.core.identity.OAuthController.Listener>;
			/**
			 * Constructs a new instance of the com.twitter.sdk.android.core.identity.OAuthController$Listener
			 * interface with the provided implementation. An empty constructor exists calling
			 * super() when extending the interface class.
			 */
			public constructor(implementation: {
				onComplete(param0: number, param1: globalAndroid.content.Intent): void;
			});
			public constructor();
			public onComplete(param0: number, param1: globalAndroid.content.Intent): void;
		}
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.OAuthHandler. */
	export class OAuthHandler extends com.twitter.sdk.android.core.identity.AuthHandler {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.OAuthHandler>;
		public authorize(param0: globalAndroid.app.Activity): boolean;
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.OAuthWebChromeClient. */
	export class OAuthWebChromeClient {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.OAuthWebChromeClient>;
		public onConsoleMessage(param0: globalAndroid.webkit.ConsoleMessage): boolean;
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.OAuthWebViewClient (WebViewClient callbacks for the OAuth page). */
	export class OAuthWebViewClient {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.OAuthWebViewClient>;
		public shouldOverrideUrlLoading(param0: globalAndroid.webkit.WebView, param1: string): boolean;
		public onPageFinished(param0: globalAndroid.webkit.WebView, param1: string): void;
		public onReceivedError(param0: globalAndroid.webkit.WebView, param1: number, param2: string, param3: string): void;
		public onReceivedSslError(param0: globalAndroid.webkit.WebView, param1: globalAndroid.webkit.SslErrorHandler, param2: globalAndroid.net.http.SslError): void;
	}
	export namespace OAuthWebViewClient {
		/** Typing for the native interface com.twitter.sdk.android.core.identity.OAuthWebViewClient$Listener. */
		export class Listener {
			public static class: java.lang.Class<com.twitter.sdk.android.core.identity.OAuthWebViewClient.Listener>;
			/**
			 * Constructs a new instance of the com.twitter.sdk.android.core.identity.OAuthWebViewClient$Listener
			 * interface with the provided implementation. An empty constructor exists calling
			 * super() when extending the interface class.
			 */
			public constructor(implementation: {
				onPageFinished(param0: globalAndroid.webkit.WebView, param1: string): void;
				onSuccess(param0: globalAndroid.os.Bundle): void;
				onError(param0: com.twitter.sdk.android.core.identity.WebViewException): void;
			});
			public constructor();
			public onPageFinished(param0: globalAndroid.webkit.WebView, param1: string): void;
			public onSuccess(param0: globalAndroid.os.Bundle): void;
			public onError(param0: com.twitter.sdk.android.core.identity.WebViewException): void;
		}
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.SSOAuthHandler (single-sign-on via an installed Twitter app). */
	export class SSOAuthHandler extends com.twitter.sdk.android.core.identity.AuthHandler {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.SSOAuthHandler>;
		public static availableSSOPackage(param0: globalAndroid.content.pm.PackageManager): string;
		public static isAvailable(param0: globalAndroid.content.Context): boolean;
		public authorize(param0: globalAndroid.app.Activity): boolean;
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.TwitterAuthClient (programmatic log-in entry point). */
	export class TwitterAuthClient {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.TwitterAuthClient>;
		public constructor();
		public authorize(param0: globalAndroid.app.Activity, param1: com.twitter.sdk.android.core.Callback<com.twitter.sdk.android.core.TwitterSession>): void;
		public cancelAuthorize(): void;
		public requestEmail(param0: com.twitter.sdk.android.core.TwitterSession, param1: com.twitter.sdk.android.core.Callback<string>): void;
		public onActivityResult(param0: number, param1: number, param2: globalAndroid.content.Intent): void;
		public getRequestCode(): number;
	}
	export namespace TwitterAuthClient {
		/** Lazy-initialization holder for the shared AuthState. */
		export class AuthStateLazyHolder {
			public static class: java.lang.Class<com.twitter.sdk.android.core.identity.TwitterAuthClient.AuthStateLazyHolder>;
		}
		/** Callback adapter delegating session results/failures. */
		export class CallbackWrapper extends com.twitter.sdk.android.core.Callback<com.twitter.sdk.android.core.TwitterSession> {
			public static class: java.lang.Class<com.twitter.sdk.android.core.identity.TwitterAuthClient.CallbackWrapper>;
			public success(param0: com.twitter.sdk.android.core.Result<com.twitter.sdk.android.core.TwitterSession>): void;
			public success(param0: com.twitter.sdk.android.core.Result<any>): void;
			public failure(param0: com.twitter.sdk.android.core.TwitterException): void;
		}
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.TwitterLoginButton (log-in view widget). */
	export class TwitterLoginButton {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.TwitterLoginButton>;
		public constructor(param0: globalAndroid.content.Context);
		public constructor(param0: globalAndroid.content.Context, param1: globalAndroid.util.AttributeSet);
		public constructor(param0: globalAndroid.content.Context, param1: globalAndroid.util.AttributeSet, param2: number);
		public setCallback(param0: com.twitter.sdk.android.core.Callback<com.twitter.sdk.android.core.TwitterSession>): void;
		public getCallback(): com.twitter.sdk.android.core.Callback<com.twitter.sdk.android.core.TwitterSession>;
		public setOnClickListener(param0: globalAndroid.view.View.OnClickListener): void;
		public onActivityResult(param0: number, param1: number, param2: globalAndroid.content.Intent): void;
		public getActivity(): globalAndroid.app.Activity;
	}
	export namespace TwitterLoginButton {
		/** Click listener attached by the button to start the log-in flow. */
		export class LoginClickListener {
			public static class: java.lang.Class<com.twitter.sdk.android.core.identity.TwitterLoginButton.LoginClickListener>;
			public onClick(param0: globalAndroid.view.View): void;
		}
	}
}
declare namespace com.twitter.sdk.android.core.identity {
	/** Typing for the native class com.twitter.sdk.android.core.identity.WebViewException (error code, description, failing URL). */
	export class WebViewException {
		public static class: java.lang.Class<com.twitter.sdk.android.core.identity.WebViewException>;
		public getErrorCode(): number;
		public getDescription(): string;
		public getFailingUrl(): string;
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.ActivityLifecycleManager. */
	export class ActivityLifecycleManager {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.ActivityLifecycleManager>;
		public constructor(param0: globalAndroid.content.Context);
		public registerCallbacks(param0: com.twitter.sdk.android.core.internal.ActivityLifecycleManager.Callbacks): boolean;
		public resetCallbacks(): void;
	}
	export namespace ActivityLifecycleManager {
		/** Wrapper pairing registered callbacks with the Android lifecycle API. */
		export class ActivityLifecycleCallbacksWrapper {
			public static class: java.lang.Class<com.twitter.sdk.android.core.internal.ActivityLifecycleManager.ActivityLifecycleCallbacksWrapper>;
		}
		/** Overridable set of Activity lifecycle callbacks. */
		export abstract class Callbacks {
			public static class: java.lang.Class<com.twitter.sdk.android.core.internal.ActivityLifecycleManager.Callbacks>;
			public constructor();
			public onActivityCreated(param0: globalAndroid.app.Activity, param1: globalAndroid.os.Bundle): void;
			public onActivityStarted(param0: globalAndroid.app.Activity): void;
			public onActivityResumed(param0: globalAndroid.app.Activity): void;
			public onActivityPaused(param0: globalAndroid.app.Activity): void;
			public onActivityStopped(param0: globalAndroid.app.Activity): void;
			public onActivitySaveInstanceState(param0: globalAndroid.app.Activity, param1: globalAndroid.os.Bundle): void;
			public onActivityDestroyed(param0: globalAndroid.app.Activity): void;
		}
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.CommonUtils (static stream/resource/logging helpers). */
	export class CommonUtils {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.CommonUtils>;
		public constructor();
		public static copyStream(param0: java.io.InputStream, param1: java.io.OutputStream, param2: androidNative.Array<number>): void;
		public static streamToString(param0: java.io.InputStream): string;
		public static closeQuietly(param0: java.io.Closeable): void;
		public static closeOrLog(param0: java.io.Closeable, param1: string): void;
		public static getBooleanResourceValue(param0: globalAndroid.content.Context, param1: string, param2: boolean): boolean;
		public static getStringResourceValue(param0: globalAndroid.content.Context, param1: string, param2: string): string;
		public static logControlled(param0: globalAndroid.content.Context, param1: string): void;
		public static logControlled(param0: globalAndroid.content.Context, param1: number, param2: string, param3: string): void;
		public static logControlledError(param0: globalAndroid.content.Context, param1: string, param2: java.lang.Throwable): void;
		public static logOrThrowIllegalStateException(param0: string, param1: string): void;
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native interface com.twitter.sdk.android.core.internal.CurrentTimeProvider. */
	export class CurrentTimeProvider {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.CurrentTimeProvider>;
		/**
		 * Constructs a new instance of the com.twitter.sdk.android.core.internal.CurrentTimeProvider
		 * interface with the provided implementation. An empty constructor exists calling
		 * super() when extending the interface class.
		 */
		public constructor(implementation: {
			getCurrentTimeMillis(): number;
		});
		public constructor();
		public getCurrentTimeMillis(): number;
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.ExecutorUtils (named executor-service factories). */
	export class ExecutorUtils {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.ExecutorUtils>;
		public static buildSingleThreadScheduledExecutorService(param0: string): java.util.concurrent.ScheduledExecutorService;
		public static buildThreadPoolExecutorService(param0: string): java.util.concurrent.ExecutorService;
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.SessionMonitor (periodic session verification). */
	export class SessionMonitor<T> extends java.lang.Object {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.SessionMonitor<any>>;
		public monitorState: com.twitter.sdk.android.core.internal.SessionMonitor.MonitorState;
		public constructor(param0: com.twitter.sdk.android.core.SessionManager<T>, param1: java.util.concurrent.ExecutorService, param2: com.twitter.sdk.android.core.internal.SessionVerifier<T>);
		public monitorActivityLifecycle(param0: com.twitter.sdk.android.core.internal.ActivityLifecycleManager): void;
		public triggerVerificationIfNecessary(): void;
		public verifyAll(): void;
	}
	export namespace SessionMonitor {
		/** Mutable verification state (in-progress flag and last-verification timestamp). */
		export class MonitorState {
			public static class: java.lang.Class<com.twitter.sdk.android.core.internal.SessionMonitor.MonitorState>;
			public verifying: boolean;
			public lastVerification: number;
			public constructor();
			public beginVerification(param0: number): boolean;
			public endVerification(param0: number): void;
		}
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native interface com.twitter.sdk.android.core.internal.SessionVerifier. */
	export class SessionVerifier<T> extends java.lang.Object {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.SessionVerifier<any>>;
		/**
		 * Constructs a new instance of the com.twitter.sdk.android.core.internal.SessionVerifier<any>
		 * interface with the provided implementation. An empty constructor exists calling
		 * super() when extending the interface class.
		 */
		public constructor(implementation: {
			verifySession(param0: T): void;
		});
		public constructor();
		public verifySession(param0: T): void;
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.SystemCurrentTimeProvider (CurrentTimeProvider implementation). */
	export class SystemCurrentTimeProvider extends com.twitter.sdk.android.core.internal.CurrentTimeProvider {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.SystemCurrentTimeProvider>;
		public constructor();
		public getCurrentTimeMillis(): number;
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.TwitterApi (base host/URL helpers). */
	export class TwitterApi {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.TwitterApi>;
		public static BASE_HOST: string;
		public static BASE_HOST_URL: string;
		public static buildUserAgent(param0: string, param1: string): string;
		public constructor();
		public constructor(param0: string);
		public getBaseHostUrl(): string;
		public buildUponBaseHostUrl(param0: androidNative.Array<string>): globalAndroid.net.Uri.Builder;
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.TwitterApiConstants. */
	export class TwitterApiConstants {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.TwitterApiConstants>;
		public constructor();
	}
	export namespace TwitterApiConstants {
		/** Numeric error-code constants used by the API layer. */
		export class Errors {
			public static class: java.lang.Class<com.twitter.sdk.android.core.internal.TwitterApiConstants.Errors>;
			public static ALREADY_FAVORITED: number;
			public static ALREADY_UNFAVORITED: number;
			public static APP_AUTH_ERROR_CODE: number;
			public static GUEST_AUTH_ERROR_CODE: number;
			public constructor();
		}
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.TwitterSessionVerifier (SessionVerifier for TwitterSession). */
	export class TwitterSessionVerifier extends com.twitter.sdk.android.core.internal.SessionVerifier<com.twitter.sdk.android.core.TwitterSession> {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.TwitterSessionVerifier>;
		public constructor();
		public verifySession(param0: com.twitter.sdk.android.core.TwitterSession): void;
		public verifySession(param0: any): void;
	}
	export namespace TwitterSessionVerifier {
		/** Provides the AccountService used during verification. */
		export class AccountServiceProvider {
			public static class: java.lang.Class<com.twitter.sdk.android.core.internal.TwitterSessionVerifier.AccountServiceProvider>;
			public constructor();
			public getAccountService(param0: com.twitter.sdk.android.core.TwitterSession): com.twitter.sdk.android.core.services.AccountService;
		}
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.UserUtils (screen-name/avatar helpers). */
	export class UserUtils {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.UserUtils>;
		public static formatScreenName(param0: string): string;
		public static getProfileImageUrlHttps(param0: com.twitter.sdk.android.core.models.User, param1: com.twitter.sdk.android.core.internal.UserUtils.AvatarSize): string;
	}
	export namespace UserUtils {
		/** Java enum of avatar size variants. */
		export class AvatarSize {
			public static class: java.lang.Class<com.twitter.sdk.android.core.internal.UserUtils.AvatarSize>;
			public static MINI: com.twitter.sdk.android.core.internal.UserUtils.AvatarSize;
			public static NORMAL: com.twitter.sdk.android.core.internal.UserUtils.AvatarSize;
			public static BIGGER: com.twitter.sdk.android.core.internal.UserUtils.AvatarSize;
			public static REASONABLY_SMALL: com.twitter.sdk.android.core.internal.UserUtils.AvatarSize;
			public static ORIGINAL: com.twitter.sdk.android.core.internal.UserUtils.AvatarSize;
			public static valueOf(param0: string): com.twitter.sdk.android.core.internal.UserUtils.AvatarSize;
			public static values(): androidNative.Array<com.twitter.sdk.android.core.internal.UserUtils.AvatarSize>;
		}
	}
}
declare namespace com.twitter.sdk.android.core.internal {
	/** Typing for the native class com.twitter.sdk.android.core.internal.VineCardUtils (static helpers for Vine/player cards). */
	export class VineCardUtils {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.VineCardUtils>;
		public static PLAYER_CARD: string;
		public static VINE_CARD: string;
		public static VINE_USER_ID: number;
		public static isVine(param0: com.twitter.sdk.android.core.models.Card): boolean;
		public static getImageValue(param0: com.twitter.sdk.android.core.models.Card): com.twitter.sdk.android.core.models.ImageValue;
		public static getStreamUrl(param0: com.twitter.sdk.android.core.models.Card): string;
		public static getPublisherId(param0: com.twitter.sdk.android.core.models.Card): string;
	}
}
declare namespace com.twitter.sdk.android.core.internal.network {
	/** Typing for the native class com.twitter.sdk.android.core.internal.network.GuestAuthInterceptor (okhttp3 interceptor backed by a GuestSessionProvider). */
	export class GuestAuthInterceptor {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.network.GuestAuthInterceptor>;
		public constructor(param0: com.twitter.sdk.android.core.GuestSessionProvider);
		public intercept(param0: okhttp3.Interceptor.Chain): okhttp3.Response;
	}
}
declare namespace com.twitter.sdk.android.core.internal.network {
	/** Typing for the native class com.twitter.sdk.android.core.internal.network.GuestAuthNetworkInterceptor (okhttp3 network interceptor). */
	export class GuestAuthNetworkInterceptor {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.network.GuestAuthNetworkInterceptor>;
		public constructor();
		public intercept(param0: okhttp3.Interceptor.Chain): okhttp3.Response;
	}
}
declare namespace com.twitter.sdk.android.core.internal.network {
	/** Typing for the native class com.twitter.sdk.android.core.internal.network.GuestAuthenticator (okhttp3 authenticator backed by a GuestSessionProvider). */
	export class GuestAuthenticator {
		public static class: java.lang.Class<com.twitter.sdk.android.core.internal.network.GuestAuthenticator>;
		public constructor(param0: com.twitter.sdk.android.core.GuestSessionProvider);
		public authenticate(param0: okhttp3.Route, param1: okhttp3.Response): okhttp3.Request;
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module network {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.network.OAuth1aInterceptor:
							 * an okhttp3 interceptor built from a user Session and a TwitterAuthConfig.
							 */
							export class OAuth1aInterceptor {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.network.OAuth1aInterceptor>;
								public intercept(param0: okhttp3.Interceptor.Chain): okhttp3.Response;
								public constructor(param0: com.twitter.sdk.android.core.Session<any>, param1: com.twitter.sdk.android.core.TwitterAuthConfig);
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module network {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.network.OkHttpClientHelper.
							 * Static factories for okhttp3.OkHttpClient instances; note the Java
							 * overloads on getOkHttpClient/getCustomOkHttpClient (session-based vs.
							 * guest-session-based), surfaced here as duplicate method signatures.
							 */
							export class OkHttpClientHelper {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.network.OkHttpClientHelper>;
								public static getCustomOkHttpClient(param0: okhttp3.OkHttpClient, param1: com.twitter.sdk.android.core.Session<any>, param2: com.twitter.sdk.android.core.TwitterAuthConfig): okhttp3.OkHttpClient;
								public static getCertificatePinner(): okhttp3.CertificatePinner;
								public constructor();
								public static getOkHttpClient(param0: com.twitter.sdk.android.core.Session<any>, param1: com.twitter.sdk.android.core.TwitterAuthConfig): okhttp3.OkHttpClient;
								public static getCustomOkHttpClient(param0: okhttp3.OkHttpClient, param1: com.twitter.sdk.android.core.GuestSessionProvider): okhttp3.OkHttpClient;
								public static getOkHttpClient(param0: com.twitter.sdk.android.core.GuestSessionProvider): okhttp3.OkHttpClient;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module network {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.network.UrlUtils:
							 * static URL encode/decode helpers; getQueryParams is overloaded for
							 * string and java.net.URI inputs.
							 */
							export class UrlUtils {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.network.UrlUtils>;
								public static UTF8: string;
								public static getQueryParams(param0: string, param1: boolean): java.util.TreeMap<string,string>;
								public static urlDecode(param0: string): string;
								public static getQueryParams(param0: java.net.URI, param1: boolean): java.util.TreeMap<string,string>;
								public static percentEncode(param0: string): string;
								public static urlEncode(param0: string): string;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.GuestAuthToken,
							 * an OAuth2Token carrying an extra guest token. The many constructor
							 * signatures mirror the Java overloads (including inherited ones).
							 */
							export class GuestAuthToken extends com.twitter.sdk.android.core.internal.oauth.OAuth2Token {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.GuestAuthToken>;
								public static HEADER_GUEST_TOKEN: string;
								public constructor(param0: string, param1: string, param2: string, param3: number);
								public constructor(param0: number);
								public constructor(param0: string, param1: string);
								public constructor(param0: string, param1: string, param2: number);
								public equals(param0: any): boolean;
								public isExpired(): boolean;
								public constructor();
								public constructor(param0: string, param1: string, param2: string);
								public hashCode(): number;
								public getGuestToken(): string;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.GuestTokenResponse:
							 * a plain data holder exposing the guestToken field.
							 */
							export class GuestTokenResponse {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.GuestTokenResponse>;
								public guestToken: string;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.OAuth1aHeaders:
							 * builds OAuth1a Authorization / OAuth-Echo header values from auth
							 * config, token, HTTP method/URL and extra parameters.
							 */
							export class OAuth1aHeaders {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuth1aHeaders>;
								public static HEADER_AUTH_SERVICE_PROVIDER: string;
								public static HEADER_AUTH_CREDENTIALS: string;
								public getOAuthEchoHeaders(param0: com.twitter.sdk.android.core.TwitterAuthConfig, param1: com.twitter.sdk.android.core.TwitterAuthToken, param2: string, param3: string, param4: string, param5: java.util.Map<string,string>): java.util.Map<string,string>;
								public getAuthorizationHeader(param0: com.twitter.sdk.android.core.TwitterAuthConfig, param1: com.twitter.sdk.android.core.TwitterAuthToken, param2: string, param3: string, param4: string, param5: java.util.Map<string,string>): string;
								public constructor();
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.OAuth1aParameters.
							 * Only getAuthorizationHeader is exposed; the Java constructor is not
							 * surfaced in these generated typings.
							 */
							export class OAuth1aParameters {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuth1aParameters>;
								public getAuthorizationHeader(): string;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.OAuth1aService:
							 * the OAuth1a sign-in flow (temp token, authorize URL, access token),
							 * delivering results asynchronously via Callback<OAuthResponse>.
							 */
							export class OAuth1aService extends com.twitter.sdk.android.core.internal.oauth.OAuthService {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuth1aService>;
								public constructor(param0: com.twitter.sdk.android.core.TwitterCore, param1: com.twitter.sdk.android.core.internal.TwitterApi);
								public requestTempToken(param0: com.twitter.sdk.android.core.Callback<com.twitter.sdk.android.core.internal.oauth.OAuthResponse>): void;
								public getAuthorizeUrl(param0: com.twitter.sdk.android.core.TwitterAuthToken): string;
								public buildCallbackUrl(param0: com.twitter.sdk.android.core.TwitterAuthConfig): string;
								public static parseAuthResponse(param0: string): com.twitter.sdk.android.core.internal.oauth.OAuthResponse;
								public requestAccessToken(param0: com.twitter.sdk.android.core.Callback<com.twitter.sdk.android.core.internal.oauth.OAuthResponse>, param1: com.twitter.sdk.android.core.TwitterAuthToken, param2: string): void;
							}
							export module OAuth1aService {
								/**
								 * Typing for the nested retrofit2 interface OAuth1aService$OAuthApi.
								 * The implementation-object constructor is the NativeScript way of
								 * implementing a Java interface from JS/TS.
								 */
								export class OAuthApi {
									public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuth1aService.OAuthApi>;
									/**
									 * Constructs a new instance of the com.twitter.sdk.android.core.internal.oauth.OAuth1aService$OAuthApi interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
									 */
									public constructor(implementation: {
										getTempToken(param0: string): retrofit2.Call<okhttp3.ResponseBody>;
										getAccessToken(param0: string, param1: string): retrofit2.Call<okhttp3.ResponseBody>;
									});
									public constructor();
									public getTempToken(param0: string): retrofit2.Call<okhttp3.ResponseBody>;
									public getAccessToken(param0: string, param1: string): retrofit2.Call<okhttp3.ResponseBody>;
								}
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.OAuth2Service:
							 * requests app-auth/guest tokens, delivered via Callback<GuestAuthToken>.
							 */
							export class OAuth2Service extends com.twitter.sdk.android.core.internal.oauth.OAuthService {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuth2Service>;
								public constructor(param0: com.twitter.sdk.android.core.TwitterCore, param1: com.twitter.sdk.android.core.internal.TwitterApi);
								public requestGuestAuthToken(param0: com.twitter.sdk.android.core.Callback<com.twitter.sdk.android.core.internal.oauth.GuestAuthToken>): void;
							}
							export module OAuth2Service {
								/**
								 * Typing for the nested retrofit2 interface OAuth2Service$OAuth2Api;
								 * implementable from TS via the implementation-object constructor.
								 */
								export class OAuth2Api {
									public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuth2Service.OAuth2Api>;
									/**
									 * Constructs a new instance of the com.twitter.sdk.android.core.internal.oauth.OAuth2Service$OAuth2Api interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
									 */
									public constructor(implementation: {
										getGuestToken(param0: string): retrofit2.Call<com.twitter.sdk.android.core.internal.oauth.GuestTokenResponse>;
										getAppAuthToken(param0: string, param1: string): retrofit2.Call<com.twitter.sdk.android.core.internal.oauth.OAuth2Token>;
									});
									public constructor();
									public getGuestToken(param0: string): retrofit2.Call<com.twitter.sdk.android.core.internal.oauth.GuestTokenResponse>;
									public getAppAuthToken(param0: string, param1: string): retrofit2.Call<com.twitter.sdk.android.core.internal.oauth.OAuth2Token>;
								}
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.OAuth2Token,
							 * a Parcelable AuthToken (bearer-type); CREATOR is the standard Android
							 * Parcelable factory.
							 */
							export class OAuth2Token extends com.twitter.sdk.android.core.AuthToken {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuth2Token>;
								public static TOKEN_TYPE_BEARER: string;
								public static CREATOR: globalAndroid.os.Parcelable.Creator<com.twitter.sdk.android.core.internal.oauth.OAuth2Token>;
								public constructor(param0: number);
								public describeContents(): number;
								public getAccessToken(): string;
								public constructor(param0: string, param1: string);
								public constructor(param0: string, param1: string, param2: number);
								public getTokenType(): string;
								public equals(param0: any): boolean;
								public isExpired(): boolean;
								public constructor();
								public writeToParcel(param0: globalAndroid.os.Parcel, param1: number): void;
								public hashCode(): number;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.OAuthConstants:
							 * static string constants for OAuth header and parameter names.
							 */
							export class OAuthConstants {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuthConstants>;
								public static HEADER_AUTHORIZATION: string;
								public static HEADER_GUEST_TOKEN: string;
								public static PARAM_CALLBACK: string;
								public static PARAM_CONSUMER_KEY: string;
								public static PARAM_NONCE: string;
								public static PARAM_SIGNATURE_METHOD: string;
								public static PARAM_TIMESTAMP: string;
								public static PARAM_TOKEN: string;
								public static PARAM_TOKEN_SECRET: string;
								public static PARAM_VERSION: string;
								public static PARAM_SIGNATURE: string;
								public static PARAM_VERIFIER: string;
								public static AUTHORIZATION_BASIC: string;
								public static AUTHORIZATION_BEARER: string;
								public static PARAM_GRANT_TYPE: string;
								public static GRANT_TYPE_CLIENT_CREDENTIALS: string;
								public constructor();
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for com.twitter.sdk.android.core.internal.oauth.OAuthResponse:
							 * Parcelable result of an OAuth flow (auth token, user name and id).
							 */
							export class OAuthResponse {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuthResponse>;
								public static CREATOR: globalAndroid.os.Parcelable.Creator<com.twitter.sdk.android.core.internal.oauth.OAuthResponse>;
								public authToken: com.twitter.sdk.android.core.TwitterAuthToken;
								public userName: string;
								public userId: number;
								public toString(): string;
								public describeContents(): number;
								public constructor(param0: com.twitter.sdk.android.core.TwitterAuthToken, param1: string, param2: number);
								public writeToParcel(param0: globalAndroid.os.Parcel, param1: number): void;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module oauth {
							/**
							 * Typing for the abstract base class
							 * com.twitter.sdk.android.core.internal.oauth.OAuthService; exposes the
							 * accessors shared by OAuth1aService and OAuth2Service.
							 */
							export abstract class OAuthService {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.oauth.OAuthService>;
								public getUserAgent(): string;
								public getApi(): com.twitter.sdk.android.core.internal.TwitterApi;
								public getRetrofit(): retrofit2.Retrofit;
								public getTwitterCore(): com.twitter.sdk.android.core.TwitterCore;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module persistence {
							/**
							 * Typing for the Java interface
							 * com.twitter.sdk.android.core.internal.persistence.FileStore: accessors
							 * for the app's cache/files directories. Implementable from TS via the
							 * implementation-object constructor.
							 */
							export class FileStore {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.persistence.FileStore>;
								/**
								 * Constructs a new instance of the com.twitter.sdk.android.core.internal.persistence.FileStore interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
								 */
								public constructor(implementation: {
									getCacheDir(): java.io.File;
									getExternalCacheDir(): java.io.File;
									getFilesDir(): java.io.File;
									getExternalFilesDir(): java.io.File;
								});
								public constructor();
								public getExternalFilesDir(): java.io.File;
								public getExternalCacheDir(): java.io.File;
								public getCacheDir(): java.io.File;
								public getFilesDir(): java.io.File;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module persistence {
							/**
							 * Typing for FileStoreImpl, the Context-backed FileStore implementation.
							 */
							export class FileStoreImpl extends com.twitter.sdk.android.core.internal.persistence.FileStore {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.persistence.FileStoreImpl>;
								public getExternalFilesDir(): java.io.File;
								public getExternalCacheDir(): java.io.File;
								public getCacheDir(): java.io.File;
								public getFilesDir(): java.io.File;
								public constructor(param0: globalAndroid.content.Context);
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module persistence {
							/**
							 * Typing for the generic Java interface PersistenceStrategy<T>:
							 * save/restore/clear a single value of type T. Implementable from TS
							 * via the implementation-object constructor.
							 */
							export class PersistenceStrategy<T> extends java.lang.Object {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.persistence.PersistenceStrategy<any>>;
								/**
								 * Constructs a new instance of the com.twitter.sdk.android.core.internal.persistence.PersistenceStrategy<any> interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
								 */
								public constructor(implementation: {
									save(param0: T): void;
									restore(): T;
									clear(): void;
								});
								public constructor();
								public clear(): void;
								public save(param0: T): void;
								public restore(): T;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module persistence {
							/**
							 * Typing for the Java interface PreferenceStore: thin wrapper over
							 * Android SharedPreferences (get/edit/save). Implementable from TS via
							 * the implementation-object constructor.
							 */
							export class PreferenceStore {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.persistence.PreferenceStore>;
								/**
								 * Constructs a new instance of the com.twitter.sdk.android.core.internal.persistence.PreferenceStore interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
								 */
								public constructor(implementation: {
									get(): globalAndroid.content.SharedPreferences;
									edit(): globalAndroid.content.SharedPreferences.Editor;
									save(param0: globalAndroid.content.SharedPreferences.Editor): boolean;
								});
								public constructor();
								public get(): globalAndroid.content.SharedPreferences;
								public save(param0: globalAndroid.content.SharedPreferences.Editor): boolean;
								public edit(): globalAndroid.content.SharedPreferences.Editor;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module persistence {
							/**
							 * Typing for PreferenceStoreImpl: PreferenceStore implementation built
							 * from a Context and a preferences file name.
							 */
							export class PreferenceStoreImpl extends com.twitter.sdk.android.core.internal.persistence.PreferenceStore {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.persistence.PreferenceStoreImpl>;
								public constructor(param0: globalAndroid.content.Context, param1: string);
								public get(): globalAndroid.content.SharedPreferences;
								public save(param0: globalAndroid.content.SharedPreferences.Editor): boolean;
								public edit(): globalAndroid.content.SharedPreferences.Editor;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module persistence {
							/**
							 * Typing for PreferenceStoreStrategy<T>: a PersistenceStrategy backed
							 * by a PreferenceStore plus a SerializationStrategy and a key. Note the
							 * generated signatures erase T to `any`.
							 */
							export class PreferenceStoreStrategy<T> extends com.twitter.sdk.android.core.internal.persistence.PersistenceStrategy<any> {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.persistence.PreferenceStoreStrategy<any>>;
								public restore(): any;
								public clear(): void;
								public save(param0: any): void;
								public constructor(param0: com.twitter.sdk.android.core.internal.persistence.PreferenceStore, param1: com.twitter.sdk.android.core.internal.persistence.SerializationStrategy<any>, param2: string);
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module internal {
						export module persistence {
							/**
							 * Typing for the Java interface SerializationStrategy<T>: string
							 * serialize/deserialize of a value of type T. Implementable from TS via
							 * the implementation-object constructor.
							 */
							export class SerializationStrategy<T> extends java.lang.Object {
								public static class: java.lang.Class<com.twitter.sdk.android.core.internal.persistence.SerializationStrategy<any>>;
								/**
								 * Constructs a new instance of the com.twitter.sdk.android.core.internal.persistence.SerializationStrategy<any> interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
								 */
								public constructor(implementation: {
									serialize(param0: T): string;
									deserialize(param0: string): T;
								});
								public constructor();
								public deserialize(param0: string): T;
								public serialize(param0: T): string;
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model com.twitter.sdk.android.core.models.ApiError:
						 * a single API error (message + numeric code).
						 */
						export class ApiError {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.ApiError>;
							public message: string;
							public code: number;
							public constructor(param0: string, param1: number);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for ApiErrors: wrapper holding a list of ApiError entries.
						 */
						export class ApiErrors {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.ApiErrors>;
							public errors: java.util.List<com.twitter.sdk.android.core.models.ApiError>;
							public constructor(param0: java.util.List<com.twitter.sdk.android.core.models.ApiError>);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for BindingValues: string-keyed lookup of card binding values
						 * (get/containsKey); constructible empty or from a Map.
						 */
						export class BindingValues {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.BindingValues>;
							public constructor();
							public get(param0: string): any;
							public constructor(param0: java.util.Map<string,any>);
							public containsKey(param0: string): boolean;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for BindingValuesAdapter: Gson (de)serializer converting between
						 * JsonElement and BindingValues.
						 */
						export class BindingValuesAdapter extends java.lang.Object {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.BindingValuesAdapter>;
							public constructor();
							public serialize(param0: com.twitter.sdk.android.core.models.BindingValues, param1: java.lang.reflect.Type, param2: com.google.gson.JsonSerializationContext): com.google.gson.JsonElement;
							public deserialize(param0: com.google.gson.JsonElement, param1: java.lang.reflect.Type, param2: com.google.gson.JsonDeserializationContext): com.twitter.sdk.android.core.models.BindingValues;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model Card: a named card with its BindingValues payload.
						 */
						export class Card {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Card>;
							public bindingValues: com.twitter.sdk.android.core.models.BindingValues;
							public name: string;
							public constructor(param0: com.twitter.sdk.android.core.models.BindingValues, param1: string);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model Configuration: service configuration values
						 * (DM character limit, photo size limits/sizes, short-URL length, …).
						 */
						export class Configuration {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Configuration>;
							public dmTextCharacterLimit: number;
							public nonUsernamePaths: java.util.List<string>;
							public photoSizeLimit: number;
							public photoSizes: com.twitter.sdk.android.core.models.MediaEntity.Sizes;
							public shortUrlLengthHttps: number;
							public constructor(param0: number, param1: java.util.List<string>, param2: number, param3: com.twitter.sdk.android.core.models.MediaEntity.Sizes, param4: number);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model Coordinates: GeoJSON-style point where the
						 * coordinates list is indexed by INDEX_LONGITUDE/INDEX_LATITUDE.
						 */
						export class Coordinates {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Coordinates>;
							public static INDEX_LONGITUDE: number;
							public static INDEX_LATITUDE: number;
							public coordinates: java.util.List<java.lang.Double>;
							public type: string;
							public getLatitude(): java.lang.Double;
							public getLongitude(): java.lang.Double;
							public constructor(param0: java.lang.Double, param1: java.lang.Double, param2: string);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the base model Entity: a span inside tweet text, with
						 * start/end positions exposed via indices.
						 */
						export class Entity {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Entity>;
							public indices: java.util.List<java.lang.Integer>;
							public getStart(): number;
							public getEnd(): number;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for HashtagEntity: an Entity carrying the hashtag text;
						 * constructed as (text, start, end).
						 */
						export class HashtagEntity extends com.twitter.sdk.android.core.models.Entity {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.HashtagEntity>;
							public text: string;
							public constructor(param0: string, param1: number, param2: number);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the Java interface Identifiable (single getId() method);
						 * implementable from TS via the implementation-object constructor.
						 */
						export class Identifiable {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Identifiable>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.models.Identifiable interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								getId(): number;
							});
							public constructor();
							public getId(): number;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model Image: width/height plus an imageType string.
						 */
						export class Image {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Image>;
							public w: number;
							public h: number;
							public imageType: string;
							public constructor(param0: number, param1: number, param2: string);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model ImageValue: image dimensions, URL and alt text.
						 */
						export class ImageValue {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.ImageValue>;
							public height: number;
							public width: number;
							public url: string;
							public alt: string;
							public constructor(param0: number, param1: number, param2: string, param3: string);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model Media: an uploaded media item (id, id string,
						 * byte size, and its Image metadata).
						 */
						export class Media {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Media>;
							public mediaId: number;
							public mediaIdString: string;
							public size: number;
							public image: com.twitter.sdk.android.core.models.Image;
							public constructor(param0: number, param1: string, param2: number, param3: com.twitter.sdk.android.core.models.Image);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for MediaEntity: a UrlEntity subtype describing attached media
						 * (photo/video metadata, per-size renditions, alt text). The long
						 * 15-argument constructor mirrors the generated Java signature.
						 */
						export class MediaEntity extends com.twitter.sdk.android.core.models.UrlEntity {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.MediaEntity>;
							public id: number;
							public idStr: string;
							public mediaUrl: string;
							public mediaUrlHttps: string;
							public sizes: com.twitter.sdk.android.core.models.MediaEntity.Sizes;
							public sourceStatusId: number;
							public sourceStatusIdStr: string;
							public type: string;
							public videoInfo: com.twitter.sdk.android.core.models.VideoInfo;
							public altText: string;
							public constructor(param0: string, param1: string, param2: string, param3: number, param4: number);
							public constructor(param0: string, param1: string, param2: string, param3: number, param4: number, param5: number, param6: string, param7: string, param8: string, param9: com.twitter.sdk.android.core.models.MediaEntity.Sizes, param10: number, param11: string, param12: string, param13: com.twitter.sdk.android.core.models.VideoInfo, param14: string);
						}
						export module MediaEntity {
							/**
							 * Typing for MediaEntity$Size: one rendition's width, height and
							 * resize mode.
							 */
							export class Size {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.MediaEntity.Size>;
								public w: number;
								public h: number;
								public resize: string;
								public constructor(param0: number, param1: number, param2: string);
							}
							/**
							 * Typing for MediaEntity$Sizes: the four standard renditions
							 * (thumb/small/medium/large).
							 */
							export class Sizes {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.MediaEntity.Sizes>;
								public medium: com.twitter.sdk.android.core.models.MediaEntity.Size;
								public thumb: com.twitter.sdk.android.core.models.MediaEntity.Size;
								public small: com.twitter.sdk.android.core.models.MediaEntity.Size;
								public large: com.twitter.sdk.android.core.models.MediaEntity.Size;
								public constructor(param0: com.twitter.sdk.android.core.models.MediaEntity.Size, param1: com.twitter.sdk.android.core.models.MediaEntity.Size, param2: com.twitter.sdk.android.core.models.MediaEntity.Size, param3: com.twitter.sdk.android.core.models.MediaEntity.Size);
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for MentionEntity: an Entity identifying a mentioned user
						 * (id, idStr, name, screenName); constructed with trailing start/end.
						 */
						export class MentionEntity extends com.twitter.sdk.android.core.models.Entity {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.MentionEntity>;
							public id: number;
							public idStr: string;
							public name: string;
							public screenName: string;
							public constructor(param0: number, param1: string, param2: string, param3: string, param4: number, param5: number);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for ModelUtils: static null-safe wrappers returning a usable
						 * List/Map (raw Java types in the generated signatures).
						 */
						export class ModelUtils {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.ModelUtils>;
							public static getSafeList(param0: java.util.List): java.util.List;
							public static getSafeMap(param0: java.util.Map): java.util.Map;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model Place: a named geographic place with attributes,
						 * country info and a BoundingBox.
						 */
						export class Place {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Place>;
							public attributes: java.util.Map<string,string>;
							public boundingBox: com.twitter.sdk.android.core.models.Place.BoundingBox;
							public country: string;
							public countryCode: string;
							public fullName: string;
							public id: string;
							public name: string;
							public placeType: string;
							public url: string;
							public constructor(param0: java.util.Map<string,string>, param1: com.twitter.sdk.android.core.models.Place.BoundingBox, param2: string, param3: string, param4: string, param5: string, param6: string, param7: string, param8: string);
						}
						export module Place {
							/**
							 * Typing for Place$BoundingBox: GeoJSON-style nested coordinate
							 * lists plus a type string.
							 */
							export class BoundingBox {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.Place.BoundingBox>;
								public coordinates: java.util.List<java.util.List<java.util.List<java.lang.Double>>>;
								public type: string;
								public constructor(param0: java.util.List<java.util.List<java.util.List<java.lang.Double>>>, param1: string);
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for SafeListAdapter: a Gson TypeAdapter factory (raw generated
						 * signatures) producing adapters for list fields.
						 */
						export class SafeListAdapter {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.SafeListAdapter>;
							public constructor();
							public create(param0: com.google.gson.Gson, param1: com.google.gson.reflect.TypeToken): com.google.gson.TypeAdapter;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for SafeMapAdapter: a Gson TypeAdapter factory (raw generated
						 * signatures) producing adapters for map fields.
						 */
						export class SafeMapAdapter {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.SafeMapAdapter>;
							public constructor();
							public create(param0: com.google.gson.Gson, param1: com.google.gson.reflect.TypeToken): com.google.gson.TypeAdapter;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the model Search: a list of Tweets plus SearchMetadata.
						 */
						export class Search {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Search>;
							public tweets: java.util.List<com.twitter.sdk.android.core.models.Tweet>;
							public searchMetadata: com.twitter.sdk.android.core.models.SearchMetadata;
							public constructor(param0: java.util.List<com.twitter.sdk.android.core.models.Tweet>, param1: com.twitter.sdk.android.core.models.SearchMetadata);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for SearchMetadata: pagination/query metadata returned with a
						 * search (max/since ids, refresh and next-results URLs, counts, query).
						 */
						export class SearchMetadata {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.SearchMetadata>;
							public maxId: number;
							public sinceId: number;
							public refreshUrl: string;
							public nextResults: string;
							public count: number;
							public completedIn: number;
							public sinceIdStr: string;
							public query: string;
							public maxIdStr: string;
							public constructor(param0: number, param1: number, param2: string, param3: string, param4: number, param5: number, param6: string, param7: string, param8: string);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for SymbolEntity: an Entity carrying the cashtag/symbol text;
						 * constructed as (text, start, end).
						 */
						export class SymbolEntity extends com.twitter.sdk.android.core.models.Entity {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.SymbolEntity>;
							public text: string;
							public constructor(param0: string, param1: number, param2: number);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Typing for the central model Tweet (implements Identifiable). All
						 * fields mirror the Twitter REST tweet payload; the single 34-argument
						 * constructor is the generated Java signature — prefer TweetBuilder in
						 * application code.
						 */
						export class Tweet extends com.twitter.sdk.android.core.models.Identifiable {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.Tweet>;
							public static INVALID_ID: number;
							public coordinates: com.twitter.sdk.android.core.models.Coordinates;
							public createdAt: string;
							public currentUserRetweet: any;
							public entities: com.twitter.sdk.android.core.models.TweetEntities;
							public extendedEntities: com.twitter.sdk.android.core.models.TweetEntities;
							public favoriteCount: java.lang.Integer;
							public favorited: boolean;
							public filterLevel: string;
							public id: number;
							public idStr: string;
							public inReplyToScreenName: string;
							public inReplyToStatusId: number;
							public inReplyToStatusIdStr: string;
							public inReplyToUserId: number;
							public inReplyToUserIdStr: string;
							public lang: string;
							public place: com.twitter.sdk.android.core.models.Place;
							public possiblySensitive: boolean;
							public scopes: any;
							public quotedStatusId: number;
							public quotedStatusIdStr: string;
							public quotedStatus: com.twitter.sdk.android.core.models.Tweet;
							public retweetCount: number;
							public retweeted: boolean;
							public retweetedStatus: com.twitter.sdk.android.core.models.Tweet;
							public source: string;
							public text: string;
							public displayTextRange: java.util.List<java.lang.Integer>;
							public truncated: boolean;
							public user: com.twitter.sdk.android.core.models.User;
							public withheldCopyright: boolean;
							public withheldInCountries: java.util.List<string>;
							public withheldScope: string;
							public card: com.twitter.sdk.android.core.models.Card;
							public constructor(param0: com.twitter.sdk.android.core.models.Coordinates, param1: string, param2: any, param3: com.twitter.sdk.android.core.models.TweetEntities, param4: com.twitter.sdk.android.core.models.TweetEntities, param5: java.lang.Integer, param6: boolean, param7: string, param8: number, param9: string, param10: string, param11: number, param12: string, param13: number, param14: string, param15: string, param16: com.twitter.sdk.android.core.models.Place, param17: boolean, param18: any, param19: number, param20: string, param21: com.twitter.sdk.android.core.models.Tweet, param22: number, param23: boolean, param24: com.twitter.sdk.android.core.models.Tweet, param25: string, param26: string, param27: java.util.List<java.lang.Integer>, param28: boolean, param29: com.twitter.sdk.android.core.models.User, param30: boolean, param31: java.util.List<string>, param32: string, param33: com.twitter.sdk.android.core.models.Card);
							public hashCode(): number;
							public getId(): number;
							public equals(param0: any): boolean;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Chainable builder for {@link com.twitter.sdk.android.core.models.Tweet}:
						 * one setter per Tweet field, finalized with build(). Prefer this over the
						 * 34-argument Tweet constructor.
						 */
						export class TweetBuilder {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.TweetBuilder>;
							public setCoordinates(param0: com.twitter.sdk.android.core.models.Coordinates): com.twitter.sdk.android.core.models.TweetBuilder;
							public setRetweetedStatus(param0: com.twitter.sdk.android.core.models.Tweet): com.twitter.sdk.android.core.models.TweetBuilder;
							public setUser(param0: com.twitter.sdk.android.core.models.User): com.twitter.sdk.android.core.models.TweetBuilder;
							public setLang(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setCurrentUserRetweet(param0: any): com.twitter.sdk.android.core.models.TweetBuilder;
							public setId(param0: number): com.twitter.sdk.android.core.models.TweetBuilder;
							public setInReplyToStatusId(param0: number): com.twitter.sdk.android.core.models.TweetBuilder;
							public setEntities(param0: com.twitter.sdk.android.core.models.TweetEntities): com.twitter.sdk.android.core.models.TweetBuilder;
							public setPossiblySensitive(param0: boolean): com.twitter.sdk.android.core.models.TweetBuilder;
							public setInReplyToUserId(param0: number): com.twitter.sdk.android.core.models.TweetBuilder;
							public setWithheldCopyright(param0: boolean): com.twitter.sdk.android.core.models.TweetBuilder;
							public setInReplyToStatusIdStr(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setWithheldInCountries(param0: java.util.List<string>): com.twitter.sdk.android.core.models.TweetBuilder;
							public setFilterLevel(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setExtendedEntities(param0: com.twitter.sdk.android.core.models.TweetEntities): com.twitter.sdk.android.core.models.TweetBuilder;
							public setFavoriteCount(param0: java.lang.Integer): com.twitter.sdk.android.core.models.TweetBuilder;
							public setPlace(param0: com.twitter.sdk.android.core.models.Place): com.twitter.sdk.android.core.models.TweetBuilder;
							public setWithheldScope(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setCreatedAt(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setFavorited(param0: boolean): com.twitter.sdk.android.core.models.TweetBuilder;
							public setSource(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public constructor();
							public setInReplyToUserIdStr(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setDisplayTextRange(param0: java.util.List<java.lang.Integer>): com.twitter.sdk.android.core.models.TweetBuilder;
							public setRetweeted(param0: boolean): com.twitter.sdk.android.core.models.TweetBuilder;
							public setInReplyToScreenName(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setQuotedStatus(param0: com.twitter.sdk.android.core.models.Tweet): com.twitter.sdk.android.core.models.TweetBuilder;
							public setQuotedStatusIdStr(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setText(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							/** Creates a Tweet populated with the values set on this builder. */
							public build(): com.twitter.sdk.android.core.models.Tweet;
							/** Presumably pre-populates this builder from an existing Tweet — confirm against SDK source. */
							public copy(param0: com.twitter.sdk.android.core.models.Tweet): com.twitter.sdk.android.core.models.TweetBuilder;
							public setRetweetCount(param0: number): com.twitter.sdk.android.core.models.TweetBuilder;
							public setIdStr(param0: string): com.twitter.sdk.android.core.models.TweetBuilder;
							public setQuotedStatusId(param0: number): com.twitter.sdk.android.core.models.TweetBuilder;
							public setTruncated(param0: boolean): com.twitter.sdk.android.core.models.TweetBuilder;
							public setCard(param0: com.twitter.sdk.android.core.models.Card): com.twitter.sdk.android.core.models.TweetBuilder;
							public setScopes(param0: any): com.twitter.sdk.android.core.models.TweetBuilder;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Groups the entity lists associated with a Tweet (see Tweet.entities /
						 * Tweet.extendedEntities): urls, userMentions, media, hashtags and symbols.
						 */
						export class TweetEntities {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.TweetEntities>;
							public urls: java.util.List<com.twitter.sdk.android.core.models.UrlEntity>;
							public userMentions: java.util.List<com.twitter.sdk.android.core.models.MentionEntity>;
							public media: java.util.List<com.twitter.sdk.android.core.models.MediaEntity>;
							public hashtags: java.util.List<com.twitter.sdk.android.core.models.HashtagEntity>;
							public symbols: java.util.List<com.twitter.sdk.android.core.models.SymbolEntity>;
							/** Parameters follow the field declaration order above. */
							public constructor(param0: java.util.List<com.twitter.sdk.android.core.models.UrlEntity>, param1: java.util.List<com.twitter.sdk.android.core.models.MentionEntity>, param2: java.util.List<com.twitter.sdk.android.core.models.MediaEntity>, param3: java.util.List<com.twitter.sdk.android.core.models.HashtagEntity>, param4: java.util.List<com.twitter.sdk.android.core.models.SymbolEntity>);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * A Twitter collection response: contents holds the Tweet/User objects
						 * keyed by id, metadata describes the timeline and the item ordering.
						 */
						export class TwitterCollection {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.TwitterCollection>;
							public contents: com.twitter.sdk.android.core.models.TwitterCollection.Content;
							public metadata: com.twitter.sdk.android.core.models.TwitterCollection.Metadata;
							public constructor(param0: com.twitter.sdk.android.core.models.TwitterCollection.Content, param1: com.twitter.sdk.android.core.models.TwitterCollection.Metadata);
						}
						export module TwitterCollection {
							/** The hydrated objects of the collection, keyed by their numeric (Long) ids. */
							export class Content {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.TwitterCollection.Content>;
								public tweetMap: java.util.Map<java.lang.Long,com.twitter.sdk.android.core.models.Tweet>;
								public userMap: java.util.Map<java.lang.Long,com.twitter.sdk.android.core.models.User>;
								public constructor(param0: java.util.Map<java.lang.Long,com.twitter.sdk.android.core.models.Tweet>, param1: java.util.Map<java.lang.Long,com.twitter.sdk.android.core.models.User>);
							}
							/** Timeline identity, paging position and the ordered list of items. */
							export class Metadata {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.TwitterCollection.Metadata>;
								public timelineId: string;
								public position: com.twitter.sdk.android.core.models.TwitterCollection.Metadata.Position;
								public timelineItems: java.util.List<com.twitter.sdk.android.core.models.TwitterCollection.TimelineItem>;
								public constructor(param0: string, param1: com.twitter.sdk.android.core.models.TwitterCollection.Metadata.Position, param2: java.util.List<com.twitter.sdk.android.core.models.TwitterCollection.TimelineItem>);
							}
							export module Metadata {
								/** Cursor bounds for paging through the collection (min/max position ids). */
								export class Position {
									public static class: java.lang.Class<com.twitter.sdk.android.core.models.TwitterCollection.Metadata.Position>;
									public minPosition: java.lang.Long;
									public maxPosition: java.lang.Long;
									public constructor(param0: java.lang.Long, param1: java.lang.Long);
								}
							}
							/** One ordered entry of the timeline; its tweetItem.id resolves against Content.tweetMap. */
							export class TimelineItem {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.TwitterCollection.TimelineItem>;
								public tweetItem: com.twitter.sdk.android.core.models.TwitterCollection.TimelineItem.TweetItem;
								public constructor(param0: com.twitter.sdk.android.core.models.TwitterCollection.TimelineItem.TweetItem);
							}
							export module TimelineItem {
								/** Holds only the id of the referenced Tweet. */
								export class TweetItem {
									public static class: java.lang.Class<com.twitter.sdk.android.core.models.TwitterCollection.TimelineItem.TweetItem>;
									public id: java.lang.Long;
									public constructor(param0: java.lang.Long);
								}
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * A URL entity: the t.co url, its expandedUrl and the shortened displayUrl.
						 */
						export class UrlEntity extends com.twitter.sdk.android.core.models.Entity {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.UrlEntity>;
							public url: string;
							public expandedUrl: string;
							public displayUrl: string;
							/** param3/param4 are presumably the start/end indices consumed by the Entity base class — confirm against SDK source. */
							public constructor(param0: string, param1: string, param2: string, param3: number, param4: number);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Twitter user account model: profile/appearance fields, counters,
						 * url/description entities and an embedded status Tweet.
						 */
						export class User extends com.twitter.sdk.android.core.models.Identifiable {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.User>;
							/** Sentinel id for an invalid/unknown user. */
							public static INVALID_ID: number;
							public contributorsEnabled: boolean;
							public createdAt: string;
							public defaultProfile: boolean;
							public defaultProfileImage: boolean;
							public description: string;
							public email: string;
							public entities: com.twitter.sdk.android.core.models.UserEntities;
							public favouritesCount: number;
							public followRequestSent: boolean;
							public followersCount: number;
							public friendsCount: number;
							public geoEnabled: boolean;
							public id: number;
							public idStr: string;
							public isTranslator: boolean;
							public lang: string;
							public listedCount: number;
							public location: string;
							public name: string;
							public profileBackgroundColor: string;
							public profileBackgroundImageUrl: string;
							public profileBackgroundImageUrlHttps: string;
							public profileBackgroundTile: boolean;
							public profileBannerUrl: string;
							public profileImageUrl: string;
							public profileImageUrlHttps: string;
							public profileLinkColor: string;
							public profileSidebarBorderColor: string;
							public profileSidebarFillColor: string;
							public profileTextColor: string;
							public profileUseBackgroundImage: boolean;
							public protectedUser: boolean;
							public screenName: string;
							public showAllInlineMedia: boolean;
							public status: com.twitter.sdk.android.core.models.Tweet;
							public statusesCount: number;
							public timeZone: string;
							public url: string;
							public utcOffset: number;
							public verified: boolean;
							public withheldInCountries: java.util.List<string>;
							public withheldScope: string;
							public getId(): number;
							/** 42-argument constructor; parameters appear to follow the field declaration order above. Prefer UserBuilder. */
							public constructor(param0: boolean, param1: string, param2: boolean, param3: boolean, param4: string, param5: string, param6: com.twitter.sdk.android.core.models.UserEntities, param7: number, param8: boolean, param9: number, param10: number, param11: boolean, param12: number, param13: string, param14: boolean, param15: string, param16: number, param17: string, param18: string, param19: string, param20: string, param21: string, param22: boolean, param23: string, param24: string, param25: string, param26: string, param27: string, param28: string, param29: string, param30: boolean, param31: boolean, param32: string, param33: boolean, param34: com.twitter.sdk.android.core.models.Tweet, param35: number, param36: string, param37: string, param38: number, param39: boolean, param40: java.util.List<string>, param41: string);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Chainable builder for {@link com.twitter.sdk.android.core.models.User}:
						 * one setter per User field, finalized with build(). Prefer this over the
						 * 42-argument User constructor.
						 */
						export class UserBuilder {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.UserBuilder>;
							public setShowAllInlineMedia(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setUrl(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setIsTranslator(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileLinkColor(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setWithheldInCountries(param0: java.util.List<string>): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileBackgroundImageUrl(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setFriendsCount(param0: number): com.twitter.sdk.android.core.models.UserBuilder;
							public setLang(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileBackgroundImageUrlHttps(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setDefaultProfile(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setScreenName(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setFavouritesCount(param0: number): com.twitter.sdk.android.core.models.UserBuilder;
							public setFollowRequestSent(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setLocation(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setContributorsEnabled(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setVerified(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setWithheldScope(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setDefaultProfileImage(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileUseBackgroundImage(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public constructor();
							public setId(param0: number): com.twitter.sdk.android.core.models.UserBuilder;
							public setGeoEnabled(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setCreatedAt(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileImageUrl(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileSidebarBorderColor(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setListedCount(param0: number): com.twitter.sdk.android.core.models.UserBuilder;
							/** Creates a User populated with the values set on this builder. */
							public build(): com.twitter.sdk.android.core.models.User;
							public setProfileBackgroundColor(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileTextColor(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setProtectedUser(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setUtcOffset(param0: number): com.twitter.sdk.android.core.models.UserBuilder;
							public setDescription(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setEntities(param0: com.twitter.sdk.android.core.models.UserEntities): com.twitter.sdk.android.core.models.UserBuilder;
							public setIdStr(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setName(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileBannerUrl(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setTimeZone(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setEmail(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setFollowersCount(param0: number): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileImageUrlHttps(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileBackgroundTile(param0: boolean): com.twitter.sdk.android.core.models.UserBuilder;
							public setStatusesCount(param0: number): com.twitter.sdk.android.core.models.UserBuilder;
							public setProfileSidebarFillColor(param0: string): com.twitter.sdk.android.core.models.UserBuilder;
							public setStatus(param0: com.twitter.sdk.android.core.models.Tweet): com.twitter.sdk.android.core.models.UserBuilder;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Entities attached to a User profile: one UrlEntities wrapper for the
						 * profile url field and one for the description field.
						 */
						export class UserEntities {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.UserEntities>;
							public url: com.twitter.sdk.android.core.models.UserEntities.UrlEntities;
							public description: com.twitter.sdk.android.core.models.UserEntities.UrlEntities;
							public constructor(param0: com.twitter.sdk.android.core.models.UserEntities.UrlEntities, param1: com.twitter.sdk.android.core.models.UserEntities.UrlEntities);
						}
						export module UserEntities {
							/** Wrapper holding the list of UrlEntity objects for one profile field. */
							export class UrlEntities {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.UserEntities.UrlEntities>;
								public urls: java.util.List<com.twitter.sdk.android.core.models.UrlEntity>;
								public constructor(param0: java.util.List<com.twitter.sdk.android.core.models.UrlEntity>);
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/** Minimal user reference: holds only the user's id as a string (idStr). */
						export class UserValue {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.UserValue>;
							public idStr: string;
							public constructor(param0: string);
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module models {
						/**
						 * Video metadata for a media entity: aspect ratio, duration in
						 * milliseconds, and the list of encoded variants to choose from.
						 */
						export class VideoInfo {
							public static class: java.lang.Class<com.twitter.sdk.android.core.models.VideoInfo>;
							public aspectRatio: java.util.List<java.lang.Integer>;
							public durationMillis: number;
							public variants: java.util.List<com.twitter.sdk.android.core.models.VideoInfo.Variant>;
							public constructor(param0: java.util.List<java.lang.Integer>, param1: number, param2: java.util.List<com.twitter.sdk.android.core.models.VideoInfo.Variant>);
						}
						export module VideoInfo {
							/** One playable encoding of the video: bitrate, MIME contentType and url. */
							export class Variant {
								public static class: java.lang.Class<com.twitter.sdk.android.core.models.VideoInfo.Variant>;
								public bitrate: number;
								public contentType: string;
								public url: string;
								public constructor(param0: number, param1: string, param2: string);
							}
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/** Retrofit-style service wrapper; verifyCredentials yields a Call resolving to the current User. */
						export class AccountService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.AccountService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.AccountService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								verifyCredentials(param0: java.lang.Boolean, param1: java.lang.Boolean, param2: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.User>;
							});
							public constructor();
							public verifyCredentials(param0: java.lang.Boolean, param1: java.lang.Boolean, param2: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.User>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/** Retrofit-style service wrapper; collection yields a Call resolving to a TwitterCollection. */
						export class CollectionService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.CollectionService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.CollectionService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								collection(param0: string, param1: java.lang.Integer, param2: java.lang.Long, param3: java.lang.Long): retrofit2.Call<com.twitter.sdk.android.core.models.TwitterCollection>;
							});
							public constructor();
							public collection(param0: string, param1: java.lang.Integer, param2: java.lang.Long, param3: java.lang.Long): retrofit2.Call<com.twitter.sdk.android.core.models.TwitterCollection>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/** Retrofit-style service wrapper; configuration yields a Call resolving to the service Configuration. */
						export class ConfigurationService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.ConfigurationService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.ConfigurationService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								configuration(): retrofit2.Call<com.twitter.sdk.android.core.models.Configuration>;
							});
							public constructor();
							public configuration(): retrofit2.Call<com.twitter.sdk.android.core.models.Configuration>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/** Retrofit-style service wrapper for favorites: list a user's favorites, create or destroy one by Tweet id. */
						export class FavoriteService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.FavoriteService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.FavoriteService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								list(param0: java.lang.Long, param1: string, param2: java.lang.Integer, param3: string, param4: string, param5: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
								destroy(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
								create(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							});
							public constructor();
							public destroy(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							public create(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							public list(param0: java.lang.Long, param1: string, param2: java.lang.Integer, param3: string, param4: string, param5: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/** Retrofit-style service wrapper; statuses yields a Call resolving to the Tweets of a list. */
						export class ListService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.ListService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.ListService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								statuses(param0: java.lang.Long, param1: string, param2: string, param3: java.lang.Long, param4: java.lang.Long, param5: java.lang.Long, param6: java.lang.Integer, param7: java.lang.Boolean, param8: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
							});
							public constructor();
							public statuses(param0: java.lang.Long, param1: string, param2: string, param3: java.lang.Long, param4: java.lang.Long, param5: java.lang.Long, param6: java.lang.Integer, param7: java.lang.Boolean, param8: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/** Retrofit-style service wrapper; upload posts okhttp3.RequestBody parts and yields a Call resolving to a Media object. */
						export class MediaService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.MediaService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.MediaService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								upload(param0: okhttp3.RequestBody, param1: okhttp3.RequestBody, param2: okhttp3.RequestBody): retrofit2.Call<com.twitter.sdk.android.core.models.Media>;
							});
							public constructor();
							public upload(param0: okhttp3.RequestBody, param1: okhttp3.RequestBody, param2: okhttp3.RequestBody): retrofit2.Call<com.twitter.sdk.android.core.models.Media>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/** Retrofit-style service wrapper; tweets searches with a query string and optional Geocode, yielding a Call resolving to a Search result. */
						export class SearchService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.SearchService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.SearchService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								tweets(param0: string, param1: com.twitter.sdk.android.core.services.params.Geocode, param2: string, param3: string, param4: string, param5: java.lang.Integer, param6: string, param7: java.lang.Long, param8: java.lang.Long, param9: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Search>;
							});
							public constructor();
							public tweets(param0: string, param1: com.twitter.sdk.android.core.services.params.Geocode, param2: string, param3: string, param4: string, param5: java.lang.Integer, param6: string, param7: java.lang.Long, param8: java.lang.Long, param9: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Search>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						/**
						 * Retrofit-style service wrapper for status (Tweet) endpoints: the
						 * timeline reads (mentionsTimeline, userTimeline, homeTimeline,
						 * retweetsOfMe), single/bulk fetches (show, lookup) and the write
						 * operations (update, retweet, unretweet, destroy).
						 */
						export class StatusesService {
							public static class: java.lang.Class<com.twitter.sdk.android.core.services.StatusesService>;
							/**
							 * Constructs a new instance of the com.twitter.sdk.android.core.services.StatusesService interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
							 */
							public constructor(implementation: {
								mentionsTimeline(param0: java.lang.Integer, param1: java.lang.Long, param2: java.lang.Long, param3: java.lang.Boolean, param4: java.lang.Boolean, param5: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
								userTimeline(param0: java.lang.Long, param1: string, param2: java.lang.Integer, param3: java.lang.Long, param4: java.lang.Long, param5: java.lang.Boolean, param6: java.lang.Boolean, param7: java.lang.Boolean, param8: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
								homeTimeline(param0: java.lang.Integer, param1: java.lang.Long, param2: java.lang.Long, param3: java.lang.Boolean, param4: java.lang.Boolean, param5: java.lang.Boolean, param6: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
								retweetsOfMe(param0: java.lang.Integer, param1: java.lang.Long, param2: java.lang.Long, param3: java.lang.Boolean, param4: java.lang.Boolean, param5: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
								show(param0: java.lang.Long, param1: java.lang.Boolean, param2: java.lang.Boolean, param3: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
								lookup(param0: string, param1: java.lang.Boolean, param2: java.lang.Boolean, param3: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
								update(param0: string, param1: java.lang.Long, param2: java.lang.Boolean, param3: java.lang.Double, param4: java.lang.Double, param5: string, param6: java.lang.Boolean, param7: java.lang.Boolean, param8: string): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
								retweet(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
								destroy(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
								unretweet(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							});
							public constructor();
							public mentionsTimeline(param0: java.lang.Integer, param1: java.lang.Long, param2: java.lang.Long, param3: java.lang.Boolean, param4: java.lang.Boolean, param5: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
							public lookup(param0: string, param1: java.lang.Boolean, param2: java.lang.Boolean, param3: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
							public update(param0: string, param1: java.lang.Long, param2: java.lang.Boolean, param3: java.lang.Double, param4: java.lang.Double, param5: string, param6: java.lang.Boolean, param7: java.lang.Boolean, param8: string): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							public userTimeline(param0: java.lang.Long, param1: string, param2: java.lang.Integer, param3: java.lang.Long, param4: java.lang.Long, param5: java.lang.Boolean, param6: java.lang.Boolean, param7: java.lang.Boolean, param8: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
							public retweetsOfMe(param0: java.lang.Integer, param1: java.lang.Long, param2: java.lang.Long, param3: java.lang.Boolean, param4: java.lang.Boolean, param5: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
							public show(param0: java.lang.Long, param1: java.lang.Boolean, param2: java.lang.Boolean, param3: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							public destroy(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							public unretweet(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
							public homeTimeline(param0: java.lang.Integer, param1: java.lang.Long, param2: java.lang.Long, param3: java.lang.Boolean, param4: java.lang.Boolean, param5: java.lang.Boolean, param6: java.lang.Boolean): retrofit2.Call<java.util.List<com.twitter.sdk.android.core.models.Tweet>>;
							public retweet(param0: java.lang.Long, param1: java.lang.Boolean): retrofit2.Call<com.twitter.sdk.android.core.models.Tweet>;
						}
					}
				}
			}
		}
	}
}
declare module com {
	export module twitter {
		export module sdk {
			export module android {
				export module core {
					export module services {
						export module params {
							/**
							 * Geographic search filter for SearchService.tweets: a latitude/longitude
							 * center, a radius, and the Distance unit the radius is expressed in.
							 */
							export class Geocode {
								public static class: java.lang.Class<com.twitter.sdk.android.core.services.params.Geocode>;
								public latitude: number;
								public longitude: number;
								public radius: number;
								public distance: com.twitter.sdk.android.core.services.params.Geocode.Distance;
								public toString(): string;
								public constructor(param0: number, param1: number, param2: number, param3: com.twitter.sdk.android.core.services.params.Geocode.Distance);
							}
							export module Geocode {
								/** Java enum of radius units (MILES or KILOMETERS); identifier is its string form. */
								export class Distance {
									public static class: java.lang.Class<com.twitter.sdk.android.core.services.params.Geocode.Distance>;
									public static MILES: com.twitter.sdk.android.core.services.params.Geocode.Distance;
									public static KILOMETERS: com.twitter.sdk.android.core.services.params.Geocode.Distance;
									public identifier: string;
									public static values(): androidNative.Array<com.twitter.sdk.android.core.services.params.Geocode.Distance>;
									public static valueOf(param0: string): com.twitter.sdk.android.core.services.params.Geocode.Distance;
								}
							}
						}
					}
				}
			}
		}
	}
}
//Generics information:
//com.twitter.sdk.android.core.Callback:1
//com.twitter.sdk.android.core.PersistedSessionManager:1
//com.twitter.sdk.android.core.Result:1
//com.twitter.sdk.android.core.Session:1
//com.twitter.sdk.android.core.SessionManager:1
//com.twitter.sdk.android.core.internal.SessionMonitor:1
//com.twitter.sdk.android.core.internal.SessionVerifier:1
//com.twitter.sdk.android.core.internal.persistence.PersistenceStrategy:1
//com.twitter.sdk.android.core.internal.persistence.PreferenceStoreStrategy:1
//com.twitter.sdk.android.core.internal.persistence.SerializationStrategy:1 | the_stack |
import type { Episode, ExternalUrl, RecommendationSeed, SpotifyType, SearchType, Device, Cursor } from "spotify-types";
import type { Track } from "./structures/Track";
import type { User } from "./structures/User";
import type { Playlist } from "./structures/Playlist";
import type { Artist } from './structures/Artist';
import type { Album } from './structures/Album';
import type { Show } from './structures/Show';
import type { Client } from "./Client";
/**
 * The HTTP methods used by the spotify web api requests.
 */
export type Methods = 'GET' | 'POST' | 'DELETE' | 'PUT' | 'PATCH';
/**
 * The auth identity used to obtain a token: an existing token string,
 * client credentials, or the full user-token options.
 */
export type AuthIdentity = string | { clientID: string, clientSecret: string } | GetUserTokenOptions;
/**
 * Converts a snake_case string type into camelCase,
 * e.g. `CamelCase<'external_urls'>` is `'externalUrls'`.
 */
export type CamelCase<S extends string> = S extends `${infer P1}_${infer P2}${infer P3}`
    ? `${Lowercase<P1>}${Uppercase<P2>}${CamelCase<P3>}`
    : Lowercase<S>;
/**
 * Remaps an object type's snake_case keys to camelCase, preserving the value types.
 */
export type CamelCaseObjectKeys<T> = {
    [K in keyof T as CamelCase<string &K>]: T[K]
};
/**
 * The options required for the Client.
 */
export interface ClientOptions {
    /** The ready event which is called when the token is acquired. */
    onReady?: (client: Client) => void;
    /** The refresh handler which is called when there is a token refresh. */
    onRefresh?: () => void;
    /** Set true to re-acquire the token when it expires; defaults to false. */
    refreshToken?: boolean;
    /** Your spotify web api token, or authentication details to generate one. */
    token: AuthIdentity;
    /** If the token provided is a string and is user authorized, set this to true. */
    userAuthorizedToken?: boolean;
    /** The cache settings for the client. */
    cacheSettings?: CacheSettings | boolean;
    /** Should the client retry when a request is rate limited; defaults to true. */
    retryOnRateLimit?: boolean;
}
/** The options necessary for the fetch function in Client. */
export interface FetchOptions {
    /** The HTTP headers to apply to the request. */
    headers?: Record<string, string>;
    /** The HTTP method to use. */
    method?: Methods;
    /** The search query parameters. */
    params?: Record<string, any>;
    /** The JSON body to send, if any. */
    body?: Record<string, string | boolean | number | (string | boolean | number)[]>;
}
/** The meta details for the client refresh meta. */
export interface ClientRefreshMeta {
    /** The spotify application client id. */
    clientID: string;
    /** The spotify application client secret. */
    clientSecret: string;
    /** The refresh token, if available. */
    refreshToken?: string;
    /** The redirect url provided for authentication, if available. */
    redirectURL?: string;
}
/**
 * Option structure required to get user token.
 */
export interface GetUserTokenOptions {
    /** The spotify application client id. */
    clientID: string;
    /** The spotify application client secret. */
    clientSecret: string;
    /** The refresh token, if available. */
    refreshToken?: string;
    /** The redirect url provided for the authentication. */
    redirectURL: string;
    /** The code query acquired from the authorization, if available. */
    code?: string;
}
/**
 * The context containing the details of the spotify user token.
 */
export interface UserTokenContext {
    /** The actual access token. */
    accessToken: string;
    /** The token type. Probably 'Bearer'. */
    tokenType: string;
    /** The number of seconds after which the token expires. */
    expiresIn: number;
    /** The refresh token used to obtain a new access token once the current one has expired. */
    refreshToken: string;
    /** The scopes granted for the token. */
    scope: string;
}
/**
 * The settings of the cache for the ClientOptions.
 */
export interface CacheSettings {
    /** Cache setting for spotify users. */
    users?: boolean;
    /** Cache setting for spotify artists. */
    artists?: boolean;
    /** Cache setting for spotify tracks. */
    tracks?: boolean;
    /** Cache setting for spotify playlists. */
    playlists?: boolean;
    /** Cache setting for spotify albums. */
    albums?: boolean;
    /** Cache setting for spotify episodes. */
    episodes?: boolean;
    /** Cache setting for spotify shows. */
    shows?: boolean;
}
/** The options structure for search functions in the various managers. */
export interface SearchOptions {
    /** If true, the response will include any relevant audio content that is hosted externally. */
    includeExternalAudio?: boolean;
    /** The offset index of the results. */
    offset?: number;
    /** The limit of the results. */
    limit?: number;
    /** If a country code is specified, only content that is playable in that market is returned. */
    market?: string;
}
/** The linked track object for the [linkedFrom] field in [Track]. */
export interface LinkedTrack {
    /** The known external URLs for this track, mapped by name. */
    externalURL: ExternalUrl;
    /** The id of the linked track. */
    id: string;
    /** The type of spotify object. */
    type: SpotifyType;
    /** The uri of this object. */
    uri: string;
}
/** The playlist track object. */
export interface PlaylistTrack {
    /** The date and time the track or episode was added. */
    addedAt?: string;
    /** The Spotify user who added the track or episode. */
    addedBy?: User;
    /** Whether this track or episode is a local file or not. */
    isLocal: boolean;
    /** Information about the track or episode. */
    track: Track | Episode | null;
}
/** The object returned by the [Browse.getFeaturedPlaylists] function. */
export interface FeaturedPlaylistContent {
    /** The message of the featured playlists. */
    message: string;
    /** The featured playlists. */
    playlists: Playlist[];
}
/**
 * The collection of recommendation seed objects with tracks provided from the spotify api.
 */
export interface Recommendations {
    /** An array of recommendation seed objects. */
    seeds: RecommendationSeed[];
    /** An array of track objects (simplified) ordered according to the parameters supplied. */
    tracks: Track[];
}
/** The options structure for search functions in the client. */
export interface ClientSearchOptions extends SearchOptions {
    /** The list of item types to search across. */
    types: SearchType[];
}
/** The object structure returned by the [Client.search] function. */
export interface SearchContent {
    /** The episode search results. */
    episodes?: Episode[];
    /** The show search results. */
    shows?: Show[];
    /** The track search results. */
    tracks?: Track[];
    /** The artist search results. */
    artists?: Artist[];
    /** The album search results. */
    albums?: Album[];
}
/** The options structure required for the [PlaylistManager.reorderItems] function. */
export interface PlaylistReorderOptions {
    /** The uris of the tracks or episodes. */
    uris?: string[];
    /** The position of the first item to be reordered. */
    rangeStart?: number;
    /** The position where the items should be inserted. */
    insertBefore?: number;
    /** The amount of items to be reordered. */
    rangeLength?: number;
    /** The playlist’s snapshot ID against which you want to make the changes. */
    snapshotID?: string;
}
/** The saved object type. */
export interface Saved<T> {
    /** The timestamp when the item was added. */
    addedAt: number;
    /** The saved item. */
    item: T;
}
/** The context object of the player. */
export interface PlayerContext {
    /** External URLs for this context. */
    externalURL: ExternalUrl;
    /** A link to the Web API endpoint providing full details of the track. */
    href: string;
    /** The object type. */
    type: SpotifyType;
    /** The Spotify URI for the context. */
    uri: string;
}
/** The recently played object which is returned by the [Player.getRecentlyPlayed] function. */
export interface RecentlyPlayed {
    /** The cursors to check other pages of recently played. */
    cursors: Cursor;
    /** The items which have been recently played. */
    items: {
        /** The track which has been played recently. */
        track: Track,
        /** The timestamp when it was played. */
        playedAt: string
    }[];
}
/** The current playback returned by the [Player.getCurrentPlayback] function. */
export interface CurrentPlayback extends CurrentlyPlaying {
    /** Whether shuffle is enabled. */
    shuffleState: boolean;
    /** The repeat mode of the playback. */
    repeatState: 'track' | 'off' | 'context';
    /** The device the playback is running on, with camel-cased keys. */
    device: CamelCaseObjectKeys<Device>;
}
/** The object structure containing the details of the currently playing item which is returned by the [Player.getCurrentlyPlaying] function. */
export interface CurrentlyPlaying {
    /** The timestamp of the playback state (presumably epoch milliseconds — confirm against the API). */
    timestamp: number;
    /** The progress into the currently playing item (presumably milliseconds — confirm against the API). */
    progress: number;
    /** Whether something is playing right now. */
    isPlaying: boolean;
    /** The type of the currently playing item. */
    currentPlayingType: string;
    /** The currently playing track or episode, if any. */
    item: Track | Episode | null;
    /** The context of the playback. */
    context: PlayerContext;
}
/**
 * The scopes for the user authorization process.
 * @see https://developer.spotify.com/documentation/general/guides/scopes/
 */
export enum Scopes {
    /** Write access to user-provided images. */
    ImageUpload = "ugc-image-upload",
    /** Read access to a user’s recently played tracks. */
    ReadRecentlyPlayed = "user-read-recently-played",
    /** Read access to a user’s player state. */
    ReadPlaybackState = "user-read-playback-state",
    /** Read access to a user's top artists and tracks. */
    ReadTopArtistsAndUsers = "user-top-read",
    /** Remote control playback of Spotify. This scope is currently available to Spotify iOS and Android SDKs. */
    RemoteControl = "app-remote-control",
    /** Write access to a user's public playlists. */
    ModifyPublicPlaylists = "playlist-modify-public",
    /** Write access to a user’s playback state. */
    WritePlaybackState = "user-modify-playback-state",
    /** Write access to a user's private playlists. */
    ModifyPrivatePlaylists = "playlist-modify-private",
    /** Read access to a user's private playlists. */
    ReadPrivatePlaylists = "playlist-read-private",
    /** Write/delete access to the list of artists and other users that the user follows. */
    ModifyFollowers = "user-follow-modify",
    /** Read access to the list of artists and other users that the user follows. */
    ReadFollowers = "user-follow-read",
    /** Read access to a user’s currently playing content. */
    ReadCurrentlyPlaying = "user-read-currently-playing",
    /** Write/delete access to a user's "Your Music" library. */
    ModifyUserLibrary = "user-library-modify",
    /** Read access to a user's library. */
    ReadUserLibrary = "user-library-read",
    /** Read access to a user’s playback position in a content. */
    ReadPlaybackPosition = "user-read-playback-position",
    /** Read access to a user’s email address. */
    ReadUserEmail = "user-read-email",
    /** Read access to a user’s subscription details (type of user account). */
    ReadUserPrivateDetails = "user-read-private",
    /** Include collaborative playlists when requesting a user's playlists. */
    ReadCollaborativePlaylists = "playlist-read-collaborative",
    /** Control playback of a Spotify track. This scope is currently available to the Web Playback SDK. The user must have a Spotify Premium account. */
    Streaming = "streaming"
}
/**
 * The time range type from the spotify api used for the [/me/top/{type}] endpoint.
 */
export enum TimeRange {
    /** Time range of several years. */
    Long = "long_term",
    /** Time range of 6 months. */
    Medium = "medium_term",
    /** Time range of 4 weeks. */
    Short = "short_term"
}
import * as parser from '@pgql/parse';
/** A relation (table) reference as produced by the parser's RangeVar node. */
interface RelationClause {
  RangeVar: {
    relname: string;
  };
}
/** A string constant node from the parse tree. */
interface StringSqlValue {
  String: {
    str: string;
  };
}
/** An integer constant node from the parse tree. */
interface IntegerSqlValue {
  Integer: {
    ival: number;
  };
}
/** A (possibly qualified) type name node from the parse tree. */
interface TypeNameSqlValue {
  TypeName: {
    names: SqlValue[];
  };
}
/** Any scalar value node this module knows how to render back to text. */
type SqlValue = StringSqlValue | IntegerSqlValue | TypeNameSqlValue;
/**
 * Renders a parse-tree value node back to its textual form.
 *
 * - String nodes yield their raw string.
 * - Integer nodes yield their decimal representation.
 * - TypeName nodes yield a dot-joined qualified name (e.g. `pg_catalog.int4`).
 *
 * @throws Error when the node is none of the known value shapes.
 */
const toString = (value: SqlValue): string => {
  if ('String' in value) {
    return value.String.str;
  }
  if ('Integer' in value) {
    return `${value.Integer.ival}`;
  }
  if ('TypeName' in value) {
    return value.TypeName.names.map((name) => toString(name)).join('.');
  }
  // JSON.stringify instead of plain interpolation: `${value}` on an object
  // would render the useless "[object Object]" in the error message.
  throw new Error(`Could not convert ConstValue to string '${JSON.stringify(value)}'`);
};
/**
 * Renders a parsed expression node back into SQL text. Handles function
 * calls, constants, column references, binary operator expressions and
 * type casts; any other node shape raises an error.
 */
const toSql = (expression: RawExpression): string => {
  if ('FuncCall' in expression) {
    const funcName = expression.FuncCall.funcname.map(toString).join('.');
    const argList = expression.FuncCall.args?.map(toSql).join(', ') || '';
    return `${funcName}(${argList})`;
  }
  if ('A_Const' in expression) {
    return toString(expression.A_Const.val);
  }
  if ('ColumnRef' in expression) {
    // TODO: should we escape this and/or also use the optional name?
    return expression.ColumnRef?.fields.map(toString).join('.');
  }
  if ('A_Expr' in expression) {
    // TODO: how to decide when to add parens around an expression e.g. SELECT 1 * 2 + 3 vs SELECT 1 * (2 + 3)
    // seems to only group the A_Expr differently.
    const operator = expression.A_Expr.name.map(toString).join('.');
    const left = toSql(expression.A_Expr.lexpr);
    const right = toSql(expression.A_Expr.rexpr);
    return `${left} ${operator} ${right}`;
  }
  if ('TypeCast' in expression) {
    const typeName = expression.TypeCast.typeName.TypeName.names.map(toString).join('.');
    return `${toSql(expression.TypeCast.arg)}::${typeName}`;
  }
  throw new Error(`Could not convert expression to SQL: ${JSON.stringify(expression, null, 2)}`);
};
/** A function invocation expression, e.g. `now()`. */
interface FuncCallExpression {
  FuncCall: {
    /** The (possibly schema-qualified) function name parts. */
    funcname: SqlValue[];
    args?: RawExpression[];
  };
}
/** A literal constant expression. */
interface ConstExpression {
  A_Const: {
    val: SqlValue;
  };
}
/** A (possibly qualified) column reference expression. */
interface ColumnRefExpression {
  ColumnRef: {
    fields: SqlValue[];
  };
}
/** A binary operator expression with left and right operands. */
interface LeftRightExpression {
  A_Expr: {
    /** The operator name parts. */
    name: SqlValue[];
    lexpr: RawExpression;
    rexpr: RawExpression;
  };
}
/** A type-cast expression, e.g. `'foo'::text`. */
interface TypeCastExpression {
  TypeCast: {
    arg: RawExpression;
    typeName: {
      TypeName: {
        names: SqlValue[];
      };
    };
  };
}
/** Any expression node this module knows how to render back to SQL. */
type RawExpression =
  | TypeCastExpression
  | LeftRightExpression
  | ColumnRefExpression
  | ConstExpression
  | FuncCallExpression;
/** A CHECK constraint (contype 4). */
interface TableElementCheckConstraint {
  contype: 4;
  raw_expr: RawExpression;
  conname?: string;
}
/** A DEFAULT clause (contype 2). */
interface TableElementDefaultConstraint {
  contype: 2;
  raw_expr: RawExpression;
}
/** A PRIMARY KEY constraint (contype 5). */
interface TableElementPrimaryKeyConstraint {
  contype: 5;
  conname?: string;
  /** The key columns; set for table-level constraints. */
  keys?: SqlValue[];
}
/** A UNIQUE constraint (contype 6). */
interface TableElementUniqueConstraint {
  contype: 6;
  keys?: SqlValue[];
  conname?: string;
}
/** A NOT NULL constraint (contype 1). */
interface TableElementNotNullConstraint {
  contype: 1;
}
/** Referential action codes: a = no action, r = restrict, c = cascade, d = set default, n = set null. */
type TableElementReferencesAction = 'a' | 'r' | 'c' | 'd' | 'n';
/** A REFERENCES / FOREIGN KEY constraint (contype 8). */
interface TableElementReferencesConstraint {
  contype: 8;
  conname?: string;
  /** The referenced (primary-key side) table. */
  pktable: {
    RangeVar: {
      relname: string;
    };
  };
  /** Referenced columns on the primary-key side. */
  pk_attrs: SqlValue[];
  /** Referencing columns on the foreign-key side, when spelled as a table constraint. */
  fk_attrs?: SqlValue[];
  /** Match type: s = simple, f = full, p = partial. */
  fk_matchtype: 's' | 'f' | 'p';
  fk_upd_action: TableElementReferencesAction;
  fk_del_action: TableElementReferencesAction;
  initially_valid: boolean;
}
/** Any constraint shape attached to a column or table element. */
type TableElementConstraint =
  | TableElementReferencesConstraint
  | TableElementNotNullConstraint
  | TableElementDefaultConstraint
  | TableElementPrimaryKeyConstraint
  | TableElementUniqueConstraint
  | TableElementCheckConstraint;
/** One entry of CREATE TABLE's element list: a column definition and/or a table constraint. */
interface TableElement {
  ColumnDef?: {
    colname: string;
    typeName: {
      TypeName: {
        names: SqlValue[];
      };
    };
    constraints?: {
      Constraint: TableElementConstraint;
    }[];
  };
  Constraint?: TableElementConstraint;
}
/** A CREATE TABLE statement. */
interface CreateStmt {
  relation: RelationClause;
  tableElts: TableElement[];
  if_not_exists?: boolean;
}
/** A DROP statement; `removeType` selects the object class. */
interface DropStmt {
  /** Each object is a list of name parts; the last part is the unqualified name. */
  objects: SqlValue[][];
  /** Set for DROP ... IF EXISTS. */
  missing_ok?: boolean;
  removeType: 37 | 45 | 19; // 37 = table, 45 = type, 19 = function
}
/** ALTER TABLE ... ADD COLUMN (subtype 0). */
interface AlterTableAddColumnCmd {
  subtype: 0;
  def: {
    ColumnDef: TableElement['ColumnDef'];
  };
}
/** ALTER TABLE ... DROP COLUMN (subtype 10). */
interface AlterTableDropColumnCmd {
  subtype: 10;
  name: string;
}
/** ALTER TABLE ... ALTER COLUMN SET/DROP DEFAULT (subtype 3; no `def` means DROP DEFAULT). */
interface AlterTableAlterColumnAlterDefaultCmd {
  subtype: 3;
  name: string;
  def?: RawExpression;
}
/** ALTER TABLE ... ALTER COLUMN SET NOT NULL (subtype 5). */
interface AlterTableAlterColumnSetNotNullCmd {
  subtype: 5;
  name: string;
}
/** ALTER TABLE ... ALTER COLUMN DROP NOT NULL (subtype 4). */
interface AlterTableAlterColumnDropNotNullCmd {
  subtype: 4;
  name: string;
}
/** ALTER TABLE ... ALTER COLUMN TYPE (subtype 25). */
interface AlterTableAlterColumnSetDataTypeCmd {
  subtype: 25;
  name: string;
  def: {
    ColumnDef: {
      typeName: {
        TypeName: {
          names: SqlValue[];
        };
      };
    };
  };
}
/** ALTER TABLE ... ADD CONSTRAINT (subtype 14). */
interface AlterTableAddConstraintCmd {
  subtype: 14;
  def: {
    Constraint: TableElementConstraint;
  };
}
/** ALTER TABLE ... DROP CONSTRAINT (subtype 22). */
interface AlterTableDropConstraintCmd {
  subtype: 22;
  name: string;
}
/** Any supported ALTER TABLE sub-command, discriminated by `subtype`. */
type AlterTableCmd =
  | AlterTableAlterColumnSetNotNullCmd
  | AlterTableAlterColumnDropNotNullCmd
  | AlterTableAddColumnCmd
  | AlterTableAlterColumnSetDataTypeCmd
  | AlterTableAlterColumnAlterDefaultCmd
  | AlterTableDropColumnCmd
  | AlterTableAddConstraintCmd
  | AlterTableDropConstraintCmd;
/** ALTER TABLE ... RENAME TO (renameType 37 = table). */
interface RenameTableStmt {
  renameType: 37;
  relation: {
    RangeVar: {
      relname: string;
    };
  };
  newname: string;
}
/** ALTER TABLE ... RENAME COLUMN (renameType 6 = column). */
interface RenameColumnStmt {
  renameType: 6;
  relation: {
    RangeVar: {
      relname: string;
    };
  };
  /** The current column name. */
  subname: string;
  newname: string;
}
/** Any supported RENAME statement, discriminated by `renameType`. */
type RenameStmt = RenameTableStmt | RenameColumnStmt;
/** An ALTER TABLE statement with its list of sub-commands. */
interface AlterTableStmt {
  relation: {
    RangeVar: {
      relname: string;
    };
  };
  cmds: {
    AlterTableCmd: AlterTableCmd;
  }[];
}
/** A SET configuration statement (ignored by the simulator). */
interface VariableSetStmt {
  kind: 0;
  name: string;
  args: RawExpression[];
}
/** A CREATE TYPE ... AS ENUM statement. */
interface CreateEnumStmt {
  typeName: SqlValue[];
  vals: SqlValue[];
}
/** An ALTER TYPE ... ADD VALUE statement. */
interface AlterEnumStmt {
  typeName: SqlValue[];
  newVal: string;
  /** Existing value next to which the new value is inserted. */
  newValNeighbor: string;
  /** Whether the new value goes after (true) or before (false) the neighbor. */
  newValIsAfter: boolean;
}
/** The union of all statement node shapes the parser may hand us. */
type Stmt =
  | { CreateExtensionStmt: {} }
  | { CopyStmt: {} }
  | { ViewStmt: {} }
  | { CreateSchemaStmt: {} }
  | { IndexStmt: {} }
  | { AlterEnumStmt: AlterEnumStmt }
  | { CreateEnumStmt: CreateEnumStmt }
  | { VariableSetStmt: VariableSetStmt }
  | { AlterTableStmt: AlterTableStmt }
  | { RenameStmt: RenameStmt }
  | { DropStmt: DropStmt }
  | { CreateStmt: CreateStmt }
  | { SelectStmt: {} }
  | { DeleteStmt: {} }
  | { TruncateStmt: {} }
  | { UpdateStmt: {} }
  | { InsertStmt: {} }
  | { CreateFunctionStmt: {} };
/** A single parsed statement as returned by the parser. */
interface Result {
  RawStmt: {
    stmt: Stmt;
  };
}
/** Normalized referential action names. */
type TableAction = 'NO_ACTION' | 'RESTRICT' | 'CASCADE' | 'SET_NULL' | 'SET_DEFAULT';
/** A simulated column: its name, data type and column-level constraints. */
export interface Column {
  name: string;
  dataType: string;
  constraints: TableConstraint[];
}
/** A simulated table: its columns plus table-level constraints. */
export interface Table {
  name: string;
  columns: Column[];
  constraints: TableConstraint[];
}
/** A normalized NOT NULL constraint (never named). */
export interface TableNotNullConstraint {
  type: 'NOT_NULL';
  name?: never;
}
/** A normalized PRIMARY KEY constraint. */
export interface TablePrimaryKeyConstraint {
  type: 'PRIMARY_KEY';
  keys?: string[];
  name?: never;
}
/** A normalized DEFAULT clause with its expression rendered back to SQL. */
export interface TableDefaultConstraint {
  type: 'DEFAULT';
  expression: string;
  name?: never;
}
/** A normalized CHECK constraint with its expression rendered back to SQL. */
export interface TableCheckConstraint {
  type: 'CHECK';
  expression: string;
  name: string | undefined;
}
/** A normalized UNIQUE constraint. */
export interface TableUniqueConstraint {
  type: 'UNIQUE';
  keys?: string[];
  name: string | undefined;
}
/** A normalized REFERENCES / FOREIGN KEY constraint. */
export interface TableReferencesConstraint {
  type: 'REFERENCES';
  onDelete: TableAction;
  onUpdate: TableAction;
  match: 'FULL' | 'PARTIAL' | 'SIMPLE';
  columns?: string[]; // This makes it a foreign key constraint instead of references
  refTable: string;
  refColumns: string[];
  name: string | undefined;
}
/** Any normalized constraint attached to a column or table. */
export type TableConstraint =
  | TableNotNullConstraint
  | TablePrimaryKeyConstraint
  | TableDefaultConstraint
  | TableCheckConstraint
  | TableUniqueConstraint
  | TableReferencesConstraint;
/** A user-defined enum type and its ordered values. */
interface EnumType {
  name: string;
  values: string[];
}
/**
* This simulator accepts DDL statements such as CREATE TABLE, ALTER TABLE, etc and builds up the
* given tables, columns and types. Internally, it uses the actual postgres query parser.
*/
export class DataDefinitionLanguageSimulator {
private tables: { [tableName: string]: Table } = {};
private types: { [typeName: string]: EnumType } = {};
private toConstraint(constraint: TableElementConstraint): TableConstraint {
if (constraint.contype == 1) {
return {
type: 'NOT_NULL',
};
} else if (constraint.contype == 2) {
return {
type: `DEFAULT`,
expression: toSql(constraint.raw_expr),
};
} else if (constraint.contype == 4) {
//
return {
type: `CHECK`,
expression: toSql(constraint.raw_expr),
name: constraint.conname,
};
} else if (constraint.contype == 5) {
return {
type: `PRIMARY_KEY`,
keys: constraint.keys?.map(toString) || [],
};
} else if (constraint.contype == 6) {
return {
type: `UNIQUE`,
keys: constraint.keys?.map(toString) || [],
name: constraint.conname,
};
} else if (constraint.contype == 8) {
const actions: {
[K in TableElementReferencesAction]: TableAction;
} = {
a: `NO_ACTION`,
r: `RESTRICT`,
c: `CASCADE`,
d: `SET_DEFAULT`,
n: `SET_NULL`,
};
const matches: {
[K in 's' | 'f' | 'p']: 'FULL' | 'PARTIAL' | 'SIMPLE';
} = {
s: 'SIMPLE',
f: 'FULL',
p: 'PARTIAL',
};
return {
type: `REFERENCES`,
onDelete: actions[constraint.fk_del_action],
onUpdate: actions[constraint.fk_upd_action],
match: matches[constraint.fk_matchtype],
refTable: constraint.pktable.RangeVar.relname,
refColumns: constraint.pk_attrs.map(toString),
columns: constraint.fk_attrs?.map(toString) || [],
name: constraint.conname,
};
}
return constraint;
}
private parseRenameStatement(statement: RenameStmt): void {
if (statement.renameType == 6) {
const table = this.tables[statement.relation.RangeVar.relname];
if (!table) {
throw new Error(
`Table '${statement.relation.RangeVar.relname}' could not be found when trying to rename column '${statement.subname}' to '${statement.newname}'.`,
);
}
const column = table.columns.find((column) => column.name === statement.subname);
if (!column) {
throw new Error(
`Could not find column '${statement.subname}' in table '${table.name}' when trying to rename to '${statement.newname}'.`,
);
}
column.name = statement.newname;
return;
} else if (statement.renameType === 37) {
const table = this.tables[statement.relation.RangeVar.relname];
if (!table) {
throw new Error(
`Table '${statement.relation.RangeVar.relname}' could not be found when trying to rename to '${statement.newname}'.`,
);
}
if (this.tables[statement.newname]) {
throw new Error(
`Could not rename table '${statement.relation.RangeVar.relname}' to '${statement.newname}' because it already exists.`,
);
}
table.name = statement.newname;
delete this.tables[statement.relation.RangeVar.relname];
this.tables[table.name] = table;
return;
}
return statement;
}
private parseCreateStatement(statement: CreateStmt) {
const table: Table = {
name: statement.relation.RangeVar.relname,
columns: statement.tableElts
.filter((element) => element.ColumnDef)
.map((element) => {
const columnDef = element.ColumnDef!;
const name = columnDef.colname;
const dataType = columnDef.typeName.TypeName.names.map(toString).join('.');
const constraints =
columnDef.constraints?.map(({ Constraint: constraint }) =>
this.toConstraint(constraint),
) || [];
return {
name,
dataType,
constraints,
};
}),
constraints: statement.tableElts
.filter((element) => element.Constraint)
.map(({ Constraint: constraint }) => this.toConstraint(constraint!)),
};
if (this.tables[table.name] && !statement.if_not_exists) {
throw new Error(`Table '${table.name}' already exists`);
}
this.tables[table.name] = table;
}
private parseDropStatement(statement: DropStmt): void {
if (statement.removeType === 37) {
statement.objects.forEach((objects) => {
const name = toString(objects[objects.length - 1]);
const table = this.tables[name];
if (!table && !statement.missing_ok) {
throw new Error(`Could not drop table '${name}' because it does not exist.`);
}
// TODO: if there is a cascade clause we need to drop everything related
delete this.tables[name];
});
} else if (statement.removeType === 45) {
statement.objects.forEach((objects) => {
const name = toString(objects[objects.length - 1]);
const type = this.types[name];
if (!type && !statement.missing_ok) {
throw new Error(`Could not drop type '${name}' because it does not exist.`);
}
delete this.types[name];
});
} else if (statement.removeType === 19) {
// TODO: drop the function
} else {
return statement.removeType;
}
}
private parseAlterTableStatement(statement: AlterTableStmt) {
const table = this.tables[statement.relation.RangeVar.relname];
if (!table) {
throw new Error(
`Could not find table '${statement.relation.RangeVar.relname}' when altering table.`,
);
}
statement.cmds.forEach((command): void => {
if (command.AlterTableCmd.subtype === 0) {
const constraints =
command.AlterTableCmd.def.ColumnDef!.constraints?.map(({ Constraint: constraint }) =>
this.toConstraint(constraint),
) || [];
const dataType = command.AlterTableCmd.def
.ColumnDef!.typeName.TypeName.names.map(toString)
.join('.');
table.columns.push({
name: command.AlterTableCmd.def.ColumnDef!.colname,
constraints,
dataType,
});
} else if (command.AlterTableCmd.subtype === 5) {
const columnName = command.AlterTableCmd.name;
const column = table.columns.find((column) => column.name === columnName);
if (!column) {
throw new Error(
`Could not find column '${columnName}' to alter default in table '${table.name}'.`,
);
}
column.constraints.push({
type: `NOT_NULL`,
});
} else if (command.AlterTableCmd.subtype === 4) {
const columnName = command.AlterTableCmd.name;
const column = table.columns.find((column) => column.name === columnName);
if (!column) {
throw new Error(
`Could not find column '${columnName}' to alter not null in table '${table.name}'.`,
);
}
const constraintIndex = column.constraints.findIndex(
(constraint) => constraint.type === `NOT_NULL`,
);
if (constraintIndex === -1) {
throw new Error(
`Could not drop not null from column '${column.name}' because not null was not set`,
);
}
column.constraints.splice(constraintIndex, 1);
} else if (command.AlterTableCmd.subtype === 25) {
const columnName = command.AlterTableCmd.name;
const column = table.columns.find((column) => column.name === columnName);
if (!column) {
throw new Error(
`Could not find column '${columnName}' to set data type in table '${table.name}'.`,
);
}
const dataType = command.AlterTableCmd.def.ColumnDef.typeName.TypeName.names
.map(toString)
.join('.');
column.dataType = dataType;
} else if (command.AlterTableCmd.subtype === 3) {
const columnName = command.AlterTableCmd.name;
const column = table.columns.find((column) => column.name === columnName);
if (!column) {
throw new Error(
`Could not find column '${columnName}' to alter default in table '${table.name}'.`,
);
}
if (command.AlterTableCmd.def) {
column.constraints.push({
type: `DEFAULT`,
expression: toSql(command.AlterTableCmd.def),
});
} else {
const constraintIndex = column.constraints.findIndex(
(constraint) => constraint.type === `DEFAULT`,
);
column.constraints.splice(constraintIndex, 1);
}
} else if (command.AlterTableCmd.subtype === 10) {
const columnName = command.AlterTableCmd.name;
const columnIndex = table.columns.findIndex((column) => column.name === columnName);
if (columnIndex === -1) {
throw new Error(
`Could not find column '${columnName}' to drop in table '${table.name}'.`,
);
}
table.columns.splice(columnIndex, 1);
} else if (command.AlterTableCmd.subtype === 14) {
const newConstraint = this.toConstraint(command.AlterTableCmd.def.Constraint);
table.constraints.push(newConstraint);
} else if (command.AlterTableCmd.subtype === 22) {
const constraintName = command.AlterTableCmd.name;
const constraintIndex = table.constraints.findIndex(
(constraint) => constraint.name === constraintName,
);
if (!constraintIndex) {
// TODO: find the constraint matching the name
// index.name = constraintName || `${table.name}_${index.columns.join(`_`)}_fkey`;
// pkey for primary key, key for unique
// FIXME: the CHECK is actually named based on the expression. If it references one
// column it's added to the name. We can't just check if one of the columns is in
// the expression, because it checks if it's really a reference.
//
// Some examples:
// "test_check" CHECK (1 > 0) -- no column
// "test_check1" CHECK (123 > 0) -- no column, second check
// "test_check2" CHECK (foo_id > val) -- multiple columns, third check
// "test_val_check" CHECK (length('foo_id'::text) > val) -- column as string and real reference
// "test_val_check1" CHECK (1 > val AND val < 0) -- one column multiple references
} else {
table.constraints.splice(constraintIndex, 1);
}
} else {
throw new Error(
`Could not simulate alter table command '${JSON.stringify(command.AlterTableCmd)}'`,
);
}
});
}
private parseCreateEnumStatement(statement: CreateEnumStmt) {
const values = statement.vals.map(toString);
const name = statement.typeName.map(toString).join('.');
this.types[name] = {
name,
values,
};
}
private parseAlterEnumStatement(statement: AlterEnumStmt) {
const name = statement.typeName.map(toString).join('.');
const type = this.types[name];
if (!type) {
throw new Error(
`Could not alter type '${name}' when trying to add value '${statement.newVal}'.`,
);
}
const index = type.values.indexOf(statement.newValNeighbor);
if (index === -1) {
throw new Error(
`Could not find value '${statement.newValNeighbor}' when adding '${statement.newVal}' to type '${type.name}'`,
);
}
const newValueIndex = statement.newValIsAfter ? index + 1 : index;
type.values.splice(newValueIndex, 0, statement.newVal);
}
private parseStatement(statement: Stmt) {
if (
'SelectStmt' in statement ||
'DeleteStmt' in statement ||
'TruncateStmt' in statement ||
'UpdateStmt' in statement ||
'InsertStmt' in statement ||
'CopyStmt' in statement ||
'VariableSetStmt' in statement ||
'CreateExtensionStmt' in statement ||
'IndexStmt' in statement
) {
// These statements are not considered data definition altering statements.
return false;
}
if (
'CreateFunctionStmt' in statement ||
'ViewStmt' in statement ||
'CreateSchemaStmt' in statement
) {
// These statements are currently not supported yet. TODO: how to log using oclif?
return false;
}
if ('CreateStmt' in statement) {
return this.parseCreateStatement(statement.CreateStmt);
} else if ('CreateEnumStmt' in statement) {
return this.parseCreateEnumStatement(statement.CreateEnumStmt);
} else if ('AlterTableStmt' in statement) {
return this.parseAlterTableStatement(statement.AlterTableStmt);
} else if ('RenameStmt' in statement) {
return this.parseRenameStatement(statement.RenameStmt);
} else if ('DropStmt' in statement) {
return this.parseDropStatement(statement.DropStmt);
} else if ('AlterEnumStmt' in statement) {
return this.parseAlterEnumStatement(statement.AlterEnumStmt);
} else {
throw new Error(
`Found unknown statement when parsing queries: ${JSON.stringify(statement, null, 2)}`,
);
}
}
parse(sql: string) {
const result: Result[] = parser.parseQuerySync(sql);
result.forEach((result) => {
this.parseStatement(result.RawStmt.stmt);
});
}
getTables() {
return this.tables;
}
} | the_stack |
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* Manages a API Management Service API Diagnostics Logs.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as azure from "@pulumi/azure";
*
* const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
* const exampleInsights = new azure.appinsights.Insights("exampleInsights", {
* location: exampleResourceGroup.location,
* resourceGroupName: exampleResourceGroup.name,
* applicationType: "web",
* });
* const exampleService = new azure.apimanagement.Service("exampleService", {
* location: exampleResourceGroup.location,
* resourceGroupName: exampleResourceGroup.name,
* publisherName: "My Company",
* publisherEmail: "company@mycompany.io",
* skuName: "Developer_1",
* });
* const exampleApi = new azure.apimanagement.Api("exampleApi", {
* resourceGroupName: exampleResourceGroup.name,
* apiManagementName: exampleService.name,
* revision: "1",
* displayName: "Example API",
* path: "example",
* protocols: ["https"],
* "import": {
* contentFormat: "swagger-link-json",
* contentValue: "http://conferenceapi.azurewebsites.net/?format=json",
* },
* });
* const exampleLogger = new azure.apimanagement.Logger("exampleLogger", {
* apiManagementName: exampleService.name,
* resourceGroupName: exampleResourceGroup.name,
* applicationInsights: {
* instrumentationKey: exampleInsights.instrumentationKey,
* },
* });
* const exampleApiDiagnostic = new azure.apimanagement.ApiDiagnostic("exampleApiDiagnostic", {
* identifier: "applicationinsights",
* resourceGroupName: exampleResourceGroup.name,
* apiManagementName: exampleService.name,
* apiName: exampleApi.name,
* apiManagementLoggerId: exampleLogger.id,
* samplingPercentage: 5,
* alwaysLogErrors: true,
* logClientIp: true,
* verbosity: "verbose",
* httpCorrelationProtocol: "W3C",
* frontendRequest: {
* bodyBytes: 32,
* headersToLogs: [
* "content-type",
* "accept",
* "origin",
* ],
* },
* frontendResponse: {
* bodyBytes: 32,
* headersToLogs: [
* "content-type",
* "content-length",
* "origin",
* ],
* },
* backendRequest: {
* bodyBytes: 32,
* headersToLogs: [
* "content-type",
* "accept",
* "origin",
* ],
* },
* backendResponse: {
* bodyBytes: 32,
* headersToLogs: [
* "content-type",
* "content-length",
* "origin",
* ],
* },
* });
* ```
*
* ## Import
*
* API Management Service API Diagnostics Logs can be imported using the `resource id`, e.g.
*
* ```sh
* $ pulumi import azure:apimanagement/apiDiagnostic:ApiDiagnostic example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/instance1/apis/api1/diagnostics/diagnostic1/loggers/logger1
* ```
*/
export class ApiDiagnostic extends pulumi.CustomResource {
    /**
     * Get an existing ApiDiagnostic resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: ApiDiagnosticState, opts?: pulumi.CustomResourceOptions): ApiDiagnostic {
        // Supplying an explicit `id` makes the constructor read remote state
        // rather than create a new resource.
        return new ApiDiagnostic(name, <any>state, { ...opts, id });
    }
    /** @internal */
    public static readonly __pulumiType = 'azure:apimanagement/apiDiagnostic:ApiDiagnostic';
    /**
     * Returns true if the given object is an instance of ApiDiagnostic. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is ApiDiagnostic {
        // `!= null` filters out both null and undefined before probing the tag.
        return obj != null && obj['__pulumiType'] === ApiDiagnostic.__pulumiType;
    }
    /**
     * Always log errors. Send telemetry if there is an erroneous condition, regardless of sampling settings.
     */
    public readonly alwaysLogErrors!: pulumi.Output<boolean>;
    /**
     * The ID (name) of the Diagnostics Logger.
     */
    public readonly apiManagementLoggerId!: pulumi.Output<string>;
    /**
     * The name of the API Management Service instance. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    public readonly apiManagementName!: pulumi.Output<string>;
    /**
     * The name of the API on which to configure the Diagnostics Logs. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    public readonly apiName!: pulumi.Output<string>;
    /**
     * A `backendRequest` block as defined below.
     */
    public readonly backendRequest!: pulumi.Output<outputs.apimanagement.ApiDiagnosticBackendRequest>;
    /**
     * A `backendResponse` block as defined below.
     */
    public readonly backendResponse!: pulumi.Output<outputs.apimanagement.ApiDiagnosticBackendResponse>;
    /**
     * A `frontendRequest` block as defined below.
     */
    public readonly frontendRequest!: pulumi.Output<outputs.apimanagement.ApiDiagnosticFrontendRequest>;
    /**
     * A `frontendResponse` block as defined below.
     */
    public readonly frontendResponse!: pulumi.Output<outputs.apimanagement.ApiDiagnosticFrontendResponse>;
    /**
     * The HTTP Correlation Protocol to use. Possible values are `None`, `Legacy` or `W3C`.
     */
    public readonly httpCorrelationProtocol!: pulumi.Output<string>;
    /**
     * Identifier of the Diagnostics Logs. Possible values are `applicationinsights` and `azuremonitor`. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    public readonly identifier!: pulumi.Output<string>;
    /**
     * Log client IP address.
     */
    public readonly logClientIp!: pulumi.Output<boolean>;
    /**
     * The format of the Operation Name for Application Insights telemetries. Possible values are `Name`, and `Url`. Defaults to `Name`.
     */
    public readonly operationNameFormat!: pulumi.Output<string | undefined>;
    /**
     * The name of the Resource Group where the API Management Service API Diagnostics Logs should exist. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    public readonly resourceGroupName!: pulumi.Output<string>;
    /**
     * Sampling (%). For high traffic APIs, please read this [documentation](https://docs.microsoft.com/azure/api-management/api-management-howto-app-insights#performance-implications-and-log-sampling) to understand performance implications and log sampling. Valid values are between `0.0` and `100.0`.
     */
    public readonly samplingPercentage!: pulumi.Output<number>;
    /**
     * Logging verbosity. Possible values are `verbose`, `information` or `error`.
     */
    public readonly verbosity!: pulumi.Output<string>;
    /**
     * Create a ApiDiagnostic resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: ApiDiagnosticArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: ApiDiagnosticArgs | ApiDiagnosticState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts ?? {};
        // Every input property shared by both the args and state shapes.
        const props = [
            "alwaysLogErrors", "apiManagementLoggerId", "apiManagementName",
            "apiName", "backendRequest", "backendResponse", "frontendRequest",
            "frontendResponse", "httpCorrelationProtocol", "identifier",
            "logClientIp", "operationNameFormat", "resourceGroupName",
            "samplingPercentage", "verbosity",
        ] as const;
        if (opts.id) {
            // Lookup of an existing resource: seed inputs from the prior state.
            const state = argsOrState as ApiDiagnosticState | undefined;
            for (const prop of props) {
                inputs[prop] = state?.[prop];
            }
        } else {
            // Creation path: validate the required arguments before copying.
            const args = argsOrState as ApiDiagnosticArgs | undefined;
            if ((!args || args.apiManagementLoggerId === undefined) && !opts.urn) {
                throw new Error("Missing required property 'apiManagementLoggerId'");
            }
            if ((!args || args.apiManagementName === undefined) && !opts.urn) {
                throw new Error("Missing required property 'apiManagementName'");
            }
            if ((!args || args.apiName === undefined) && !opts.urn) {
                throw new Error("Missing required property 'apiName'");
            }
            if ((!args || args.identifier === undefined) && !opts.urn) {
                throw new Error("Missing required property 'identifier'");
            }
            if ((!args || args.resourceGroupName === undefined) && !opts.urn) {
                throw new Error("Missing required property 'resourceGroupName'");
            }
            for (const prop of props) {
                inputs[prop] = args?.[prop];
            }
        }
        // Pin the provider SDK version unless the caller supplied one.
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion() });
        }
        super(ApiDiagnostic.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering ApiDiagnostic resources.
 *
 * Every field is optional: this shape seeds `ApiDiagnostic.get` lookups, so
 * callers may qualify the lookup with any subset of the resource's inputs.
 */
export interface ApiDiagnosticState {
    /**
     * Always log errors. Send telemetry if there is an erroneous condition, regardless of sampling settings.
     */
    alwaysLogErrors?: pulumi.Input<boolean>;
    /**
     * The ID (name) of the Diagnostics Logger.
     */
    apiManagementLoggerId?: pulumi.Input<string>;
    /**
     * The name of the API Management Service instance. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    apiManagementName?: pulumi.Input<string>;
    /**
     * The name of the API on which to configure the Diagnostics Logs. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    apiName?: pulumi.Input<string>;
    /**
     * A `backendRequest` block as defined below.
     */
    backendRequest?: pulumi.Input<inputs.apimanagement.ApiDiagnosticBackendRequest>;
    /**
     * A `backendResponse` block as defined below.
     */
    backendResponse?: pulumi.Input<inputs.apimanagement.ApiDiagnosticBackendResponse>;
    /**
     * A `frontendRequest` block as defined below.
     */
    frontendRequest?: pulumi.Input<inputs.apimanagement.ApiDiagnosticFrontendRequest>;
    /**
     * A `frontendResponse` block as defined below.
     */
    frontendResponse?: pulumi.Input<inputs.apimanagement.ApiDiagnosticFrontendResponse>;
    /**
     * The HTTP Correlation Protocol to use. Possible values are `None`, `Legacy` or `W3C`.
     */
    httpCorrelationProtocol?: pulumi.Input<string>;
    /**
     * Identifier of the Diagnostics Logs. Possible values are `applicationinsights` and `azuremonitor`. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    identifier?: pulumi.Input<string>;
    /**
     * Log client IP address.
     */
    logClientIp?: pulumi.Input<boolean>;
    /**
     * The format of the Operation Name for Application Insights telemetries. Possible values are `Name`, and `Url`. Defaults to `Name`.
     */
    operationNameFormat?: pulumi.Input<string>;
    /**
     * The name of the Resource Group where the API Management Service API Diagnostics Logs should exist. Changing this forces a new API Management Service API Diagnostics Logs to be created.
     */
    resourceGroupName?: pulumi.Input<string>;
    /**
     * Sampling (%). For high traffic APIs, please read this [documentation](https://docs.microsoft.com/azure/api-management/api-management-howto-app-insights#performance-implications-and-log-sampling) to understand performance implications and log sampling. Valid values are between `0.0` and `100.0`.
     */
    samplingPercentage?: pulumi.Input<number>;
    /**
     * Logging verbosity. Possible values are `verbose`, `information` or `error`.
     */
    verbosity?: pulumi.Input<string>;
}
/**
* The set of arguments for constructing a ApiDiagnostic resource.
*/
export interface ApiDiagnosticArgs {
/**
* Always log errors. Send telemetry if there is an erroneous condition, regardless of sampling settings.
*/
alwaysLogErrors?: pulumi.Input<boolean>;
/**
* The ID (name) of the Diagnostics Logger.
*/
apiManagementLoggerId: pulumi.Input<string>;
/**
* The name of the API Management Service instance. Changing this forces a new API Management Service API Diagnostics Logs to be created.
*/
apiManagementName: pulumi.Input<string>;
/**
* The name of the API on which to configure the Diagnostics Logs. Changing this forces a new API Management Service API Diagnostics Logs to be created.
*/
apiName: pulumi.Input<string>;
/**
* A `backendRequest` block as defined below.
*/
backendRequest?: pulumi.Input<inputs.apimanagement.ApiDiagnosticBackendRequest>;
/**
* A `backendResponse` block as defined below.
*/
backendResponse?: pulumi.Input<inputs.apimanagement.ApiDiagnosticBackendResponse>;
/**
* A `frontendRequest` block as defined below.
*/
frontendRequest?: pulumi.Input<inputs.apimanagement.ApiDiagnosticFrontendRequest>;
/**
* A `frontendResponse` block as defined below.
*/
frontendResponse?: pulumi.Input<inputs.apimanagement.ApiDiagnosticFrontendResponse>;
/**
* The HTTP Correlation Protocol to use. Possible values are `None`, `Legacy` or `W3C`.
*/
httpCorrelationProtocol?: pulumi.Input<string>;
/**
* Identifier of the Diagnostics Logs. Possible values are `applicationinsights` and `azuremonitor`. Changing this forces a new API Management Service API Diagnostics Logs to be created.
*/
identifier: pulumi.Input<string>;
/**
* Log client IP address.
*/
logClientIp?: pulumi.Input<boolean>;
/**
* The format of the Operation Name for Application Insights telemetries. Possible values are `Name`, and `Url`. Defaults to `Name`.
*/
operationNameFormat?: pulumi.Input<string>;
/**
* The name of the Resource Group where the API Management Service API Diagnostics Logs should exist. Changing this forces a new API Management Service API Diagnostics Logs to be created.
*/
resourceGroupName: pulumi.Input<string>;
/**
* Sampling (%). For high traffic APIs, please read this [documentation](https://docs.microsoft.com/azure/api-management/api-management-howto-app-insights#performance-implications-and-log-sampling) to understand performance implications and log sampling. Valid values are between `0.0` and `100.0`.
*/
samplingPercentage?: pulumi.Input<number>;
/**
* Logging verbosity. Possible values are `verbose`, `information` or `error`.
*/
verbosity?: pulumi.Input<string>;
} | the_stack |
import rule from '../../src/rules/no-invalid-void-type';
import { RuleTester } from '../RuleTester';
// Shared tester: every `run` below exercises the no-invalid-void-type rule
// against the TypeScript parser.
const ruleTester = new RuleTester({
  parser: '@typescript-eslint/parser',
});
// With `allowInGenericTypeArguments: false`, `void` is only permitted as a
// return type (including a union with `never` for functions that may throw).
ruleTester.run('allowInGenericTypeArguments: false', rule, {
  valid: [
    {
      code: 'type Generic<T> = [T];',
      options: [{ allowInGenericTypeArguments: false }],
    },
    {
      // https://github.com/typescript-eslint/typescript-eslint/issues/1946
      code: `
        function foo(): void | never {
          throw new Error('Test');
        }
      `,
      options: [{ allowInGenericTypeArguments: false }],
    },
    {
      code: 'type voidNeverUnion = void | never;',
      options: [{ allowInGenericTypeArguments: false }],
    },
    {
      code: 'type neverVoidUnion = never | void;',
      options: [{ allowInGenericTypeArguments: false }],
    },
  ],
  invalid: [
    {
      code: 'type GenericVoid = Generic<void>;',
      options: [{ allowInGenericTypeArguments: false }],
      errors: [
        {
          messageId: 'invalidVoidNotReturn',
          line: 1,
          column: 28,
        },
      ],
    },
    {
      code: 'function takeVoid(thing: void) {}',
      options: [{ allowInGenericTypeArguments: false }],
      errors: [
        {
          messageId: 'invalidVoidNotReturn',
          line: 1,
          column: 26,
        },
      ],
    },
    {
      // Both the annotation and the constructor type argument are flagged.
      code: 'let voidPromise: Promise<void> = new Promise<void>(() => {});',
      options: [{ allowInGenericTypeArguments: false }],
      errors: [
        {
          messageId: 'invalidVoidNotReturn',
          line: 1,
          column: 26,
        },
        {
          messageId: 'invalidVoidNotReturn',
          line: 1,
          column: 46,
        },
      ],
    },
    {
      code: 'let voidMap: Map<string, void> = new Map<string, void>();',
      options: [{ allowInGenericTypeArguments: false }],
      errors: [
        {
          messageId: 'invalidVoidNotReturn',
          line: 1,
          column: 26,
        },
        {
          messageId: 'invalidVoidNotReturn',
          line: 1,
          column: 50,
        },
      ],
    },
    {
      // Unions with anything other than `never` are rejected.
      code: 'type invalidVoidUnion = void | number;',
      options: [{ allowInGenericTypeArguments: false }],
      errors: [
        {
          messageId: 'invalidVoidNotReturn',
          line: 1,
          column: 25,
        },
      ],
    },
  ],
});
// Default behavior: `void` is valid as a return type and as any generic type
// argument, but nowhere else (parameters, properties, unions, casts, ...).
ruleTester.run('allowInGenericTypeArguments: true', rule, {
  valid: [
    'function func(): void {}',
    'type NormalType = () => void;',
    'let normalArrow = (): void => {};',
    'let ughThisThing = void 0;',
    'function takeThing(thing: undefined) {}',
    'takeThing(void 0);',
    'let voidPromise: Promise<void> = new Promise<void>(() => {});',
    'let voidMap: Map<string, void> = new Map<string, void>();',
    `
      function returnsVoidPromiseDirectly(): Promise<void> {
        return Promise.resolve();
      }
    `,
    'async function returnsVoidPromiseAsync(): Promise<void> {}',
    'type UnionType = string | number;',
    'type GenericVoid = Generic<void>;',
    'type Generic<T> = [T];',
    'type voidPromiseUnion = void | Promise<void>;',
    'type promiseNeverUnion = Promise<void> | never;',
  ],
  invalid: [
    {
      code: 'function takeVoid(thing: void) {}',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 26,
        },
      ],
    },
    // Type-parameter constraints and defaults are not generic *arguments*,
    // so `void` is flagged in each position.
    {
      code: 'const arrowGeneric = <T extends void>(arg: T) => {};',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 33,
        },
      ],
    },
    {
      code: 'const arrowGeneric1 = <T = void>(arg: T) => {};',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 28,
        },
      ],
    },
    {
      code: 'const arrowGeneric2 = <T extends void = void>(arg: T) => {};',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 34,
        },
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 41,
        },
      ],
    },
    {
      code: 'function functionGeneric<T extends void>(arg: T) {}',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 36,
        },
      ],
    },
    {
      code: 'function functionGeneric1<T = void>(arg: T) {}',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 31,
        },
      ],
    },
    {
      code: 'function functionGeneric2<T extends void = void>(arg: T) {}',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 37,
        },
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 44,
        },
      ],
    },
    {
      code: 'declare function functionDeclaration<T extends void>(arg: T): void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 48,
        },
      ],
    },
    {
      code: 'declare function functionDeclaration1<T = void>(arg: T): void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 43,
        },
      ],
    },
    {
      code: 'declare function functionDeclaration2<T extends void = void>(arg: T): void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 49,
        },
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 56,
        },
      ],
    },
    {
      // Call-site type argument to a user function whose parameter is used.
      code: 'functionGeneric<void>(undefined);',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 17,
        },
      ],
    },
    {
      code: 'declare function voidArray(args: void[]): void[];',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 34,
        },
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 43,
        },
      ],
    },
    // Assertions / casts to `void` are also rejected.
    {
      code: 'let value = undefined as void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 26,
        },
      ],
    },
    {
      code: 'let value = <void>undefined;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 14,
        },
      ],
    },
    {
      code: 'function takesThings(...things: void[]): void {}',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 33,
        },
      ],
    },
    {
      code: 'type KeyofVoid = keyof void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 24,
        },
      ],
    },
    {
      // Only the property annotation is invalid; the lambda return is fine.
      code: `
        interface Interface {
          lambda: () => void;
          voidProp: void;
        }
      `,
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 4,
          column: 21,
        },
      ],
    },
    {
      code: `
        class ClassName {
          private readonly propName: void;
        }
      `,
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 3,
          column: 38,
        },
      ],
    },
    {
      code: 'let letVoid: void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 14,
        },
      ],
    },
    {
      // The alias declaration itself is flagged, not its later usage.
      code: `
        type VoidType = void;
        class OtherClassName {
          private propName: VoidType;
        }
      `,
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 2,
          column: 25,
        },
      ],
    },
    {
      code: 'type UnionType2 = string | number | void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 37,
        },
      ],
    },
    {
      code: 'type UnionType3 = string | ((number & any) | (string | void));',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 56,
        },
      ],
    },
    {
      code: 'type IntersectionType = string & number & void;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 43,
        },
      ],
    },
    {
      code: `
        type MappedType<T> = {
          [K in keyof T]: void;
        };
      `,
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 3,
          column: 27,
        },
      ],
    },
    {
      code: `
        type ConditionalType<T> = {
          [K in keyof T]: T[K] extends string ? void : string;
        };
      `,
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 3,
          column: 49,
        },
      ],
    },
    {
      code: 'type ManyVoid = readonly void[];',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 26,
        },
      ],
    },
    {
      code: 'function foo(arr: readonly void[]) {}',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 28,
        },
      ],
    },
    {
      code: 'type invalidVoidUnion = void | Map<string, number>;',
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 25,
        },
      ],
    },
  ],
});
// With an array value, `void` is allowed only as a type argument of the
// whitelisted generics (qualified names compared with whitespace ignored).
ruleTester.run('allowInGenericTypeArguments: whitelist', rule, {
  valid: [
    'type Allowed<T> = [T];',
    'type Banned<T> = [T];',
    {
      code: 'type AllowedVoid = Allowed<void>;',
      options: [{ allowInGenericTypeArguments: ['Allowed'] }],
    },
    {
      code: 'type AllowedVoid = Ex.Mx.Tx<void>;',
      options: [{ allowInGenericTypeArguments: ['Ex.Mx.Tx'] }],
    },
    {
      // eslint-disable-next-line @typescript-eslint/internal/plugin-test-formatting
      code: 'type AllowedVoid = Ex . Mx . Tx<void>;',
      options: [{ allowInGenericTypeArguments: ['Ex.Mx.Tx'] }],
    },
    {
      // eslint-disable-next-line @typescript-eslint/internal/plugin-test-formatting
      code: 'type AllowedVoid = Ex . Mx . Tx<void>;',
      options: [{ allowInGenericTypeArguments: ['Ex.Mx . Tx'] }],
    },
    {
      code: 'type AllowedVoid = Ex.Mx.Tx<void>;',
      options: [{ allowInGenericTypeArguments: ['Ex . Mx . Tx'] }],
    },
    {
      code: 'type voidPromiseUnion = void | Promise<void>;',
      options: [{ allowInGenericTypeArguments: ['Promise'] }],
    },
    {
      code: 'type promiseVoidUnion = Promise<void> | void;',
      options: [{ allowInGenericTypeArguments: ['Promise'] }],
    },
    {
      // https://github.com/typescript-eslint/typescript-eslint/issues/1956
      code: `
        async function foo(bar: () => void | Promise<void>) {
          await bar();
        }
      `,
      options: [{ allowInGenericTypeArguments: ['Promise'] }],
    },
    {
      code: 'type promiseNeverUnion = Promise<void> | never;',
      options: [{ allowInGenericTypeArguments: ['Promise'] }],
    },
    {
      code: 'type voidPromiseNeverUnion = void | Promise<void> | never;',
      options: [{ allowInGenericTypeArguments: ['Promise'] }],
    },
  ],
  invalid: [
    {
      code: 'type BannedVoid = Banned<void>;',
      options: [{ allowInGenericTypeArguments: ['Allowed'] }],
      errors: [
        {
          messageId: 'invalidVoidForGeneric',
          data: { generic: 'Banned' },
          line: 1,
          column: 26,
        },
      ],
    },
    {
      // Whitelisting the last segment alone does not match a qualified name.
      code: 'type BannedVoid = Ex.Mx.Tx<void>;',
      options: [{ allowInGenericTypeArguments: ['Tx'] }],
      errors: [
        {
          messageId: 'invalidVoidForGeneric',
          data: { generic: 'Ex.Mx.Tx' },
          line: 1,
          column: 28,
        },
      ],
    },
    {
      code: 'function takeVoid(thing: void) {}',
      options: [{ allowInGenericTypeArguments: ['Allowed'] }],
      errors: [
        {
          messageId: 'invalidVoidNotReturnOrGeneric',
          line: 1,
          column: 26,
        },
      ],
    },
  ],
});
ruleTester.run('allowAsThisParameter: true', rule, {
valid: [
{
code: 'function f(this: void) {}',
options: [{ allowAsThisParameter: true }],
},
{
code: `
class Test {
public static helper(this: void) {}
method(this: void) {}
}
`,
options: [{ allowAsThisParameter: true }],
},
],
invalid: [
{
code: 'type alias = void;',
options: [
{ allowAsThisParameter: true, allowInGenericTypeArguments: true },
],
errors: [
{
messageId: 'invalidVoidNotReturnOrThisParamOrGeneric',
},
],
},
{
code: 'type alias = void;',
options: [
{ allowAsThisParameter: true, allowInGenericTypeArguments: false },
],
errors: [
{
messageId: 'invalidVoidNotReturnOrThisParam',
},
],
},
{
code: 'type alias = Array<void>;',
options: [
{ allowAsThisParameter: true, allowInGenericTypeArguments: false },
],
errors: [
{
messageId: 'invalidVoidNotReturnOrThisParam',
},
],
},
],
}); | the_stack |
import React, { ReactNode } from 'react';
import { useElementFilter } from './useElementFilter';
import { renderHook } from '@testing-library/react-hooks';
import { attachComponentData } from './componentData';
import { featureFlagsApiRef } from '../apis';
import {
ApiProvider,
ApiRegistry,
LocalStorageFeatureFlags,
} from '@backstage/core-app-api';
// Component-data keys used to tag the test components below.
const WRAPPING_COMPONENT_KEY = 'core.blob.testing';
const INNER_COMPONENT_KEY = 'core.blob2.testing';

// Renders nothing; carries data under WRAPPING_COMPONENT_KEY so the element
// filter can select it.
const WrappingComponent = (_props: { children: ReactNode }) => null;
attachComponentData(WrappingComponent, WRAPPING_COMPONENT_KEY, {
  message: 'hey! im wrapping component data',
});

// Leaf component carrying data under INNER_COMPONENT_KEY.
const InnerComponent = () => null;
attachComponentData(InnerComponent, INNER_COMPONENT_KEY, {
  message: 'hey! im the inner component',
});

// Plain container with no component data attached.
const MockComponent = (_props: { children: ReactNode }) => null;

// Stand-in for the core FeatureFlagged component; the traversal recognizes
// it via the 'core.featureFlagged' component-data key and gates discovery
// on the `with`/`without` flag props.
const FeatureFlagComponent = (_props: {
  children: ReactNode;
  with?: string;
  without?: string;
}) => null;
attachComponentData(FeatureFlagComponent, 'core.featureFlagged', true);

const mockFeatureFlagsApi = new LocalStorageFeatureFlags();

// Supplies the feature-flags API that useElementFilter consults.
const Wrapper = ({ children }: { children?: React.ReactNode }) => (
  <ApiProvider apis={ApiRegistry.with(featureFlagsApiRef, mockFeatureFlagsApi)}>
    {children}
  </ApiProvider>
);
// Exercises useElementFilter: selection by component data, extraction of
// attached data, feature-flag gating, strict-mode errors, and traversal
// through fragments and text nodes.
describe('useElementFilter', () => {
  it('should select elements based on a component data key', () => {
    const tree = (
      <MockComponent>
        <WrappingComponent key="first">
          <InnerComponent />
        </WrappingComponent>
        <MockComponent>
          <WrappingComponent key="second">
            <WrappingComponent key="third">
              <InnerComponent />
            </WrappingComponent>
          </WrappingComponent>
        </MockComponent>
      </MockComponent>
    );
    const { result } = renderHook(
      props =>
        useElementFilter(props.tree, elements =>
          elements
            .selectByComponentData({ key: WRAPPING_COMPONENT_KEY })
            .getElements(),
        ),
      {
        initialProps: { tree },
        wrapper: Wrapper,
      },
    );
    // Selection stops at the first match per branch: "third" is nested
    // inside "second" and therefore not returned.
    expect(result.current.length).toBe(2);
    expect(result.current[0].key).toBe('.$.$first');
    expect(result.current[1].key).toBe('.$.$second');
  });
  it('should find componentData', () => {
    const tree = (
      <MockComponent>
        <WrappingComponent key="first">
          <InnerComponent />
        </WrappingComponent>
        <MockComponent>
          <WrappingComponent key="second">
            <InnerComponent />
          </WrappingComponent>
        </MockComponent>
      </MockComponent>
    );
    const { result } = renderHook(
      props =>
        useElementFilter(props.tree, elements =>
          elements.findComponentData({ key: WRAPPING_COMPONENT_KEY }),
        ),
      {
        initialProps: { tree },
        wrapper: Wrapper,
      },
    );
    expect(result.current.length).toBe(2);
    expect(result.current[0]).toEqual({
      message: 'hey! im wrapping component data',
    });
    expect(result.current[1]).toEqual({
      message: 'hey! im wrapping component data',
    });
  });
  it('can be combined together to filter the selection', () => {
    const tree = (
      <MockComponent>
        <WrappingComponent key="first">
          <InnerComponent />
        </WrappingComponent>
        <MockComponent>
          <WrappingComponent key="second">
            <InnerComponent />
          </WrappingComponent>
        </MockComponent>
        <InnerComponent />
      </MockComponent>
    );
    const { result } = renderHook(
      props =>
        useElementFilter(props.tree, elements =>
          elements
            .selectByComponentData({ key: WRAPPING_COMPONENT_KEY })
            .findComponentData({ key: INNER_COMPONENT_KEY }),
        ),
      {
        initialProps: { tree },
        wrapper: Wrapper,
      },
    );
    // The top-level InnerComponent is outside the selection, so only the
    // two wrapped instances are found.
    expect(result.current.length).toBe(2);
    expect(result.current[0]).toEqual({
      message: 'hey! im the inner component',
    });
    expect(result.current[1]).toEqual({
      message: 'hey! im the inner component',
    });
  });
  describe('FeatureFlags', () => {
    describe('with', () => {
      it('should not discover deeper than the feature gate if the feature flag is disabled', () => {
        jest
          .spyOn(mockFeatureFlagsApi, 'isActive')
          .mockImplementation(() => false);
        const tree = (
          <MockComponent>
            <FeatureFlagComponent with="testing-flag">
              <WrappingComponent key="first">
                <InnerComponent />
              </WrappingComponent>
            </FeatureFlagComponent>
            <MockComponent>
              <WrappingComponent key="second">
                <InnerComponent />
              </WrappingComponent>
            </MockComponent>
            <InnerComponent />
          </MockComponent>
        );
        const { result } = renderHook(
          props =>
            useElementFilter(props.tree, elements =>
              elements
                .selectByComponentData({ key: WRAPPING_COMPONENT_KEY })
                .getElements(),
            ),
          {
            initialProps: { tree },
            wrapper: Wrapper,
          },
        );
        expect(result.current.length).toBe(1);
        expect(result.current[0].key).toContain('second');
      });
      it('should discover components behind a feature flag if the flag is enabled', () => {
        jest
          .spyOn(mockFeatureFlagsApi, 'isActive')
          .mockImplementation(() => true);
        const tree = (
          <MockComponent>
            <FeatureFlagComponent with="testing-flag">
              <WrappingComponent key="first">
                <InnerComponent />
              </WrappingComponent>
            </FeatureFlagComponent>
            <MockComponent>
              <WrappingComponent key="second">
                <InnerComponent />
              </WrappingComponent>
            </MockComponent>
            <InnerComponent />
          </MockComponent>
        );
        const { result } = renderHook(
          props =>
            useElementFilter(props.tree, elements =>
              elements
                .selectByComponentData({ key: WRAPPING_COMPONENT_KEY })
                .getElements(),
            ),
          {
            initialProps: { tree },
            wrapper: Wrapper,
          },
        );
        expect(result.current.length).toBe(2);
      });
    });
    describe('without', () => {
      it('should discover deeper than the feature gate if the feature flag is disabled', () => {
        jest
          .spyOn(mockFeatureFlagsApi, 'isActive')
          .mockImplementation(() => false);
        const tree = (
          <MockComponent>
            <FeatureFlagComponent without="testing-flag">
              <WrappingComponent key="first">
                <InnerComponent />
              </WrappingComponent>
            </FeatureFlagComponent>
            <MockComponent>
              <WrappingComponent key="second">
                <InnerComponent />
              </WrappingComponent>
            </MockComponent>
            <InnerComponent />
          </MockComponent>
        );
        const { result } = renderHook(
          props =>
            useElementFilter(props.tree, elements =>
              elements
                .selectByComponentData({ key: WRAPPING_COMPONENT_KEY })
                .getElements(),
            ),
          {
            initialProps: { tree },
            wrapper: Wrapper,
          },
        );
        expect(result.current.length).toBe(2);
      });
      it('should not discover components behind a feature flag if the flag is enabled', () => {
        jest
          .spyOn(mockFeatureFlagsApi, 'isActive')
          .mockImplementation(() => true);
        const tree = (
          <MockComponent>
            <FeatureFlagComponent without="testing-flag">
              <WrappingComponent key="first">
                <InnerComponent />
              </WrappingComponent>
            </FeatureFlagComponent>
            <MockComponent>
              <WrappingComponent key="second">
                <InnerComponent />
              </WrappingComponent>
            </MockComponent>
            <InnerComponent />
          </MockComponent>
        );
        const { result } = renderHook(
          props =>
            useElementFilter(props.tree, elements =>
              elements
                .selectByComponentData({ key: WRAPPING_COMPONENT_KEY })
                .getElements(),
            ),
          {
            initialProps: { tree },
            wrapper: Wrapper,
          },
        );
        expect(result.current.length).toBe(1);
      });
    });
  });
  it('should reject when strict mode is enabled with the correct string', () => {
    const tree = (
      <MockComponent>
        <h1>Hello</h1>
      </MockComponent>
    );
    const { result } = renderHook(
      props =>
        useElementFilter(props.tree, elements =>
          elements
            .selectByComponentData({
              key: WRAPPING_COMPONENT_KEY,
              withStrictError: 'Could not find component',
            })
            .findComponentData({ key: INNER_COMPONENT_KEY }),
        ),
      {
        initialProps: { tree },
        wrapper: Wrapper,
      },
    );
    expect(result.error?.message).toEqual('Could not find component');
  });
  it('should support fragments and text node iteration', () => {
    jest.spyOn(mockFeatureFlagsApi, 'isActive').mockImplementation(() => true);
    const tree = (
      <>
        <MockComponent>
          <>
            <FeatureFlagComponent with="testing-flag">
              <WrappingComponent key="first">
                <InnerComponent />
              </WrappingComponent>
            </FeatureFlagComponent>
          </>
          <MockComponent>
            hello my name
            <>
              <WrappingComponent key="second">
                <InnerComponent />
              </WrappingComponent>
            </>
          </MockComponent>
          is text
          <InnerComponent />
        </MockComponent>
      </>
    );
    const { result } = renderHook(
      props =>
        useElementFilter(props.tree, elements =>
          elements
            .selectByComponentData({ key: WRAPPING_COMPONENT_KEY })
            .getElements(),
        ),
      {
        initialProps: { tree },
        wrapper: Wrapper,
      },
    );
    expect(result.current.length).toBe(2);
  });
});
import {
execute as defaultExecute,
subscribe as defaultSubscribe,
validateSchema as defaultValidateSchema,
validate as defaultValidate,
parse as defaultParse,
GraphQLSchema,
ExecutionResult,
ExecutionArgs,
DocumentNode,
GraphQLError,
specifiedRules as defaultValidationRules,
ValidationRule,
getOperationAST,
OperationDefinitionNode,
} from "graphql";
import { isAsyncIterable } from "./isAsyncIterable";
import type { Server as IOServer, Socket as IOSocket } from "socket.io";
/** A value that may be returned directly or wrapped in a Promise. */
export type PromiseOrPlain<T> = T | Promise<T>;
/** A GraphQL source document in raw string form. */
type DocumentSourceString = string;
/** An object that may (or may not) be a parsed GraphQL AST document. */
type MaybeDocumentNode = Record<string, unknown> | DocumentNode;
/** Runtime check that a plain object is a parsed GraphQL AST document. */
const isDocumentNode = (input: MaybeDocumentNode): input is DocumentNode => {
  const candidate = input as Record<string, unknown>;
  return candidate["kind"] === "Document" && Array.isArray(candidate["definitions"]);
};
/* Executor signature: may return a single result or an async stream of results (e.g. for @defer/@stream). */
export type ExecuteFunction = (
  args: ExecutionArgs
) => PromiseOrPlain<AsyncIterableIterator<ExecutionResult> | ExecutionResult>;
export type GetParameterFunctionParameter = {
  /* The socket that sends the operation */
  socket: IOSocket;
  /* The GraphQL payload that is sent by the socket. */
  graphQLPayload: {
    /* The source document. Can be a string or object. */
    source: DocumentSourceString | MaybeDocumentNode;
    /* The variables for the source document. */
    variableValues: { [key: string]: any } | null;
    /* The name of the operation that should be executed. */
    operationName: string | null;
  };
};
/* Function which is invoked for each incoming operation */
export type GetParameterFunction = (
  parameter: GetParameterFunctionParameter
) => PromiseOrPlain<{
  /* The parameters that will be used for executing/subscribing to the operation. */
  graphQLExecutionParameter: {
    /* Executable GraphQL schema (required)*/
    schema: GraphQLSchema;
    /* Execution context that is injected into each resolver. */
    contextValue?: unknown;
    /* Root value that is injected into the root object types. */
    rootValue?: unknown;
    /* Source document. Will overwrite the value sent from the client. */
    source?: DocumentSourceString | DocumentNode;
    /* Variables for the source document. Will overwrite the value sent from the client. */
    variableValues?: { [key: string]: any } | null;
    /* Name of the operation that should be executed. Will overwrite the value sent from the client. */
    operationName?: string;
  };
  /* Function for executing mutation and query operations. Uses `execute` exported from graphql by default. */
  execute?: ExecuteFunction;
  /* Function for executing subscription operations. Uses `subscribe` exported from graphql by default. */
  subscribe?: typeof defaultSubscribe;
  /* Function for parsing GraphQL source documents. Uses `parse` exported from graphql by default. */
  parse?: typeof defaultParse;
  /* Function for validating the GraphQL schema. Uses `validateSchema` exported from graphql by default. */
  validateSchema?: typeof defaultValidateSchema;
  /* Function for validating the GraphQL documents. Uses `validate` exported from graphql by default. */
  validate?: typeof defaultValidate;
  /* Array of validation rules. Uses the `specifiedRules` exported from graphql by default */
  validationRules?: ValidationRule[];
}>;
/** Whether the given operation definition node describes a subscription. */
const isSubscriptionOperation = (def: OperationDefinitionNode) => {
  return def.operation === "subscription";
};
/** Normalized shape of a decoded "@graphql/execute" client message. */
type MessagePayload = {
/* Client-chosen operation id; used to correlate results and unsubscribes. */
id: number;
/* GraphQL source: either a document string or an (already parsed) AST node. */
operation: DocumentSourceString | MaybeDocumentNode;
variables: { [key: string]: any } | null;
operationName: string | null;
};
/**
 * Decodes a raw "@graphql/execute" message received from a client socket.
 *
 * Each field is checked structurally; the first invalid field aborts decoding
 * and an Error describing it is returned. On success a normalized
 * MessagePayload is returned (missing `variables`/`operationName` become null).
 * Note: `variables` accepts any non-function object (arrays included); deeper
 * validation is deferred to GraphQL execution.
 */
const decodeMessage = (message: unknown): MessagePayload | Error => {
  let id: number;
  let operation: DocumentSourceString | MaybeDocumentNode | null = null;
  let variables: { [key: string]: any } | null = null;
  let operationName: string | null = null;
  if (typeof message === "object" && message !== null) {
    const maybeId: unknown = (message as any).id;
    if (typeof maybeId === "number") {
      id = maybeId;
    } else {
      return new Error("Invalid message format. Field 'id' is invalid.");
    }
    const maybeOperation: unknown = (message as any).operation;
    if (
      typeof maybeOperation === "string" ||
      (typeof maybeOperation === "object" && maybeOperation !== null)
    ) {
      operation = maybeOperation as DocumentSourceString | MaybeDocumentNode;
    } else {
      return new Error(
        "Invalid message format. Field 'operation' is invalid. Must be DocumentSourceString or DocumentNode."
      );
    }
    const maybeVariables: unknown = (message as any).variables ?? null;
    if (typeof maybeVariables === "object") {
      variables = maybeVariables;
    } else {
      // BUG FIX: this error previously referenced 'variableValues', but the
      // wire-format field read above is named 'variables'.
      return new Error(
        "Invalid message format. Field 'variables' is invalid."
      );
    }
    const maybeOperationName: unknown = (message as any).operationName ?? null;
    if (maybeOperationName === null || typeof maybeOperationName === "string") {
      operationName = maybeOperationName;
    } else {
      return new Error(
        "Invalid message format. Field 'operationName' is invalid."
      );
    }
    return {
      id,
      operation,
      variables,
      operationName,
    };
  }
  return new Error("Invalid message format. Sent message is not an object.");
};
/**
 * Parses an "@graphql/unsubscribe" message from a client socket.
 * Yields the operation id on success, otherwise an Error naming the problem.
 */
const decodeUnsubscribeMessage = (message: unknown): { id: number } | Error => {
  if (typeof message !== "object" || message === null) {
    return new Error("Invalid message format. Sent message is not an object.");
  }
  const maybeId: unknown = (message as any).id;
  return typeof maybeId === "number"
    ? { id: maybeId }
    : new Error("Invalid message format. Field 'id' is invalid.");
};
/** Callback invoked when an incoming message fails structural decoding. */
export type DecodeErrorHandler = (error: Error) => void;
export type RegisterSocketIOGraphQLServerParameter = {
/* The Socket.IO server whose connections should speak the GraphQL protocol. */
socketServer: IOServer;
/* get the parameters for a incoming GraphQL operation */
getParameter: GetParameterFunction;
/* error handler for failed message decoding attempts */
onMessageDecodeError?: DecodeErrorHandler;
/* whether the GraphQL layer has to be enabled for each socket explicitly */
isLazy?: boolean;
};
/** Detaches the GraphQL layer from a single socket when invoked. */
export type UnsubscribeHandler = () => void;
export type SocketIOGraphQLServer = {
/* register a single socket */
registerSocket: (socket: IOSocket) => UnsubscribeHandler;
/* dispose a single socket */
disposeSocket: (socket: IOSocket) => void;
/* dispose all connections and remove all listeners on the socketServer. */
destroy: () => void;
};
/**
 * Attaches a GraphQL execution layer to a Socket.IO server.
 *
 * Protocol (per socket):
 *  - "@graphql/execute"      -> decode, resolve parameters via `getParameter`,
 *                               validate, then execute (query/mutation) or
 *                               subscribe (subscription). Results are emitted
 *                               as "@graphql/result" events keyed by the
 *                               client-supplied operation id.
 *  - "@graphql/unsubscribe"  -> tear down the async iterator registered under
 *                               the given id.
 *  - "disconnect"            -> tear down all of the socket's subscriptions.
 *
 * When `isLazy` is false every new connection is registered automatically;
 * otherwise callers opt sockets in via the returned `registerSocket`.
 */
export const registerSocketIOGraphQLServer = ({
socketServer,
getParameter,
onMessageDecodeError = console.error,
isLazy = false,
}: RegisterSocketIOGraphQLServerParameter): SocketIOGraphQLServer => {
// NOTE(review): `acceptNewConnections` is read once below but never set to
// false anywhere in this block — it appears vestigial; confirm `destroy`
// was not meant to flip it.
let acceptNewConnections = true;
// Per-socket teardown functions; also doubles as the "already registered" set.
const disposeHandlers = new Map<IOSocket, UnsubscribeHandler>();
const registerSocket = (socket: IOSocket) => {
// In case the socket is already registered :)
const dispose = disposeHandlers.get(socket);
if (dispose) {
return dispose;
}
// Active subscription teardown callbacks for this socket, keyed by operation id.
const subscriptions = new Map<number, () => void>();
const executeHandler = async (rawMessage: unknown) => {
const message = decodeMessage(rawMessage);
if (message instanceof Error) {
// TODO: Unify what we should do with this.
onMessageDecodeError(message);
return;
}
// `id` is destructured below; safe to reference here because this closure
// is only invoked after the destructuring has run.
const emitFinalResult = (executionResult: ExecutionResult) =>
socket.emit("@graphql/result", {
...executionResult,
id,
isFinal: true,
});
const {
id,
operation: source,
variables: variableValues,
operationName,
} = message;
// Resolve schema/context plus optional overrides for this one operation.
const {
graphQLExecutionParameter,
subscribe = defaultSubscribe,
execute = defaultExecute,
parse = defaultParse,
validateSchema = defaultValidateSchema,
validate = defaultValidate,
validationRules = defaultValidationRules,
} = await getParameter({
socket,
graphQLPayload: {
source,
variableValues,
operationName,
},
});
// Validate Schema
const schemaValidationErrors = validateSchema(
graphQLExecutionParameter.schema
);
if (schemaValidationErrors.length > 0) {
emitFinalResult({ errors: schemaValidationErrors });
return;
}
// Accept either a raw source string (parse it) or a pre-parsed DocumentNode.
let documentAst: DocumentNode;
if (typeof source === "string") {
// Parse
try {
documentAst = parse(source);
} catch (syntaxError: unknown) {
emitFinalResult({ errors: [syntaxError as GraphQLError] });
return;
}
} else if (isDocumentNode(source)) {
documentAst = source;
} else {
emitFinalResult({
errors: [
new GraphQLError(
"Invalid DocumentNode. The provided document AST node is invalid."
),
],
});
return;
}
// Validate
const validationErrors = validate(
graphQLExecutionParameter.schema,
documentAst,
validationRules
);
if (validationErrors.length > 0) {
emitFinalResult({
errors: validationErrors,
});
return;
}
// Server-side `graphQLExecutionParameter` spread last so it can override
// the client-provided source/variables/operationName.
const executionParameter = {
document: documentAst,
operationName,
source,
variableValues,
...graphQLExecutionParameter,
};
// Streams each value of an async iterator to the client; plain results are
// emitted once with isFinal: true.
const asyncIteratorHandler = async (
result: AsyncIterableIterator<ExecutionResult> | ExecutionResult
) => {
if (isAsyncIterable(result)) {
// Register teardown so "@graphql/unsubscribe"/disconnect can end the stream.
subscriptions.set(id, () => result.return?.(null));
for await (const subscriptionResult of result) {
socket.emit("@graphql/result", { ...subscriptionResult, id });
}
} else {
emitFinalResult(result);
}
};
let executionResult: PromiseOrPlain<
ExecutionResult | AsyncIterableIterator<ExecutionResult>
>;
const mainOperation = getOperationAST(documentAst, operationName);
if (!mainOperation) {
executionResult = {
errors: [new GraphQLError("No executable operation sent.")],
};
} else {
try {
if (isSubscriptionOperation(mainOperation)) {
executionResult = await subscribe({
...executionParameter,
document: documentAst,
});
} else {
executionResult = execute(executionParameter);
}
} catch (contextError) {
// Synchronous throw from execute/subscribe (e.g. context creation).
console.error("Unexpected error occurred.", contextError);
executionResult = {
errors: [new GraphQLError("A unexpected error occurred.")],
};
}
}
Promise.resolve(executionResult)
.then((result) => {
if (isAsyncIterable(result)) {
return asyncIteratorHandler(result);
} else {
emitFinalResult(result);
}
})
.catch((err) => {
emitFinalResult({
errors: [err],
});
});
};
socket.on("@graphql/execute", executeHandler);
const unsubscribeHandler = (rawMessage: unknown) => {
const message = decodeUnsubscribeMessage(rawMessage);
if (message instanceof Error) {
// TODO: Unify what we should do with this.
onMessageDecodeError(message);
return;
}
const id = message.id;
const subscription = subscriptions.get(id);
subscription?.();
subscriptions.delete(id);
};
socket.on("@graphql/unsubscribe", unsubscribeHandler);
const disconnectHandler = () => {
// Unsubscribe all pending GraphQL Live Queries and Subscriptions
subscriptions.forEach((unsubscribe) => unsubscribe());
disposeHandlers.delete(socket);
};
socket.once("disconnect", disconnectHandler);
// Manual teardown: remove listeners, then reuse the disconnect cleanup.
const disposeHandler = () => {
socket.off("@graphql/execute", executeHandler);
socket.off("@graphql/unsubscribe", unsubscribeHandler);
socket.off("disconnect", disconnectHandler);
disconnectHandler();
};
disposeHandlers.set(socket, disposeHandler);
return disposeHandler;
};
if (isLazy === false && acceptNewConnections === true) {
socketServer.on("connection", registerSocket);
}
return {
registerSocket: (socket: IOSocket) =>
disposeHandlers.get(socket) ?? registerSocket(socket),
disposeSocket: (socket: IOSocket) => disposeHandlers.get(socket)?.(),
destroy: () => {
socketServer.off("connection", registerSocket);
for (const dispose of disposeHandlers.values()) {
dispose();
}
},
};
};
import { Lookup } from "@esfx/iter-lookup";
import * as fn from "../";
import * as users from "./data/users";
import * as nodes from "./data/nodes";
import * as books from "./data/books";
import { Comparable } from '@esfx/equatable';
import { HashSet } from '@esfx/collections-hashset';
import { HashMap } from '@esfx/collections-hashmap';
import { Index } from '@esfx/interval';
describe("empty()", () => {
it("is empty", () => expect(fn.empty()).toEqualSequence([]));
});
describe("once()", () => {
it("is once", () => expect(fn.once(1)).toEqualSequence([1]));
});
describe("repeat()", () => {
it("0 times", () => expect(fn.repeat("a", 0)).toEqualSequence([]));
it("5 times", () => expect(fn.repeat("a", 5)).toEqualSequence(["a", "a", "a", "a", "a"]));
it.each`
type | count | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'count' is $type", ({ count, error }) => expect(() => fn.repeat("a", count)).toThrow(error));
});
describe("range()", () => {
it("same", () => expect(fn.range(1, 1)).toEqualSequence([1]));
it("low to high", () => expect(fn.range(1, 3)).toEqualSequence([1, 2, 3]));
it("low to high by 2", () => expect(fn.range(1, 3, 2)).toEqualSequence([1, 3]));
it("high to low", () => expect(fn.range(3, 1)).toEqualSequence([3, 2, 1]));
it("high to low by 2", () => expect(fn.range(3, 1, 2)).toEqualSequence([3, 1]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'start' is $type", ({ value, error }) => expect(() => fn.range(value, 3)).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'end' is $type", ({ value, error }) => expect(() => fn.range(1, value)).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"0"} | ${0} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'increment' is $type", ({ value, error }) => expect(() => fn.range(1, 3, value)).toThrow(error));
});
describe("continuous()", () => {
it("after 5 elements", () => expect(fn.continuous(1)).toStartWithSequence([1, 1, 1, 1, 1]));
it("after 10 elements", () => expect(fn.continuous(1)).toStartWithSequence([1, 1, 1, 1, 1, 1, 1, 1, 1, 1]));
});
describe("generate()", () => {
it("even numbers", () => expect(fn.generate(3, i => i * 2)).toEqualSequence([0, 2, 4]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'count' is $type", ({ value, error }) => expect(() => fn.generate(value, () => {})).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'generator' is $type", ({ value, error }) => expect(() => fn.generate(1, value)).toThrow(error));
});
describe("consume()", () => {
it("consumes", () => {
const q = fn.consume(function* () { yield 1; } ());
expect(q).toEqualSequence([1]);
expect(q).toEqualSequence([]);
});
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterator"} | ${{}} | ${TypeError}
`("throws if 'iterator' is $type", ({ value, error }) => expect(() => fn.consume(value)).toThrow(error));
});
// describe("objectKeys()", () => {
// it("gets keys", () => expect(fn.objectKeys({ a: 1, b: 2 })).toEqualSequence(["a", "b"]));
// theory.throws("throws if 'source' is", (source: any) => fn.objectKeys(source), {
// "undefined": [TypeError, undefined],
// "null": [TypeError, null],
// "non-object": [TypeError, ""]
// });
// });
// describe("objectValues()", () => {
// it("gets values", () => expect(fn.objectValues({ a: 1, b: 2 })).toEqualSequence([1, 2]));
// theory.throws("throws if 'source' is", (source: any) => fn.objectValues(source), {
// "undefined": [TypeError, undefined],
// "null": [TypeError, null],
// "non-object": [TypeError, ""]
// });
// });
// describe("objectEntries()", () => {
// it("gets keys", () => expect(fn.objectEntries({ a: 1, b: 2 }).toArray()).toEqual([["a", 1], ["b", 2]]));
// theory.throws("throws if 'source' is", (source: any) => fn.objectEntries(source), {
// "undefined": [TypeError, undefined],
// "null": [TypeError, null],
// "non-object": [TypeError, ""]
// });
// });
// Subquery
describe("filter()", () => {
it("filters", () => expect(fn.filter([1, 2, 3], x => x >= 2)).toEqualSequence([2, 3]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.filter([], value)).toThrow(error));
});
describe("filterDefined()", () => {
it("filterDefined()", () => expect(fn.filterDefined([1, undefined, 2])).toEqualSequence([1, 2]));
});
describe("map()", () => {
it("maps", () => expect(fn.map([1, 2, 3], x => x * 2)).toEqualSequence([2, 4, 6]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'selector' is $type", ({ value, error }) => expect(() => fn.map([], value)).toThrow(error));
});
describe("flatMap()", () => {
it("flatMaps", () => expect(fn.flatMap([1, 2, 3], x => [x, 0])).toEqualSequence([1, 0, 2, 0, 3, 0]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'projection' is $type", ({ value, error }) => expect(() => fn.flatMap([], value)).toThrow(error));
});
describe("tap()", () => {
it("taps", () => {
const received: number[] = [];
const result = fn.tap([1, 2, 3, 4], v => received.push(v));
expect(result).toEqualSequence([1, 2, 3, 4]);
expect(received).toEqual([1, 2, 3, 4]);
});
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'callback' is $type", ({ value, error }) => expect(() => fn.tap([], value)).toThrow(error));
});
describe("reverse()", () => {
it("reverses", () => expect(fn.reverse([1, 2, 3], )).toEqualSequence([3, 2, 1]));
});
describe("skip()", () => {
it("skips", () => expect(fn.skip([1, 2, 3], 1)).toEqualSequence([2, 3]));
it("skip none", () => expect(fn.skip([1, 2, 3], 0)).toEqualSequence([1, 2, 3]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'count' is $type", ({ value, error }) => expect(() => fn.skip([], value)).toThrow(error));
});
describe("skipRight()", () => {
it("skips right", () => expect(fn.skipRight([1, 2, 3], 1)).toEqualSequence([1, 2]));
it("skips right none", () => expect(fn.skipRight([1, 2, 3], 0)).toEqualSequence([1, 2, 3]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'count' is $type", ({ value, error }) => expect(() => fn.skip([], value)).toThrow(error));
});
describe("skipWhile()", () => {
it("skips while", () => expect(fn.skipWhile([1, 2, 1, 3], x => x < 2)).toEqualSequence([2, 1, 3]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'callback' is $type", ({ value, error }) => expect(() => fn.skipWhile([], value)).toThrow(error));
});
describe("skipUntil()", () => {
it("skips until", () => expect(fn.skipUntil([1, 2, 1, 3], x => x >= 2)).toEqualSequence([2, 1, 3]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'callback' is $type", ({ value, error }) => expect(() => fn.skipUntil([], value)).toThrow(error));
});
describe("take()", () => {
it("takes", () => expect(fn.take([1, 2, 3], 2)).toEqualSequence([1, 2]));
it("takes none", () => expect(fn.take([1, 2, 3], 0)).toEqualSequence([]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'count' is $type", ({ value, error }) => expect(() => fn.take([], value)).toThrow(error));
});
describe("takeRight()", () => {
it("takes right", () => expect(fn.takeRight([1, 2, 3], 2)).toEqualSequence([2, 3]));
it("takes right none", () => expect(fn.takeRight([1, 2, 3], 0)).toEqualSequence([]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'count' is $type", ({ value, error }) => expect(() => fn.takeRight([], value)).toThrow(error));
});
describe("takeWhile()", () => {
it("takes while", () => expect(fn.takeWhile([1, 2, 3, 1], x => x < 3)).toEqualSequence([1, 2]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'callback' is $type", ({ value, error }) => expect(() => fn.takeWhile([], value)).toThrow(error));
});
describe("takeUntil()", () => {
it("takes until", () => expect(fn.takeUntil([1, 2, 3, 1], x => x >= 3)).toEqualSequence([1, 2]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'callback' is $type", ({ value, error }) => expect(() => fn.takeUntil([], value)).toThrow(error));
});
describe("intersect()", () => {
it("intersects", () => expect(fn.intersect([1, 1, 2, 3, 4], [1, 3, 3, 5, 7])).toEqualSequence([1, 3]));
it("intersects none", () => expect(fn.intersect([1, 1, 2, 3, 4], [])).toEqualSequence([]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'other' is $type", ({ value, error }) => expect(() => fn.intersect([], value)).toThrow(error));
});
describe("union()", () => {
it("unions", () => expect(fn.union([1, 1, 2, 3, 4], [1, 3, 3, 5, 7])).toEqualSequence([1, 2, 3, 4, 5, 7]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'other' is $type", ({ value, error }) => expect(() => fn.union([], value)).toThrow(error));
});
describe("except()", () => {
it("excepts", () => expect(fn.except([1, 1, 2, 3, 4], [2, 4, 5])).toEqualSequence([1, 3]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'other' is $type", ({ value, error }) => expect(() => fn.except([], value)).toThrow(error));
});
describe("symmetricDifference()", () => {
it("symmetricDifference", () => expect(fn.symmetricDifference([1, 1, 2, 3, 4], [2, 4, 5])).toEqualSequence([1, 3, 5]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'other' is $type", ({ value, error }) => expect(() => fn.symmetricDifference([], value)).toThrow(error));
});
describe("concat()", () => {
it("concats", () => expect(fn.concat([1, 1, 2, 3, 4], [1, 3, 3, 5, 7])).toEqualSequence([1, 1, 2, 3, 4, 1, 3, 3, 5, 7]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'other' is $type", ({ value, error }) => expect(() => fn.concat([], value)).toThrow(error));
});
describe("distinct()", () => {
it("is distinct", () => expect(fn.distinct([1, 1, 2, 3, 4], )).toEqualSequence([1, 2, 3, 4]));
});
describe("append()", () => {
it("appends", () => expect(fn.append([1, 2, 3], 5)).toEqualSequence([1, 2, 3, 5]));
});
describe("prepend()", () => {
it("prepends", () => expect(fn.prepend([1, 2, 3], 5)).toEqualSequence([5, 1, 2, 3]));
});
describe("patch()", () => {
it.each`
start | skip | range | expected
${0} | ${0} | ${[9, 8, 7]} | ${[9, 8, 7, 1, 2, 3]}
${0} | ${2} | ${[9, 8, 7]} | ${[9, 8, 7, 3]}
${2} | ${0} | ${[9, 8, 7]} | ${[1, 2, 9, 8, 7, 3]}
${5} | ${0} | ${[9, 8, 7]} | ${[1, 2, 3, 9, 8, 7]}
${2} | ${1} | ${[9, 8, 7]} | ${[1, 2, 9, 8, 7]}
${2} | ${3} | ${[9, 8, 7]} | ${[1, 2, 9, 8, 7]}
`("patches with ($start, $skip, $range)", ({ start, skip, range, expected }) => expect(fn.patch([1, 2, 3], start, skip, range)).toEqualSequence(expected));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'start' is $type", ({ value, error }) => expect(() => fn.patch([], value, 0, [])).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'skipCount' is $type", ({ value, error }) => expect(() => fn.patch([], 0, value, [])).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'range' is $type", ({ value, error }) => expect(() => fn.patch([], 0, 0, value)).toThrow(error));
});
describe("defaultIfEmpty()", () => {
it("not empty", () => expect(fn.defaultIfEmpty([1, 2, 3], 9)).toEqualSequence([1, 2, 3]));
it("empty", () => expect(fn.defaultIfEmpty([], 9)).toEqualSequence([9]));
});
describe("pageBy()", () => {
it("pages with partial last page", () => expect(fn.toArray(fn.map(fn.pageBy([1, 2, 3], 2), x => Array.from(x)))).toEqual([[1, 2], [3]]));
it("pages exact", () => expect(fn.toArray(fn.map(fn.pageBy([1, 2, 3, 4], 2), x => Array.from(x)))).toEqual([[1, 2], [3, 4]]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"0"} | ${0} | ${RangeError}
${"negative"} | ${-1} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'pageSize' is $type", ({ value, error }) => expect(() => fn.pageBy([], value)).toThrow(error));
});
describe("spanMap()", () => {
it("odd/even spans", () => expect(fn.toArray(fn.map(fn.spanMap([1, 3, 2, 4, 5, 7], k => k % 2 === 1), g => Array.from(g)))).toEqual([[1, 3], [2, 4], [5, 7]]));
it("empty", () => expect(fn.spanMap([], k => k % 2 === 1)).toEqualSequence([]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.spanMap([], value)).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.spanMap([], x => x, value)).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'spanSelector' is $type", ({ value, error }) => expect(() => fn.spanMap([], x => x, x => x, value)).toThrow(error));
});
describe("groupBy()", () => {
it("group by role", () => expect(fn.toArray(fn.groupBy(users.users, u => u.role, u => u.name, (role, names) => ({ role: role, names: fn.toArray(names) }))))
.toEqual([
{ role: "admin", names: ["alice"] },
{ role: "user", names: ["bob", "dave"] }
]));
it("group by symbol", () => {
const sym = Symbol();
const data = [
{ category: "a", value: 1 },
{ category: "a", value: 2 },
{ category: "a", value: 3 },
{ category: sym, value: 4 }
];
expect(fn.toArray(fn.groupBy(data, row => row.category, row => row.value, (category, values) => ({ category, values: fn.toArray(values) }))))
.toEqual([
{ category: "a", values: [1, 2, 3] },
{ category: sym, values: [4] }
]);
});
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.groupBy([], value)).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.groupBy([], x => x, value)).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'resultSelector' is $type", ({ value, error }) => expect(() => fn.groupBy([], x => x, x => x, value)).toThrow(error));
});
describe("scan()", () => {
it("scans sums", () => expect(fn.scan([1, 2, 3], (c, e) => c + e, 0)).toEqualSequence([1, 3, 6]));
it("scans sums no seed", () => expect(fn.scan([1, 2, 3], (c, e) => c + e)).toEqualSequence([3, 6]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'accumulator' is $type", ({ value, error }) => expect(() => fn.scan([], value)).toThrow(error));
});
describe("scanRight()", () => {
it("scans sums from right", () => expect(fn.scanRight([1, 2, 3], (c, e) => c + e, 0)).toEqualSequence([3, 5, 6]));
it("scans sums from right no seed", () => expect(fn.scanRight([1, 2, 3], (c, e) => c + e)).toEqualSequence([5, 6]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'accumulator' is $type", ({ value, error }) => expect(() => fn.scanRight([], value)).toThrow(error));
});
// describe("through()", () => {
// it("pipes through", () => expect(fn.through([1, 2], q => {
// expect(q).toEqualSequence([1, 2]);
// return fn.from([3, 4]);
// })).toEqualSequence([3, 4]));
// it.each`
// type | value | error
// ${"undefined"} | ${undefined} | ${TypeError}
// ${"null"} | ${null} | ${TypeError}
// ${"non-function"} | ${""} | ${TypeError}
// `("throws if 'callback' is $type", ({ value, error }) => expect(() => fn.through([], value)).toThrow(error));
// });
describe("materialize()", () => {
it("materializes", () => {
const received: number[] = [];
const q = fn.materialize(fn.tap([1, 2, 3, 4], x => received.push(x)));
expect(q).toEqualSequence([1, 2, 3, 4]);
expect(received).toEqual([1, 2, 3, 4]);
});
});
// Joins
describe("groupJoin()", () => {
it("joins groups", () => expect(fn.toArray(fn.groupJoin(users.roles, users.users, g => g.name, u => u.role, (role, users) => ({ role: role, users: fn.toArray(users) }))))
.toEqual([
{ role: users.adminRole, users: [users.aliceUser] },
{ role: users.userRole, users: [users.bobUser, users.daveUser] },
{ role: users.guestRole, users: [] }
]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'inner' is $type", ({ value, error }) => expect(() => fn.groupJoin([], value, x => x, x => x, x => x)).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'outerKeySelector' is $type", ({ value, error }) => expect(() => fn.groupJoin([], [], value, x => x, x => x)).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'innerKeySelector' is $type", ({ value, error }) => expect(() => fn.groupJoin([], [], x => x, value, x => x)).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'resultSelector' is $type", ({ value, error }) => expect(() => fn.groupJoin([], [], x => x, x => x, value)).toThrow(error));
});
describe("join()", () => {
it("joins", () => expect(fn.toArray(fn.join(users.roles, users.users, g => g.name, u => u.role, (role, user) => ({ role: role, user: user }))))
.toEqual([
{ role: users.adminRole, user: users.aliceUser },
{ role: users.userRole, user: users.bobUser },
{ role: users.userRole, user: users.daveUser }
]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'inner' is $type", ({ value, error }) => expect(() => fn.join([], value, x => x, x => x, x => x)).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'outerKeySelector' is $type", ({ value, error }) => expect(() => fn.join([], [], value, x => x, x => x)).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'innerKeySelector' is $type", ({ value, error }) => expect(() => fn.join([], [], x => x, value, x => x)).toThrow(error));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-object"} | ${""} | ${TypeError}
${"non-iterable"} | ${{}} | ${TypeError}
`("throws if 'resultSelector' is $type", ({ value, error }) => expect(() => fn.join([], [], x => x, x => x, value)).toThrow(error));
});
describe("fullJoin()", () => {
    // Full outer join: unmatched elements from either side still appear,
    // paired with 'undefined'.
    it("joins", () => {
        const pairs = fn.toArray(fn.fullJoin(users.roles, users.users, g => g.name, u => u.role, (role, user) => ({ role, user })));
        expect(pairs).toEqual([
            { role: users.adminRole, user: users.aliceUser },
            { role: users.userRole, user: users.bobUser },
            { role: users.userRole, user: users.daveUser },
            { role: users.guestRole, user: undefined }
        ]);
    });
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-iterable", value: {}, error: TypeError },
    ])("throws if 'inner' is $type", ({ value, error }) => expect(() => fn.fullJoin([], value, x => x, x => x, x => x)).toThrow(error));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'outerKeySelector' is $type", ({ value, error }) => expect(() => fn.fullJoin([], [], value, x => x, x => x)).toThrow(error));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'innerKeySelector' is $type", ({ value, error }) => expect(() => fn.fullJoin([], [], x => x, value, x => x)).toThrow(error));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'resultSelector' is $type", ({ value, error }) => expect(() => fn.fullJoin([], [], x => x, x => x, value)).toThrow(error));
});
describe("zip()", () => {
    // zip() stops at the shorter of the two sequences.
    it.each([
        { left: [1, 2, 3], right: ["a", "b", "c"], expected: [[1, "a"], [2, "b"], [3, "c"]] },
        { left: [1, 2], right: ["a", "b", "c"], expected: [[1, "a"], [2, "b"]] },
        { left: [1, 2, 3], right: ["a", "b"], expected: [[1, "a"], [2, "b"]] },
    ])("zips with $left, $right", ({ left, right, expected }) => expect(fn.toArray(fn.zip(left, right))).toEqual(expected));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-iterable", value: {}, error: TypeError },
    ])("throws if 'right' is $type", ({ value, error }) => expect(() => fn.zip([], value)).toThrow(error));
    // 'selector' is optional, so 'undefined' is not an error case here.
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'selector' is $type", ({ value, error }) => expect(() => fn.zip([], [], value)).toThrow(error));
});
// Ordering
describe("orderBy()", () => {
    it("orders", () => expect(fn.orderBy([3, 1, 2], x => x)).toEqualSequence([1, 2, 3]));
    // Stability check: elements with equal keys keep their original order.
    it("orders same", () => {
        const ordered = fn.toArray(fn.orderBy(books.books_same, x => x.title));
        expect(ordered[0]).toBe(books.bookB2);
        expect(ordered[1]).toBe(books.bookB2_same);
    });
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.orderBy([], value)).toThrow(error));
    // 'comparison' accepts a function or a Comparer; a plain object is neither.
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-comparer", value: {}, error: TypeError },
    ])("throws if 'comparison' is $type", ({ value, error }) => expect(() => fn.orderBy([], x => x, value)).toThrow(error));
});
describe("orderByDescending()", () => {
    it("orders", () => expect(fn.orderByDescending([3, 1, 2], x => x)).toEqualSequence([3, 2, 1]));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.orderByDescending([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-comparer", value: {}, error: TypeError },
    ])("throws if 'comparison' is $type", ({ value, error }) => expect(() => fn.orderByDescending([], x => x, value)).toThrow(error));
});
// Scalars
describe("reduce()", () => {
    it("reduces sum", () => expect(fn.reduce([1, 2, 3], (acc, item) => acc + item)).toBe(6));
    // The optional 4th argument maps (finalResult, elementCount) to the output.
    it("reduces average", () => expect(fn.reduce([1, 2, 3], (acc, item) => acc + item, 0, (total, count) => total / count)).toBe(2));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'accumulator' is $type", ({ value, error }) => expect(() => fn.reduce([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'resultSelector' is $type", ({ value, error }) => expect(() => fn.reduce([], x => x, undefined, value)).toThrow(error));
});
describe("reduceRight()", () => {
    it("reduces sum", () => expect(fn.reduceRight([1, 2, 3], (acc, item) => acc + item)).toBe(6));
    it("reduces average", () => expect(fn.reduceRight([1, 2, 3], (acc, item) => acc + item, 0, (total, count) => total / count)).toBe(2));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'accumulator' is $type", ({ value, error }) => expect(() => fn.reduceRight([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'resultSelector' is $type", ({ value, error }) => expect(() => fn.reduceRight([], x => x, undefined, value)).toThrow(error));
});
describe("count()", () => {
    // count() works over any iterable, with an optional filtering predicate.
    it("counts array", () => expect(fn.count([1, 2, 3])).toBe(3));
    it("counts set", () => expect(fn.count(new Set([1, 2, 3]))).toBe(3));
    it("counts map", () => expect(fn.count(new Map([[1, 1], [2, 2], [3, 3]]))).toBe(3));
    it("counts range", () => expect(fn.count(fn.range(1, 3))).toBe(3));
    it("counts odds", () => expect(fn.count([1, 2, 3], x => x % 2 === 1)).toBe(2));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.count([], value)).toThrow(error));
});
describe("first()", () => {
    // Returns 'undefined' (rather than throwing) when nothing matches.
    it("finds first", () => expect(fn.first([1, 2, 3])).toBe(1));
    it("finds first even", () => expect(fn.first([1, 2, 3, 4], x => x % 2 === 0)).toBe(2));
    it("finds undefined when empty", () => expect(fn.first([])).toBeUndefined());
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.first([], value)).toThrow(error));
});
describe("last()", () => {
    it("finds last", () => expect(fn.last([1, 2, 3])).toBe(3));
    it("finds last odd", () => expect(fn.last([1, 2, 3, 4], x => x % 2 === 1)).toBe(3));
    it("finds undefined when empty", () => expect(fn.last([])).toBeUndefined());
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.last([], value)).toThrow(error));
});
describe("single()", () => {
    // NOTE: unlike LINQ's Single(), this returns 'undefined' instead of
    // throwing when the source has zero or more than one element.
    it("finds single", () => expect(fn.single([1])).toBe(1));
    it("finds undefined when many", () => expect(fn.single([1, 2, 3])).toBeUndefined());
    it("finds undefined when empty", () => expect(fn.single([])).toBeUndefined());
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.single([], value)).toThrow(error));
});
describe("min()", () => {
    it("finds minimum", () => expect(fn.min([5, 6, 3, 9, 4])).toBe(3));
    it("finds undefined when empty", () => expect(fn.min([])).toBeUndefined());
    // Elements implementing the Comparable protocol are compared via
    // [Comparable.compareTo] rather than relational operators.
    it("uses comparable", () => {
        const lesser = { [Comparable.compareTo](_: any) { return -1; } };
        const greater = { [Comparable.compareTo](_: any) { return +1; } };
        expect(fn.min([lesser, greater])).toBe(lesser);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'comparison' is $type", ({ value, error }) => expect(() => fn.min([], value)).toThrow(error));
});
describe("max()", () => {
    it("finds maximum", () => expect(fn.max([5, 6, 3, 9, 4])).toBe(9));
    it("finds undefined when empty", () => expect(fn.max([])).toBeUndefined());
    it("uses comparable", () => {
        const lesser = { [Comparable.compareTo](_: any) { return -1; } };
        const greater = { [Comparable.compareTo](_: any) { return +1; } };
        expect(fn.max([lesser, greater])).toBe(greater);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'comparison' is $type", ({ value, error }) => expect(() => fn.max([], value)).toThrow(error));
});
describe("sum()", () => {
    it("calculates sum", () => expect(fn.sum([1, 2, 3])).toBe(6));
    it("calculates sum using projection", () => expect(fn.sum(["1", "2", "3"], x => +x)).toBe(6));
    it("calculates zero sum when empty", () => expect(fn.sum([])).toBe(0));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.sum([], value)).toThrow(error));
    // Elements themselves (after projection) must be numbers.
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-number", value: {}, error: TypeError },
    ])("throws if sequence contains $type", ({ value, error }) => expect(() => fn.sum([value])).toThrow(error));
});
describe("average()", () => {
    it("calculates average", () => expect(fn.average([1, 2, 3])).toBe(2));
    it("calculates average using projection", () => expect(fn.average(["1", "2", "3"], x => +x)).toBe(2));
    // NOTE: an empty source averages to 0 here (no division-by-zero error).
    it("calculates zero average when empty", () => expect(fn.average([])).toBe(0));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.average([], value)).toThrow(error));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-number", value: {}, error: TypeError },
    ])("throws if sequence contains $type", ({ value, error }) => expect(() => fn.average([value])).toThrow(error));
});
describe("some()", () => {
    it("false when empty", () => expect(fn.some([])).toBe(false));
    it("true when one or more", () => expect(fn.some([1])).toBe(true));
    it("false when no match", () => expect(fn.some([1, 3], x => x === 2)).toBe(false));
    it("true when matched", () => expect(fn.some([1, 3], x => x === 3)).toBe(true));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.some([], value)).toThrow(error));
});
describe("every()", () => {
    // NOTE: unlike Array#every (vacuously true), this library's every()
    // yields 'false' for an empty source — at least one match is required.
    it("false when empty", () => expect(fn.every([], x => x % 2 === 1)).toBe(false));
    it("false when no match", () => expect(fn.every([2, 4], x => x % 2 === 1)).toBe(false));
    it("false when partial match", () => expect(fn.every([1, 2], x => x % 2 === 1)).toBe(false));
    it("true when fully matched", () => expect(fn.every([1, 3], x => x % 2 === 1)).toBe(true));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.every([], value)).toThrow(error));
});
describe("corresponds()", () => {
    // corresponds() is element-wise equality: same length, same order.
    it("true when both match", () => expect(fn.corresponds([1, 2, 3], [1, 2, 3])).toBe(true));
    it("false when source has fewer elements", () => expect(fn.corresponds([1, 2], [1, 2, 3])).toBe(false));
    it("false when other has fewer elements", () => expect(fn.corresponds([1, 2, 3], [1, 2])).toBe(false));
    it("false when other has elements in different order", () => expect(fn.corresponds([1, 2, 3], [1, 3, 2])).toBe(false));
    it("false when other has different elements", () => expect(fn.corresponds([1, 2, 3], [1, 2, 4])).toBe(false));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-iterable", value: {}, error: TypeError },
    ])("throws if 'other' is $type", ({ value, error }) => expect(() => fn.corresponds([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'equalityComparison' is $type", ({ value, error }) => expect(() => fn.corresponds([], [], value)).toThrow(error));
});
describe("includes()", () => {
    it("true when present", () => {
        expect(fn.includes([1, 2, 3], 2)).toBe(true);
    });
    it("false when missing", () => {
        expect(fn.includes([1, 2, 3], 4)).toBe(false);
    });
    it("false when empty", () => {
        expect(fn.includes([], 4)).toBe(false);
    });
});
describe("includesSequence()", () => {
    // Contiguous-subsequence test; the empty sequence is trivially included.
    it("true when included", () => expect(fn.includesSequence([1, 2, 3, 4], [2, 3])).toBe(true));
    it("false when wrong order", () => expect(fn.includesSequence([1, 2, 3, 4], [3, 2])).toBe(false));
    it("false when not present", () => expect(fn.includesSequence([1, 2, 3, 4], [5, 6])).toBe(false));
    it("false when source empty", () => expect(fn.includesSequence([], [1, 2])).toBe(false));
    it("true when other empty", () => expect(fn.includesSequence([1, 2, 3, 4], [])).toBe(true));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-iterable", value: {}, error: TypeError },
    ])("throws if 'other' is $type", ({ value, error }) => expect(() => fn.includesSequence([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'equalityComparison' is $type", ({ value, error }) => expect(() => fn.includesSequence([], [], value)).toThrow(error));
});
describe("startsWith()", () => {
    it("true when starts with other", () => expect(fn.startsWith([1, 2, 3, 4], [1, 2])).toBe(true));
    it("false when not at start", () => expect(fn.startsWith([1, 2, 3, 4], [2, 3])).toBe(false));
    it("false when wrong order", () => expect(fn.startsWith([1, 2, 3, 4], [2, 1])).toBe(false));
    it("false when not present", () => expect(fn.startsWith([1, 2, 3, 4], [5, 6])).toBe(false));
    it("false when source empty", () => expect(fn.startsWith([], [1, 2])).toBe(false));
    it("true when other empty", () => expect(fn.startsWith([1, 2, 3, 4], [])).toBe(true));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-iterable", value: {}, error: TypeError },
    ])("throws if 'other' is $type", ({ value, error }) => expect(() => fn.startsWith([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'equalityComparison' is $type", ({ value, error }) => expect(() => fn.startsWith([], [], value)).toThrow(error));
});
describe("endsWith()", () => {
    it("true when ends with other", () => expect(fn.endsWith([1, 2, 3, 4], [3, 4])).toBe(true));
    it("false when not at end", () => expect(fn.endsWith([1, 2, 3, 4], [2, 3])).toBe(false));
    it("false when wrong order", () => expect(fn.endsWith([1, 2, 3, 4], [4, 3])).toBe(false));
    it("false when not present", () => expect(fn.endsWith([1, 2, 3, 4], [5, 6])).toBe(false));
    it("false when source empty", () => expect(fn.endsWith([], [1, 2])).toBe(false));
    it("true when other empty", () => expect(fn.endsWith([1, 2, 3, 4], [])).toBe(true));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-iterable", value: {}, error: TypeError },
    ])("throws if 'other' is $type", ({ value, error }) => expect(() => fn.endsWith([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'equalityComparison' is $type", ({ value, error }) => expect(() => fn.endsWith([], [], value)).toThrow(error));
});
describe("elementAt()", () => {
    // Negative offsets count from the end (-1 is last); Index.fromEnd(n)
    // mirrors the ^n syntax, where ^0 is one-past-the-end (undefined).
    it("at offset 0", () => {
        expect(fn.elementAt([1, 2, 3], 0)).toBe(1);
    });
    it("at offset 1", () => {
        expect(fn.elementAt([1, 2, 3], 1)).toBe(2);
    });
    it("at offset -1", () => {
        expect(fn.elementAt([1, 2, 3], -1)).toBe(3);
    });
    it("at offset -2", () => {
        expect(fn.elementAt([1, 2, 3], -2)).toBe(2);
    });
    it("at offset ^0", () => {
        expect(fn.elementAt([1, 2, 3], Index.fromEnd(0))).toBe(undefined);
    });
    it("at offset ^1", () => {
        expect(fn.elementAt([1, 2, 3], Index.fromEnd(1))).toBe(3);
    });
    it("at offset ^2", () => {
        expect(fn.elementAt([1, 2, 3], Index.fromEnd(2))).toBe(2);
    });
    it("at offset greater than size", () => {
        expect(fn.elementAt([1, 2, 3], 3)).toBeUndefined();
    });
    it("at negative offset greater than size", () => {
        expect(fn.elementAt([1, 2, 3], -4)).toBeUndefined();
    });
    // Offsets must be finite integers; fractional/NaN/Infinity are RangeErrors.
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-number", value: "", error: TypeError },
        { type: "float", value: 1.5, error: RangeError },
        { type: "NaN", value: NaN, error: RangeError },
        { type: "Infinity", value: Infinity, error: RangeError },
    ])("throws if 'offset' is $type", ({ value, error }) => expect(() => fn.elementAt([], value)).toThrow(error));
});
describe("span()", () => {
    // span() splits a sequence into [leading run matching predicate, rest].
    it("gets initial span", () => expect(fn.span([1, 2, 3, 4], x => x < 3).map(x => fn.toArray(x))).toEqual([[1, 2], [3, 4]]));
    it("gets whole source", () => expect(fn.span([1, 2, 3, 4], x => x < 5).map(x => fn.toArray(x))).toEqual([[1, 2, 3, 4], []]));
    it("gets no initial span", () => expect(fn.span([1, 2, 3, 4], x => x < 1).map(x => fn.toArray(x))).toEqual([[], [1, 2, 3, 4]]));
    // NOTE(review): the last row was mislabeled "non-equaler" (copy-paste from
    // the equaler tables); 'predicate' must be a function, so a plain object
    // is simply another non-function input.
    it.each`
        type | value | error
        ${"undefined"} | ${undefined} | ${TypeError}
        ${"null"} | ${null} | ${TypeError}
        ${"non-function"} | ${""} | ${TypeError}
        ${"non-function object"} | ${{}} | ${TypeError}
    `("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.span([], value)).toThrow(error));
});
describe("spanUntil()", () => {
    // spanUntil() splits at the first element matching the predicate.
    it("gets initial span", () => expect(fn.spanUntil([1, 2, 3, 4], x => x > 2).map(x => fn.toArray(x))).toEqual([[1, 2], [3, 4]]));
    it("gets whole source", () => expect(fn.spanUntil([1, 2, 3, 4], x => x > 4).map(x => fn.toArray(x))).toEqual([[1, 2, 3, 4], []]));
    it("gets no initial span", () => expect(fn.spanUntil([1, 2, 3, 4], x => x > 0).map(x => fn.toArray(x))).toEqual([[], [1, 2, 3, 4]]));
    // NOTE(review): the last row was mislabeled "non-equaler" (copy-paste from
    // the equaler tables); 'predicate' must be a function, so a plain object
    // is simply another non-function input.
    it.each`
        type | value | error
        ${"undefined"} | ${undefined} | ${TypeError}
        ${"null"} | ${null} | ${TypeError}
        ${"non-function"} | ${""} | ${TypeError}
        ${"non-function object"} | ${{}} | ${TypeError}
    `("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.spanUntil([], value)).toThrow(error));
});
describe("forEach()", () => {
    it("called for each item", () => {
        const seen: number[] = [];
        fn.forEach([1, 2, 3, 4], v => seen.push(v));
        expect(seen).toEqual([1, 2, 3, 4]);
    });
    // node's for..of does not call return :/
    it("close iterator on error", () => {
        let wasClosed = false;
        const infinite: IterableIterator<number> = {
            [Symbol.iterator]() { return this; },
            next() { return { value: 1, done: false }; },
            return() { wasClosed = true; return { value: undefined, done: true }; }
        };
        const failure = new Error();
        expect(() => fn.forEach(infinite, () => { throw failure; })).toThrow(failure);
        expect(wasClosed).toBe(true);
    });
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'callback' is $type", ({ value, error }) => expect(() => fn.forEach([], value)).toThrow(error));
});
describe("unzip()", () => {
    it("unzips", () => {
        const zipped: [number, string][] = [[1, "a"], [2, "b"]];
        expect(fn.unzip(zipped)).toEqual([[1, 2], ["a", "b"]]);
    });
});
describe("toArray()", () => {
    it("creates array", () => {
        expect(fn.toArray([1, 2, 3, 4])).toEqual([1, 2, 3, 4]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.toArray([], value)).toThrow(error));
});
describe("toSet()", () => {
    it("result is a Set", () => expect(fn.toSet([1, 2, 3, 4])).toBeInstanceOf(Set));
    it("creates with right size", () => expect(fn.toSet([1, 2, 3, 4]).size).toBe(4));
    it("creates set in order", () => expect(fn.toSet([1, 2, 3, 4])).toEqualSequence([1, 2, 3, 4]));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.toSet([], value)).toThrow(error));
});
describe("toHashSet()", () => {
    it("result is a HashSet", () => expect(fn.toHashSet([1, 2, 3, 4])).toBeInstanceOf(HashSet));
    it("creates with right size", () => expect(fn.toHashSet([1, 2, 3, 4]).size).toBe(4));
    it("creates set in order", () => expect(fn.toHashSet([1, 2, 3, 4])).toEqualSequence([1, 2, 3, 4]));
    // The 2nd argument is overloaded: elementSelector function OR an Equaler.
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'elementSelector'/'equaler' is $type", ({ value, error }) => expect(() => fn.toHashSet([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'equaler' is $type", ({ value, error }) => expect(() => fn.toHashSet([], x => x, value)).toThrow(error));
});
describe("toMap()", () => {
    it("result is a Map", () => expect(fn.toMap([1, 2, 3, 4], x => x)).toBeInstanceOf(Map));
    it("creates with right size", () => expect(fn.toMap([1, 2, 3, 4], x => x).size).toBe(4));
    it("creates with correct keys", () => expect(fn.toMap([1, 2, 3, 4], x => x * 2).get(2)).toBe(1));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.toMap([], value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.toMap([], x => x, value)).toThrow(error));
});
describe("toHashMap()", () => {
    it("result is a HashMap", () => expect(fn.toHashMap([1, 2, 3, 4], x => x)).toBeInstanceOf(HashMap));
    it("creates with right size", () => expect(fn.toHashMap([1, 2, 3, 4], x => x).size).toBe(4));
    it("creates with correct keys", () => expect(fn.toHashMap([1, 2, 3, 4], x => x * 2).get(2)).toBe(1));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.toHashMap([], value)).toThrow(error));
    // The 3rd argument is overloaded: elementSelector function OR a keyEqualer.
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'elementSelector'/'keyEqualer' is $type", ({ value, error }) => expect(() => fn.toHashMap([], x => x, value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'keyEqualer' is $type", ({ value, error }) => expect(() => fn.toHashMap([], x => x, x => x, value)).toThrow(error));
});
describe("toLookup()", () => {
    it("result is a Lookup", () => expect(fn.toLookup([1, 2, 3, 4], x => x)).toBeInstanceOf(Lookup));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.toLookup([], value)).toThrow(error));
    // The 3rd argument is overloaded: elementSelector function OR a keyEqualer.
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'elementSelector'/'keyEqualer' is $type", ({ value, error }) => expect(() => fn.toLookup([], x => x, value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-equaler", value: {}, error: TypeError },
    ])("throws if 'keyEqualer' is $type", ({ value, error }) => expect(() => fn.toLookup([], x => x, x => x, value)).toThrow(error));
});
describe("toObject()", () => {
    it("creates object with prototype", () => {
        const proto = {};
        const result: any = fn.toObject(["a", "b"], proto, x => x);
        expect(result).toHaveProperty("a", "a");
        expect(result).toHaveProperty("b", "b");
        expect(Object.getPrototypeOf(result)).toBe(proto);
    });
    // 'null' is a valid prototype (yields a dictionary-style object).
    it("creates object with null prototype", () => {
        const result: any = fn.toObject(["a", "b"], null, x => x);
        expect(result.a).toBe("a");
        expect(Object.getPrototypeOf(result)).toBe(null);
    });
    it.each([
        { type: "non-object", value: "", error: TypeError },
    ])("throws if 'prototype' is $type", ({ value, error }) => expect(() => fn.toObject([], value, x => x)).toThrow(error));
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.toObject([], null, value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'elementSelector' is $type", ({ value, error }) => expect(() => fn.toObject([], null, x => x, value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'descriptorSelector' is $type", ({ value, error }) => expect(() => fn.toObject([], null, x => x, x => x, value)).toThrow(error));
});
describe("copyTo", () => {
    it("copies to array", () => {
        const target = Array(4);
        expect(fn.copyTo([1, 2, 3, 4], target)).toEqualSequence([1, 2, 3, 4]);
    });
});
// Hierarchy
describe("toHierarchy()", () => {
    // 'provider' must be an object implementing the hierarchy-provider
    // protocol; a plain object is rejected.
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-object", value: "", error: TypeError },
        { type: "non-provider", value: {}, error: TypeError },
    ])("throws if 'provider' is $type", ({ value, error }) => expect(() => fn.toHierarchy([], value)).toThrow(error));
});
describe("thenBy()", () => {
    // Secondary key only breaks ties left by the preceding orderBy().
    it("preserves preceding order", () => {
        const ordered = fn.thenBy(fn.orderBy(books.books, x => x.title), x => x.id);
        expect(ordered).toEqualSequence([books.bookA3, books.bookA4, books.bookB1, books.bookB2]);
    });
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.thenBy(fn.orderBy([], x => x), value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-comparer", value: {}, error: TypeError },
    ])("throws if 'comparison' is $type", ({ value, error }) => expect(() => fn.thenBy(fn.orderBy([], x => x), x => x, value)).toThrow(error));
});
describe("thenByDescending()", () => {
    it("preserves preceding order", () => {
        const ordered = fn.thenByDescending(fn.orderBy(books.books, x => x.title), x => x.id);
        expect(ordered).toEqualSequence([books.bookA4, books.bookA3, books.bookB2, books.bookB1]);
    });
    it.each([
        { type: "undefined", value: undefined, error: TypeError },
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'keySelector' is $type", ({ value, error }) => expect(() => fn.thenByDescending(fn.orderBy([], x => x), value)).toThrow(error));
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
        { type: "non-comparer", value: {}, error: TypeError },
    ])("throws if 'comparison' is $type", ({ value, error }) => expect(() => fn.thenByDescending(fn.orderBy([], x => x), x => x, value)).toThrow(error));
});
describe("root()", () => {
    it("gets root", () => {
        const h = fn.toHierarchy([nodes.nodeAAAA], nodes.nodeHierarchy);
        expect(fn.root(h)).toEqualSequence([nodes.nodeA]);
    });
    // 'undefined' elements are skipped rather than raising.
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.root(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.root(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("ancestors()", () => {
    // Ancestors are yielded nearest-first, excluding the node itself.
    it("gets ancestors", () => {
        const h = fn.toHierarchy([nodes.nodeAAAA], nodes.nodeHierarchy);
        expect(fn.ancestors(h)).toEqualSequence([nodes.nodeAAA, nodes.nodeAA, nodes.nodeA]);
    });
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.ancestors(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.ancestors(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("ancestorsAndSelf()", () => {
    it("gets ancestors and self", () => {
        const h = fn.toHierarchy([nodes.nodeAAAA], nodes.nodeHierarchy);
        expect(fn.ancestorsAndSelf(h)).toEqualSequence([nodes.nodeAAAA, nodes.nodeAAA, nodes.nodeAA, nodes.nodeA]);
    });
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.ancestorsAndSelf(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.ancestorsAndSelf(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("parents()", () => {
    // One parent per input node — duplicates are not collapsed.
    it("gets parents", () => {
        const h = fn.toHierarchy([nodes.nodeAAA, nodes.nodeAAB, nodes.nodeAAC], nodes.nodeHierarchy);
        expect(fn.parents(h)).toEqualSequence([nodes.nodeAA, nodes.nodeAA, nodes.nodeAA]);
    });
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.parents(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.parents(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("self()", () => {
    it("gets self", () => {
        const h = fn.toHierarchy([nodes.nodeAAA, nodes.nodeAAB, nodes.nodeAAC], nodes.nodeHierarchy);
        expect(fn.self(h)).toEqualSequence([nodes.nodeAAA, nodes.nodeAAB, nodes.nodeAAC]);
    });
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.self(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.self(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("siblings()", () => {
    // siblings() excludes the node itself (cf. siblingsAndSelf()).
    it("gets siblings", () => {
        const h = fn.toHierarchy([nodes.nodeAAA], nodes.nodeHierarchy);
        expect(fn.siblings(h)).toEqualSequence([nodes.nodeAAB, nodes.nodeAAC]);
    });
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.siblings(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.siblings(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("siblingsAndSelf()", () => {
    it("gets siblings and self", () => {
        const h = fn.toHierarchy([nodes.nodeAAA], nodes.nodeHierarchy);
        expect(fn.siblingsAndSelf(h)).toEqualSequence([nodes.nodeAAA, nodes.nodeAAB, nodes.nodeAAC]);
    });
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.siblingsAndSelf(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.siblingsAndSelf(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("precedingSiblings()", () => {
    it("gets siblings before self", () => {
        const h = fn.toHierarchy([nodes.nodeAAB], nodes.nodeHierarchy);
        expect(fn.precedingSiblings(h)).toEqualSequence([nodes.nodeAAA]);
    });
    it("of undefined", () => {
        const h = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.precedingSiblings(h)).toEqualSequence([]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.precedingSiblings(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("preceding()", () => {
    it("gets nodes before self", () => {
        const h = fn.toHierarchy([nodes.nodeAB], nodes.nodeHierarchy);
        expect(fn.preceding(h)).toEqualSequence([nodes.nodeAAC, nodes.nodeAAB, nodes.nodeAAAA, nodes.nodeAAA, nodes.nodeAA]);
    });
    it.each([
        { type: "null", value: null, error: TypeError },
        { type: "non-function", value: "", error: TypeError },
    ])("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.preceding(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("followingSiblings()", () => {
    it("gets siblings after self", () => {
        const hierarchy = fn.toHierarchy([nodes.nodeAAB], nodes.nodeHierarchy);
        expect(fn.followingSiblings(hierarchy)).toEqualSequence([nodes.nodeAAC]);
    });
    it("of undefined", () => {
        const hierarchy = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.followingSiblings(hierarchy)).toEqualSequence([]);
    });
    it.each`
        type | value | error
        ${"null"} | ${null} | ${TypeError}
        ${"non-function"} | ${""} | ${TypeError}
    `("throws if 'predicate' is $type", ({ value, error }) => {
        const invoke = () => fn.followingSiblings(fn.toHierarchy([], nodes.nodeHierarchy), value);
        expect(invoke).toThrow(error);
    });
});
describe("following()", () => {
    it("gets nodes after self", () => {
        const hierarchy = fn.toHierarchy([nodes.nodeAB], nodes.nodeHierarchy);
        expect(fn.following(hierarchy)).toEqualSequence([nodes.nodeACA, nodes.nodeAC]);
    });
    it.each`
        type | value | error
        ${"null"} | ${null} | ${TypeError}
        ${"non-function"} | ${""} | ${TypeError}
    `("throws if 'predicate' is $type", ({ value, error }) => {
        const invoke = () => fn.following(fn.toHierarchy([], nodes.nodeHierarchy), value);
        expect(invoke).toThrow(error);
    });
});
describe("children()", () => {
it("gets children", () => expect(fn.children(fn.toHierarchy([nodes.nodeAA, nodes.nodeAB, nodes.nodeAC], nodes.nodeHierarchy), )).toEqualSequence([nodes.nodeAAA, nodes.nodeAAB, nodes.nodeAAC, nodes.nodeACA]));
it("of undefined", () => expect(fn.children(fn.toHierarchy([undefined!], nodes.nodeHierarchy))).toEqualSequence([]));
it("of undefined children", () => expect(fn.children(fn.toHierarchy(books.books, books.bookHierarchy))).toEqualSequence([]));
it("of undefined child", () => expect(fn.children(fn.toHierarchy([nodes.badNode], nodes.nodeHierarchy))).toEqualSequence([]));
it("with predicate", () => expect(fn.children(fn.toHierarchy<nodes.Node>([nodes.nodeAA], nodes.nodeHierarchy), x => !!x.marker)).toEqualSequence([nodes.nodeAAB]));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.children(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("nthChild()", () => {
it("gets nthChild(0)", () => expect(fn.nthChild(fn.toHierarchy([nodes.nodeAA], nodes.nodeHierarchy), 0)).toEqualSequence([nodes.nodeAAA]));
it("gets nthChild(2)", () => expect(fn.nthChild(fn.toHierarchy([nodes.nodeAA], nodes.nodeHierarchy), 2)).toEqualSequence([nodes.nodeAAC]));
it("gets nthChild(-1)", () => expect(fn.nthChild(fn.toHierarchy([nodes.nodeAA], nodes.nodeHierarchy), -1)).toEqualSequence([nodes.nodeAAC]));
it("of undefined", () => expect(fn.nthChild(fn.toHierarchy([undefined!], nodes.nodeHierarchy), 0)).toEqualSequence([]));
it.each`
type | value | error
${"undefined"} | ${undefined} | ${TypeError}
${"null"} | ${null} | ${TypeError}
${"non-number"} | ${""} | ${TypeError}
${"float"} | ${1.5} | ${RangeError}
${"NaN"} | ${NaN} | ${RangeError}
${"Infinity"} | ${Infinity} | ${RangeError}
`("throws if 'offset' is $type", ({ value, error }) => expect(() => fn.nthChild(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.nthChild(fn.toHierarchy([], nodes.nodeHierarchy), 0, value)).toThrow(error));
});
// FIX: describe label was "firstChild(" — missing the closing parenthesis, which made
// the reported suite name inconsistent with every other suite in this file.
describe("firstChild()", () => {
    it("gets firstChild()", () => expect(fn.firstChild(fn.toHierarchy([nodes.nodeAA], nodes.nodeHierarchy))).toEqualSequence([nodes.nodeAAA]));
    it("of undefined", () => expect(fn.firstChild(fn.toHierarchy([undefined!], nodes.nodeHierarchy))).toEqualSequence([]));
    it.each`
        type | value | error
        ${"null"} | ${null} | ${TypeError}
        ${"non-function"} | ${""} | ${TypeError}
    `("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.firstChild(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
// FIX: describe label was "lastChild(" — missing the closing parenthesis, which made
// the reported suite name inconsistent with every other suite in this file.
describe("lastChild()", () => {
    it("gets lastChild()", () => expect(fn.lastChild(fn.toHierarchy([nodes.nodeAA], nodes.nodeHierarchy))).toEqualSequence([nodes.nodeAAC]));
    it("of undefined", () => expect(fn.lastChild(fn.toHierarchy([undefined!], nodes.nodeHierarchy))).toEqualSequence([]));
    it.each`
        type | value | error
        ${"null"} | ${null} | ${TypeError}
        ${"non-function"} | ${""} | ${TypeError}
    `("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.lastChild(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
});
describe("descendants()", () => {
    it("gets descendants", () => {
        const hierarchy = fn.toHierarchy([nodes.nodeAA], nodes.nodeHierarchy);
        expect(fn.descendants(hierarchy)).toEqualSequence([nodes.nodeAAA, nodes.nodeAAAA, nodes.nodeAAB, nodes.nodeAAC]);
    });
    it("of undefined", () => {
        const hierarchy = fn.toHierarchy([undefined!], nodes.nodeHierarchy);
        expect(fn.descendants(hierarchy)).toEqualSequence([]);
    });
    it.each`
        type | value | error
        ${"null"} | ${null} | ${TypeError}
        ${"non-function"} | ${""} | ${TypeError}
    `("throws if 'predicate' is $type", ({ value, error }) => {
        const invoke = () => fn.descendants(fn.toHierarchy([], nodes.nodeHierarchy), value);
        expect(invoke).toThrow(error);
    });
});
describe("descendantsAndSelf()", () => {
it("gets descendants and self", () => expect(fn.descendantsAndSelf(fn.toHierarchy([nodes.nodeAA], nodes.nodeHierarchy), )).toEqualSequence([nodes.nodeAA, nodes.nodeAAA, nodes.nodeAAAA, nodes.nodeAAB, nodes.nodeAAC]));
it("of undefined", () => expect(fn.descendantsAndSelf(fn.toHierarchy([undefined!], nodes.nodeHierarchy), )).toEqualSequence([]));
it.each`
type | value | error
${"null"} | ${null} | ${TypeError}
${"non-function"} | ${""} | ${TypeError}
`("throws if 'predicate' is $type", ({ value, error }) => expect(() => fn.descendantsAndSelf(fn.toHierarchy([], nodes.nodeHierarchy), value)).toThrow(error));
}); | the_stack |
// Ambient type declarations for the gotalk JavaScript client library.
export default gotalk
declare namespace gotalk {
type int = number
// connection() creates a persistent (keep-alive) connection to a gotalk responder.
// If `addr` is not provided, `defaultResponderAddress` is used.
// Equivalent to `Sock(handlers, proto).openKeepAlive(addr)`
function connection<T>(
addr :string | undefined,
handlers :Handlers<T> | undefined,
proto? :Protocol<T>
) :Sock<T>
function connection(addr? :string) :Sock<Uint8Array>
// Open a connection to a gotalk responder.
// If `addr` is not provided, `defaultResponderAddress` is used.
// Equivalent to `Sock(handlers, proto).open(addr, onconnect)`
function open<T>(
addr :string | undefined,
onconnect :((e:Error,s:Sock<Uint8Array>)=>void) | undefined,
handlers :Handlers<T> | undefined,
proto? :Protocol<T>
) :Sock<T>
function open(
addr? :string | undefined,
onconnect? :((e:Error,s:Sock<Uint8Array>)=>void) | undefined,
) :Sock<Uint8Array>
// Default `Handlers` utilized by the module-level `handle*` functions
// The type is Handlers<Uint8Array> by default.
var defaultHandlers :Handlers<Uint8Array>|Handlers<string>
// Default web socket address to connect to, when no address is provided to open() or connection().
// When the gotalk js library is served from the gotalk server, this is prepopulated with the
// correct gotalk web socket address.
// If you serve the gotalk library yourself, set this to the gotalk web socket server before
// calling open() or connection() without an address.
// Example values:
//   ws://example.com/gotalk   // http-based (unencrypted) absolute address
//   wss://example.com/gotalk  // https-based (encrypted) absolute address
//   //example.com/gotalk      // protocol matches document, with absolute address
//   /gotalk                   // protocol and host matches document, with absolute path
//
var defaultResponderAddress :string
// If set to true, some helpful messages may be logged to the console.
// The initial value is true when gotalk is served in a web browser over localhost, else it is false.
var developmentMode :boolean
// Version of the gotalk JavaScript library. Example: "1.2.3".
// For what version of the Gotalk protocol this library supports, see protocol.Version.
const version :string
// Sock creates a socket
function Sock<T>(handlers :Handlers<T>, proto? :Protocol<T>) :Sock<T>
// Convenience "shortcuts" to `defaultHandlers`
//
// Register a handler for an operation `op`. If `op` is the empty string the
// handler will be registered as a "fallback" handler, meaning that if there are
// no handlers registered for request "x", the fallback handler will be invoked.
function handleRequest<In=any,Out=any>(
op :string,
h :(data :In, resolve :Resolver<Out>, op :string)=>void,
) :void
function handleBufferRequest<T=Uint8Array>(
op :string,
h :(data :T, resolve :Resolver<T>, op :string)=>void,
) :void
//
// Register a handler for notification `name`. Just as with request handlers,
// registering a handler for the empty string means it's registered as the fallback handler.
function handleNotification<In=any>(
name :string,
h :(data :In, name :string)=>void,
) :void
function handleBufferNotification<T=Uint8Array>(
name :string,
h :(data :T, name :string)=>void,
) :void
// Events a Sock may emit, mapped to the argument type passed to listeners.
interface SockEventMap<T> {
"open" :Sock<T>  // connection is open
"close" :Error|null  // connection is closed. Arg is non-null if closed because of error.
"heartbeat" :{time: Date, load: number}
}
interface Sock<T> extends EventEmitter<SockEventMap<T>> {
readonly ws :WebSocket  // underlying connection
readonly handlers :Handlers<T>
readonly protocol :Protocol<T>
// Open a connection to a gotalk responder.
// If `addr` is not provided, `defaultResponderAddress` is used.
open(addr :string, cb? :(e:Error,s:this)=>void) :this
open(cb? :(e:Error,s:this)=>void) :this
// Start a persistent (keep-alive) connection to a gotalk responder.
// If `addr` is not provided, `defaultResponderAddress` is used.
// Because the "open" step is abstracted away, this function does not accept any "open callback".
// You should listen to the "open" and "close" events instead.
// The Sock will stay connected, and reconnect as needed, until you call `end()`.
openKeepAlive(addr? :string) :this
// Send request for operation `op` with `value` as the payload, using JSON for encoding.
// The cb argument is optional and here for backwards compatibility with an older API.
request<R=any>(op :string, value :any, cb? :(e :Error, result :R)=>void) :Promise<R>
// Send a request for operation `op` with raw-buffer `buf` as the payload,
// if any. The type of result depends on the protocol used by the server
// — a server sending a "text" frame means the result is a string, while a
// server sending a "binary" frame causes the result to be a Uint8Array.
// The cb argument is optional and here for backwards compatibility with an older API.
bufferRequest(op :string, buf :T|null, cb? :(e :Error, result :T)=>void) :Promise<T>
// Create a StreamRequest for operation `op` which is ready to be used.
// Note that calling this method does not send any data — sending the request
// and reading the response is performed by using the returned object.
streamRequest(op :string) :StreamRequest<T>
// Send notification `name` with raw-buffer `buf` as the payload, if any.
bufferNotify(name :string, buf :T|null) :void
// Send notification `name` with `value`, using JSON for encoding.
notify(name :string, value :any) :void
// Send a heartbeat message with `load` which should be in the range [0-1]
sendHeartbeat(load :number) :void
// Returns a string representing the address to which the socket is connected.
address() :string|null
// Adopt a connection capable of being received from, written to and closed.
// It should be in an "OPEN" ready-state.
// You need to call `handshake` followed by `startReading` after adopting a previously
// unadopted connection.
// Throws an error if the provided connection type is not supported.
// Currently only supports WebSocket.
adopt(ws :WebSocket) :void
// Perform protocol handshake.
handshake() :void
// Schedule reading from the underlying connection. Should only be called
// once per connection.
startReading() :void
// Close the socket. If there are any outstanding responses from pending
// requests, the socket will close when all pending requests has finished.
// If you call this function a second time, the socket will close immediately,
// even if there are outstanding responses.
end() :void
// sendBufferLimit configures how many messages may be queued to be sent when
// the socket is not connected.
// A value of 0 causes send buffering to be disabled.
// Added in gotalk.js v1.2.0
sendBufferLimit :number
/** @DEPRECATED use request */
requestp<R=any>(op :string, value :any) :Promise<R>
/** @DEPRECATED use bufferRequest */
bufferRequestp(op :string, buf :T|null) :Promise<T>
}
interface Handlers<T> {
// Register a handler for an operation `op`. If `op` is the empty string the
// handler will be registered as a "fallback" handler, meaning that if there are
// no handlers registered for request "x", the fallback handler will be invoked.
handleRequest<In=any,Out=any>(
op :string,
h :(data :In, resolve :Resolver<Out>, op :string)=>void,
) :void
handleBufferRequest(op :string, h :(data :T, resolve :Resolver<T>, op :string)=>void) :void
// Register a handler for notification `name`. Just as with request handlers,
// registering a handler for the empty string means it's registered as the fallback handler.
handleNotification<In=any>(name :string, h :(data :In, name :string)=>void) :void
handleBufferNotification(name :string, h :(data :T, name :string)=>void) :void
// Find request and notification handlers
findRequestHandler(op :string) :((data:T,r:Resolver<T>,op:string)=>void) | null
findNotificationHandler(name :string) :((data:T,name:string)=>void) | null
}
// Create a new Handlers object
function Handlers<T>() :Handlers<T>
// Callable used by request handlers to deliver a result (call it) or an error (call .error).
interface Resolver<T> {
(value :T) :void
error(e :Error) :void
}
// Events a StreamRequest may emit, mapped to the argument type passed to listeners.
interface StreamRequestEventMap<T> {
"data" :T  // response chunk received
"close" :Error|null  // connection has closed. Arg is non-null if closed because of error.
}
interface StreamRequest<T> extends EventEmitter<StreamRequestEventMap<T>> {
readonly op :string  // Operation name
readonly id :string  // Request ID
// Write a request chunk. Writing an empty `buf` or null causes the request to end,
// meaning no more chunks can be written. Calling `write()` or `end()` after the
// request has finished has no effect.
write(buf :T) :void
// End the request, indicating to the responder that it will not receive more payloads.
end() :void
}
// Create a StreamRequest operating on a certain socket `s`.
// This is a low-level function. See `Sock.streamRequest()` for a higher-level function,
// which sets up response tracking, generates a request ID, etc.
function StreamRequest<T>(s :Sock<T>, op :string, id :string) :StreamRequest<T>
// Wire-format codec; T is the frame payload type (Uint8Array for binary, string for text).
interface Protocol<T> {
// Produce a fixed-digit number for integer `n`
makeFixnum(n :int, digits :int) :T
// protocol.Version as a T
versionBuf :T
// Parse value as protocol version which is expected to have a length of 2.
parseVersion(data :T) :int
// Parses a message from a T, which must not include any payload data.
parseMsg(data :T) :{t:int, id:T, name:string, size:int} | null
// Create a T representing a message, not including any payload data.
makeMsg(t :int, id :T|string, name :string, payloadSize :int) :T
}
namespace protocol {
// The version of the protocol implementation
const Version = 1
// Message type constants
const MsgTypeSingleReq     = 0x72 // byte('r')
const MsgTypeStreamReq     = 0x73 // byte('s')
const MsgTypeStreamReqPart = 0x70 // byte('p')
const MsgTypeSingleRes     = 0x52 // byte('R')
const MsgTypeStreamRes     = 0x53 // byte('S')
const MsgTypeErrorRes      = 0x45 // byte('E')
const MsgTypeRetryRes      = 0x65 // byte('e')
const MsgTypeNotification  = 0x6E // byte('n')
const MsgTypeHeartbeat     = 0x68 // byte('h')
const MsgTypeProtocolError = 0x66 // byte('f')
// ProtocolError codes
const ErrorAbnormal    = 0
const ErrorUnsupported = 1
const ErrorInvalidMsg  = 2
const ErrorTimeout     = 3
// Maximum value of a heartbeat's "load"
const HeartbeatMsgMaxLoad = 0xffff
// Implements a byte-binary version of the gotalk protocol
const binary :Protocol<Uint8Array>
// Implements a JavaScript text version of the gotalk protocol
const text :Protocol<string>
}
type EventHandler<T=any> = (data :T)=>void
// Minimal typed event-emitter contract used by Sock and StreamRequest.
interface EventEmitter<EventMap = {[k:string]:any}> {
on<K extends keyof EventMap>(e :K, handler :EventHandler<EventMap[K]>) :this
once<K extends keyof EventMap>(e :K, handler :EventHandler<EventMap[K]>) :this
addListener<K extends keyof EventMap>(e :K, handler :EventHandler<EventMap[K]>) :this
removeListener<K extends keyof EventMap>(e :K, handler :EventHandler<EventMap[K]>) :void
removeListeners<K extends keyof EventMap>(e :K) :void
removeAllListeners() :void
emit<K extends keyof EventMap>(e :K, data? :EventMap[K]) :void
}
const EventEmitter : {
mixin<T>(obj :T) :(T & EventEmitter)
}
} // namespace gotalk
import { AfterContentInit, ContentChildren, Directive, ElementRef, HostBinding, Input, OnDestroy,
QueryList, Renderer2, Output, EventEmitter, HostListener, ChangeDetectorRef, Inject } from '@angular/core';
import { DOCUMENT } from '@angular/common';
import { MDCListFoundation, MDCListAdapter, strings, cssClasses } from '@material/list';
import { asBoolean } from '../../utils/value.utils';
import { AbstractMdcRipple } from '../ripple/abstract.mdc.ripple';
import { MdcEventRegistry } from '../../utils/mdc.event.registry';
import { MdcRadioDirective } from '../radio/mdc.radio.directive';
import { MdcCheckboxDirective } from '../checkbox/mdc.checkbox.directive';
import { Subject, merge, ReplaySubject } from 'rxjs';
import { takeUntil, debounceTime } from 'rxjs/operators';
/**
 * Directive for a separator between the items of a list, or between the lists
 * inside a list group.
 *
 * # Accessibility
 * When the divider is applied to a list item (an `LI` element), a `role=separator`
 * attribute is added; when used between lists in a group, no role is set.
 */
@Directive({
    selector: '[mdcListDivider]'
})
export class MdcListDividerDirective {
    /** @internal */
    @HostBinding('class.mdc-list-divider') readonly _cls = true;
    /** @internal */
    @HostBinding('attr.role') _role: string | null = 'separator';
    /** @internal */
    @HostBinding('attr.disabled') _disabled = false;
    private insetStyle = false;
    private paddedStyle = false;

    constructor(_elm: ElementRef) {
        // Only real list items (LI elements) get the separator role; dividers used
        // between lists in a list group do not carry a role attribute.
        const isListItem = _elm.nativeElement.nodeName.toUpperCase() === 'LI';
        if (!isListItem)
            this._role = null;
    }

    /**
     * When this input is defined and does not have value false, the divider is styled with
     * an inset.
     */
    @Input() @HostBinding('class.mdc-list-divider--inset')
    get inset() {
        return this.insetStyle;
    }
    set inset(val: boolean) {
        this.insetStyle = asBoolean(val);
    }
    static ngAcceptInputType_inset: boolean | '';

    /**
     * When this input is defined and does not have value false, the divider leaves
     * gaps on each side to match the padding of <code>mdcListItemMeta</code>.
     */
    @Input() @HostBinding('class.mdc-list-divider--padded')
    get padded() {
        return this.paddedStyle;
    }
    set padded(val: boolean) {
        this.paddedStyle = asBoolean(val);
    }
    static ngAcceptInputType_padded: boolean | '';
}
/**
 * Directive for the items of a material list.
 * This directive should be used for the direct children (list items) of an
 * `mdcList`.
 *
 * # Children
 * * Use `mdcListItemText` for the text content of the list. One line and two line
 *   lists are supported. See `mdcListItemText` for more info.
 * * Optional: `mdcListItemGraphic` for a starting detail (typically icon or image).
 * * Optional: `mdcListItemMeta` for the end detail (typically icon or image).
 *
 * # Accessibility
 * * All items in a list will get a `tabindex=-1` attribute to make them focusable,
 *   but not tabbable. The focused, active/current, or first (in that preference) item will
 *   get `tabindex=0`, so that the list can be tabbed into. Keyboard navigation
 *   between list items is done with arrow, home, and end keys. Keyboard based selection of
 *   an item (when items are selectable), can be done with the enter or space key.
 * * The `role` attribute with be set to `option` for single selection lists,
 *   `checkbox` for list items that can be selected with embedded checkbox inputs, `radio`
 *   for list items that can be selected with embedded radio inputs, `menuitem` when the
 *   list is part of an `mdcMenu`. Otherwise there will be no `role` attribute, so the default
 *   role for a standard list item (`role=listitem`) will apply.
 * * Single selection lists set the `aria-selected` or `aria-current` attributes, based on the
 *   chosen `selectionMode` of the list. Please see [WAI-ARIA aria-current](https://www.w3.org/TR/wai-aria-1.1/#aria-current)
 *   for recommendations.
 * * `aria-checked` will be set for lists with embedded checkbox or radio inputs.
 * * Disabled list items will be included in the keyboard navigation. This follows
 *   [focusability of disabled controls](https://www.w3.org/TR/wai-aria-practices-1.1/#kbd_disabled_controls)
 *   recommendations in the ARIA practices article. Exception: when the list is part of an `mdcMenu` or `mdcSelect`,
 *   disabled items are not included in the keyboard navigation.
 * * As the user navigates through the list, any button and anchor elements within list items that are not focused
 *   will receive `tabindex=-1`. When the list item receives focus, those elements will receive `tabindex=0`.
 *   This allows for the user to tab through list item elements and then tab to the first element after the list.
 * * Lists are interactive by default (unless `nonInteractive` is set on the `mdcList`). List items will
 *   show ripples when interacted with.
 * * `aria-disabled` will be set for disabled list items. When the list uses checkbox or radio inputs to control
 *   the checked state, the disabled state will mirror the state of those inputs.
 */
@Directive({
    selector: '[mdcListItem]'
})
export class MdcListItemDirective extends AbstractMdcRipple implements AfterContentInit, OnDestroy {
    /** @internal */
    @HostBinding('class.mdc-list-item') readonly _cls = true;
    /** @internal Role attribute value; null means no explicit role (default list item semantics). */
    @HostBinding('attr.role') public _role: string | null = null;
    /** @internal Embedded radio inputs; the first one (if any) controls this item's checked state. */
    @ContentChildren(MdcRadioDirective, {descendants: true}) _radios?: QueryList<MdcRadioDirective>;
    /** @internal Embedded checkbox inputs; the first one (if any) controls this item's checked state. */
    @ContentChildren(MdcCheckboxDirective, {descendants: true}) _checkBoxes?: QueryList<MdcCheckboxDirective>;
    /** @internal Which aria attribute reflects the active state of this item (null when none applies). */
    _ariaActive: 'current' | 'selected' | 'checked' | null = null;
    private _initialized = false;   // true after ngAfterContentInit has run
    private _interactive = true;    // whether the item shows ripples for user interaction
    private _disabled = false;
    private _active = false;
    /** @internal (called valueChanged instead of valueChange so that library consumers cannot by accident use
     * this for two-way binding) */
    @Output() readonly valueChanged: EventEmitter<string | null> = new EventEmitter();
    /** @internal ReplaySubject(1), so that an activation request made before anyone subscribes is not lost. */
    _activationRequest: Subject<boolean> = new ReplaySubject<boolean>(1);
    /**
     * Event emitted for user action on the list item, including keyboard and mouse actions.
     * This will not emit when the `mdcList` has `nonInteractive` set.
     */
    @Output() readonly action: EventEmitter<void> = new EventEmitter();
    /**
     * Event emitted when the active state of a list item in a single-selection list
     * (`selectionMode` is `single` or `active`) is changed. This event does not emit
     * for lists that do not have the mentioned `selectionMode`, and therefore does also
     * not emit for lists where the active/selected state is controlled by embedded checkbox
     * or radio inputs. (Note that for lists controlled by an `mdcSelect`, the `selectionMode`
     * will be either `single` or `active`).
     */
    @Output() readonly selectedChange: EventEmitter<boolean> = new EventEmitter<boolean>();
    private _value: string | null = null;

    constructor(public _elm: ElementRef, rndr: Renderer2, registry: MdcEventRegistry, @Inject(DOCUMENT) doc: any) {
        super(_elm, rndr, registry, doc as Document);
    }

    ngAfterContentInit() {
        this._initialized = true;
        // ripples are only attached to interactive items:
        if (this._interactive)
            this.initRipple();
    }

    ngOnDestroy() {
        this.destroyRipple();
    }

    /** @internal Switch interactive behavior (ripple) on or off. */
    _setInteractive(interactive: boolean) {
        if (this._interactive !== interactive) {
            this._interactive = interactive;
            // before initialization, ngAfterContentInit takes care of ripple creation:
            if (this._initialized) {
                if (this._interactive)
                    this.initRipple();
                else
                    this.destroyRipple();
            }
        }
    }

    /**
     * If set to a value other than false, the item will be disabled. This affects styling
     * and selectability, and may affect keyboard navigation.
     * This input is ignored for lists where the selection is controlled by embedded checkbox
     * or radio inputs. In those cases the disabled state of the input will be used instead.
     */
    @HostBinding('class.mdc-list-item--disabled') @Input()
    get disabled() {
        // for checkbox/radio controlled items, the embedded input is the source of truth:
        if (this._ariaActive === 'checked') {
            const input = this._getInput();
            return input ? input._elm.nativeElement.disabled : false;
        }
        return this._disabled;
    }
    set disabled(val: boolean) {
        this._disabled = asBoolean(val);
    }
    static ngAcceptInputType_disabled: boolean | '';

    /**
     * Assign this field with a value that should be reflected in the `value` property of
     * a `selectionMode=single|active` or and `mdcMenu` or `mdcSelect` for the active property.
     * Ignored for lists that don't offer a selection, and for lists that use checkbox/radio
     * inputs for selection.
     */
    @Input() get value() {
        return this._value;
    }
    set value(newValue: string | null) {
        if (this._value !== newValue) {
            this._value = newValue;
            this.valueChanged.emit(newValue);
        }
    }

    /**
     * This input can be used to change the active or selected state of the item. This should *not* be used for lists
     * inside an `mdcSelect`/`mdcMenu`, or for lists that use checkbox/radio inputs for selection.
     * Depending on the `selectionMode` of the list this will update the `selected` or `active` state of the item.
     */
    @Input() set selected(val: boolean) {
        let newValue = asBoolean(val);
        if (newValue !== this._active)
            // BUGFIX: emit the coerced boolean instead of the raw input value. The input
            // accepts the empty string (attribute form: selected=""), which asBoolean
            // coerces to true; emitting the raw value would propagate a falsy '' for an
            // item that should become selected.
            this._activationRequest.next(newValue);
    }
    static ngAcceptInputType_selected: boolean | '';

    /** @internal */
    @HostBinding('class.mdc-list-item--selected')
    get _selected() {
        return (this._ariaActive === 'selected' && this._active)
            || (!this._role && this._active);
    }

    /** @internal */
    @HostBinding('class.mdc-list-item--activated')
    get _activated() {
        return this._ariaActive === 'current' && this._active;
    }

    /** @internal */
    @HostBinding('attr.aria-disabled') get _ariaDisabled() {
        if (this.disabled) // checks checkbox/radio disabled state when appropriate
            return 'true';
        return null;
    }

    /** @internal */
    @HostBinding('attr.aria-current') get _ariaCurrent() {
        if (this._ariaActive === 'current')
            return this._active ? 'true' : 'false';
        return null;
    }

    /** @internal */
    @HostBinding('attr.aria-selected') get _ariaSelected() {
        if (this._ariaActive === 'selected')
            return this._active ? 'true' : 'false';
        return null;
    }

    /** @internal */
    @HostBinding('attr.aria-checked') get _ariaChecked() {
        if (this._ariaActive === 'checked')
            // (this.active: returns checked value of embedded input if appropriate)
            return this.active ? 'true' : 'false';
        return null;
    }

    /** @internal Active state; for checkbox/radio controlled items the embedded input's checked state wins. */
    get active() {
        if (this._ariaActive === 'checked') {
            const input = this._getInput();
            return input ? input._elm.nativeElement.checked : false;
        }
        return this._active;
    }

    /** @internal */
    set active(value: boolean) {
        if (value !== this._active) {
            this._active = value;
            this.selectedChange.emit(value);
        }
    }

    /** @internal First embedded radio input, if any. */
    _getRadio() {
        return this._radios?.first;
    }

    /** @internal First embedded checkbox input, if any. */
    _getCheckbox() {
        return this._checkBoxes?.first;
    }

    /** @internal Native input of the first embedded checkbox (preferred) or radio, if any. */
    _getInput() {
        return (this._getCheckbox() || this._getRadio())?._input;
    }
}
/**
 * Directive to mark the text portion(s) of an `mdcListItem`. This directive should be the child of an `mdcListItem`.
 * For single line lists, the text can be added directly to this directive.
 * For two line lists, add `mdcListItemPrimaryText` and `mdcListItemSecondaryText` children.
 */
@Directive({
    selector: '[mdcListItemText]'
})
export class MdcListItemTextDirective {
    /** @internal Applies the mdc-list-item__text style to the host element. */
    @HostBinding('class.mdc-list-item__text') get _cls(): boolean { return true; }
}
/**
 * Directive to mark the first line of an item with "two line list" styling.
 * This directive, if used, should be the child of an `mdcListItemText`.
 * Using this directive will put the list in "two line" mode.
 */
@Directive({
    selector: '[mdcListItemPrimaryText]'
})
export class MdcListItemPrimaryTextDirective {
    /** @internal Applies the mdc-list-item__primary-text style to the host element. */
    @HostBinding('class.mdc-list-item__primary-text') get _cls(): boolean { return true; }
}
/**
 * Directive for the secondary text of an item with "two line list" styling.
 * This directive, if used, should be the child of an `mdcListItemText`, and
 * come after the `mdcListItemPrimaryText`.
 */
@Directive({
    selector: '[mdcListItemSecondaryText]',
})
export class MdcListItemSecondaryTextDirective {
    /** @internal Applies the mdc-list-item__secondary-text style to the host element. */
    @HostBinding('class.mdc-list-item__secondary-text') get _cls(): boolean { return true; }
}
/**
 * Directive for the start detail item of a list item.
 * This directive, if used, should be the child of an `mdcListItem`.
 */
@Directive({
    selector: '[mdcListItemGraphic]',
})
export class MdcListItemGraphicDirective {
    /** @internal Applies the mdc-list-item__graphic style to the host element. */
    @HostBinding('class.mdc-list-item__graphic') get _cls(): boolean { return true; }
}
/**
 * Directive for the end detail item of a list item.
 * This directive, if used, should be the child of an `mdcListItem`.
 */
@Directive({
    selector: '[mdcListItemMeta]',
})
export class MdcListItemMetaDirective {
    /** @internal Applies the mdc-list-item__meta style to the host element. */
    @HostBinding('class.mdc-list-item__meta') get _cls(): boolean { return true; }
}
/**
 * The function a list performs: a plain standalone list, the list embedded in
 * an `mdcMenu`, or the list embedded in an `mdcSelect`.
 * @docs-private
 */
export enum MdcListFunction {
plain, menu, select
};
// attributes on list-items that we maintain ourselves (via host bindings on
// MdcListItemDirective), so should be ignored in the adapter:
const ANGULAR_ITEM_ATTRIBUTES = [
strings.ARIA_CHECKED, strings.ARIA_SELECTED, strings.ARIA_CURRENT, strings.ARIA_DISABLED
];
// classes on list-items that we maintain ourselves (via host bindings on
// MdcListItemDirective), so should be ignored in the adapter:
const ANGULAR_ITEM_CLASSES = [
cssClasses.LIST_ITEM_DISABLED_CLASS, cssClasses.LIST_ITEM_ACTIVATED_CLASS, cssClasses.LIST_ITEM_SELECTED_CLASS
];
/**
* Lists are continuous, vertical indexes of text or images. They can be interactive, and may support
* selection/activation of list items. Single-line and Two-line lists are supported, as well as
* starting and end details (images or controls) on a list. A list contains `mdcListItem` children,
* and may also contain `mdcListDivider` children.
*
* A list can be used by itself, or contained inside `mdcListGroup`, `mdcMenu`, or `mdcSelect`.
*
* # Accessibility
* * See Accessibility section of `mdcListItem` for navigation, focus, and tab(index) affordances.
* * The `role` attribute will be set to `listbox` for single selection lists (`selectionMode` is `single`
* or `active`), to `radiogroup` when selection is triggered by embedded radio inputs, to
* `checkbox` when selection is triggered by embedded checkbox inputs, to `menu` when used inside
* `mdcMenu`. Otherwise there will be no `role` attribute, so the default role for a standard list
* (`role=list`) will apply.
* * You should set an appropriate `label` for checkbox based selection lists. The
* `label` will be reflected to the `aria-label` attribute.
*/
@Directive({
selector: '[mdcList]',
})
export class MdcListDirective implements AfterContentInit, OnDestroy {
    private onDestroy$: Subject<any> = new Subject();
    private document: Document;
    /** @internal */
    @HostBinding('class.mdc-list') readonly _cls = true;
    /** @internal */
    @ContentChildren(MdcListItemDirective) _items?: QueryList<MdcListItemDirective>;
    /** @internal */
    @ContentChildren(MdcListItemPrimaryTextDirective, {descendants: true}) _primaryTexts?: QueryList<MdcListItemTextDirective>;
    /** @internal */
    @ContentChildren(MdcCheckboxDirective, {descendants: true}) _checkboxes?: QueryList<MdcListItemTextDirective>;
    /** @internal */
    @ContentChildren(MdcRadioDirective, {descendants: true}) _radios?: QueryList<MdcListItemTextDirective>;
    /** @internal */
    @Output() readonly itemsChanged: EventEmitter<void> = new EventEmitter();
    /** @internal */
    @Output() readonly itemValuesChanged: EventEmitter<void> = new EventEmitter();
    /** @internal */
    @Output() readonly itemAction: EventEmitter<{index: number, value: string | null}> = new EventEmitter();
    /** @internal */
    @HostBinding('class.mdc-list--two-line') _twoLine = false;
    /**
     * Label announcing the purpose of the list. Should be set for lists that embed checkbox inputs
     * for activation/selection. The label is reflected in the `aria-label` attribute value.
     *
     * @internal
     */
    @HostBinding('attr.aria-label') @Input() label: string | null = null;
    /**
     * Link to the id of an element that announces the purpose of the list. This will be set automatically
     * to the id of the `mdcFloatingLabel` when the list is part of an `mdcSelect`.
     *
     * @internal
     */
    // fix: canonical ARIA attribute name is all-lowercase 'aria-labelledby'
    @HostBinding('attr.aria-labelledby') @Input() labelledBy: string | null = null;
    private _function: MdcListFunction = MdcListFunction.plain;
    /** @internal */
    _hidden = false;
    private _dense = false;
    private _avatar = false;
    private _nonInteractive = false;
    private _selectionMode: 'single' | 'active' | null = null;
    private _wrapFocus = false;
    // Adapter through which the MDC foundation reads and writes our list state:
    private mdcAdapter: MDCListAdapter = {
        getAttributeForElementIndex: (index, attr) => {
            if (attr === strings.ARIA_CURRENT)
                return this.getItem(index)?._ariaCurrent;
            return this.getItem(index)?._elm.nativeElement.getAttribute(attr);
        },
        getListItemCount: () => this._items!.length,
        getFocusedElementIndex: () => this._items!.toArray().findIndex(i => i._elm.nativeElement === this.document.activeElement!),
        setAttributeForElementIndex: (index, attribute, value) => {
            // ignore attributes we maintain ourselves
            if (!ANGULAR_ITEM_ATTRIBUTES.find(a => a === attribute)) {
                const elm = this.getItem(index)?._elm.nativeElement;
                if (elm)
                    this.rndr.setAttribute(elm, attribute, value);
            }
        },
        addClassForElementIndex: (index, className) => {
            // ignore classes we maintain ourselves
            if (!ANGULAR_ITEM_CLASSES.find(c => c === className)) {
                const elm = this.getItem(index)?._elm.nativeElement;
                if (elm)
                    this.rndr.addClass(elm, className);
            }
        },
        removeClassForElementIndex: (index, className) => {
            if (!ANGULAR_ITEM_CLASSES.find(c => c === className)) {
                const elm = this.getItem(index)?._elm.nativeElement;
                if (elm)
                    // bugfix: this previously called addClass, so foundation-managed
                    // classes were never removed from list items
                    this.rndr.removeClass(elm, className);
            }
        },
        focusItemAtIndex: (index: number) => this.getItem(index)?._elm.nativeElement.focus(),
        setTabIndexForListItemChildren: (index, tabIndexValue) => {
            // TODO check this plays nice with our own components (mdcButton etc.)
            // TODO build this in an abstract class for our own elements?
            // TODO limit to our own elements/custom directive?
            const elm = this.getItem(index)?._elm.nativeElement;
            if (!elm) // guard: index may be stale while the item list is changing
                return;
            const listItemChildren: Element[] = [].slice.call(elm.querySelectorAll(strings.CHILD_ELEMENTS_TO_TOGGLE_TABINDEX));
            listItemChildren.forEach((el) => this.rndr.setAttribute(el, 'tabindex', tabIndexValue));
        },
        hasRadioAtIndex: () => this._role === 'radiogroup',
        hasCheckboxAtIndex: () => this._role === 'group',
        isCheckboxCheckedAtIndex: (index) => !!this.getItem(index)?._getCheckbox()?._input?.checked,
        isRootFocused: () => this.document.activeElement === this._elm.nativeElement,
        listItemAtIndexHasClass: (index, className) => {
            if (className === cssClasses.LIST_ITEM_DISABLED_CLASS)
                return !!this.getItem(index)?.disabled;
            return !!this.getItem(index)?._elm.nativeElement.classList.contains(className);
        },
        setCheckedCheckboxOrRadioAtIndex: (index, isChecked) => {
            const item = this.getItem(index);
            const input = (item?._getRadio() || item?._getCheckbox())?._input?._elm.nativeElement;
            if (input) {
                input.checked = isChecked;
                // simulate user interaction, as this is triggered from a user interaction:
                const event = this.document.createEvent('Event');
                event.initEvent('change', true, true);
                input.dispatchEvent(event);
                // checkbox input listens to clicks, not changed events, so let it know about the change:
                item?._getCheckbox()?._input?._onChange();
            }
        },
        notifyAction: (index) => {
            const item = this.getItem(index);
            if (item && !item?.disabled) {
                item.action.emit();
                this.itemAction.emit({index, value: item.value});
            }
        },
        isFocusInsideList: () => {
            return this._elm.nativeElement.contains(this.document.activeElement);
        },
    };
    /** @internal */
    foundation?: MDCListFoundation | null;
    constructor(public _elm: ElementRef, private rndr: Renderer2, private cdRef: ChangeDetectorRef, @Inject(DOCUMENT) doc: any) {
        this.document = doc as Document; // work around ngc issue https://github.com/angular/angular/issues/20351
    }
    ngAfterContentInit() {
        merge(
            this._checkboxes!.changes,
            this._radios!.changes
        ).pipe(takeUntil(this.onDestroy$)).subscribe(() => {
            this.updateItems();
            this.updateLayout();
            this.updateFoundationSelections();
        });
        // fix: this subscription previously leaked past ngOnDestroy (no takeUntil)
        this._items!.changes.pipe(takeUntil(this.onDestroy$)).subscribe(() => {
            // when number of items changes, we have to reinitialize the foundation, because
            // the focusused item index that the foundation keeps may be invalidated:
            this.destroyFoundation();
            this.updateItems();
            this.initFoundation();
            this.itemsChanged.emit();
            this.itemValuesChanged.emit();
            // bugfix: merge the valueChanged observables themselves (spread). Passing
            // the array as one argument made rxjs emit the observables as values,
            // so item value changes were never observed:
            merge(...this._items!.map(item => item.valueChanged.asObservable())).pipe(
                takeUntil(this.onDestroy$),
                takeUntil(this.itemsChanged),
                debounceTime(1)
            ).subscribe(() => {
                this.itemValuesChanged.emit();
            });
            this.subscribeItemActivationRequests();
        });
        // fix: also bounded by onDestroy$ (previously leaked):
        this._primaryTexts!.changes.pipe(takeUntil(this.onDestroy$)).subscribe(_ => this._twoLine = this._primaryTexts!.length > 0);
        this.updateItems();
        this._twoLine = this._primaryTexts!.length > 0;
        this.initFoundation();
        this.subscribeItemActivationRequests();
    }
    ngOnDestroy() {
        this.onDestroy$.next(); this.onDestroy$.complete();
        this.destroyFoundation();
    }
    // Creates and initializes a fresh MDC foundation, restoring focus/selection state.
    private initFoundation() {
        this.foundation = new MDCListFoundation(this.mdcAdapter);
        this.foundation.init();
        this.updateLayout();
        const focus = this.getListItemIndex({target: this.document.activeElement as EventTarget});
        if (focus !== -1) // only way to restore focus when a list item already had focus:
            (<any>this.foundation)['focusedItemIndex_'] = focus;
        this.updateFoundationSelections();
        this.foundation.setWrapFocus(this._wrapFocus);
    }
    private destroyFoundation() {
        this.foundation?.destroy();
        this.foundation = null;
    }
    // (Re)subscribes to activation requests from the items; subscriptions end on
    // destroy or when the set of items changes (itemsChanged).
    private subscribeItemActivationRequests() {
        this._items!.map(item => {
            item._activationRequest.asObservable().pipe(
                takeUntil(this.onDestroy$),
                takeUntil(this.itemsChanged)
            ).subscribe(active => this.activateOrSelectItem(item, active));
        });
    }
    // Pushes role/aria/tabindex state down to all child list items.
    private updateItems() {
        let itemRole = {
            'menu': 'menuitem',
            'listbox': 'option',
            'group': 'checkbox',
            'radiogroup': 'radio'
        }[this._role!] || null;
        let ariaActive = {
            'menu': null,
            'listbox': this._selectionMode === 'active' ? 'current' : 'selected',
            'group': 'checked',
            'radiogroup': 'checked'
        }[this._role!] || null;
        if (this._items) {
            const firstTabbable = this._nonInteractive ? null :
                this._items.find(item => item._elm.nativeElement.tabIndex === 0) ||
                this._items.find(item => item.active) ||
                this._items.first;
            this._items.forEach(item => {
                item._role = itemRole;
                item._ariaActive = <any>ariaActive;
                item._setInteractive(!this._nonInteractive);
                if (this._nonInteractive)
                    // not focusable if not interactive:
                    this.rndr.removeAttribute(item._elm.nativeElement, 'tabindex');
                else
                    // bugfix: setAttribute previously ran unconditionally, immediately
                    // undoing the removeAttribute above for non-interactive lists
                    this.rndr.setAttribute(item._elm.nativeElement, 'tabindex', item === firstTabbable ? '0' : '-1');
            });
            // child components were updated (in updateItems above)
            // let angular know to prevent ExpressionChangedAfterItHasBeenCheckedError:
            this.cdRef.detectChanges();
        }
    }
    private updateLayout() {
        this.foundation?.layout();
    }
    private updateFoundationSelections() {
        this.foundation?.setSingleSelection(this._role === 'listbox');
        this.foundation?.setSelectedIndex(this.getSelection());
    }
    // Makes item.active flags reflect the given active index (single) or indexes (multi).
    private updateItemSelections(active: number | number[]) {
        const activeIndexes = typeof active === 'number' ? [active] : active;
        // first deactivate, then activate
        this._items!.toArray().forEach((item, idx) => {
            if (activeIndexes.indexOf(idx) === -1)
                item.active = false;
        });
        this._items!.toArray().forEach((item, idx) => {
            if (activeIndexes.indexOf(idx) !== -1)
                item.active = true;
        });
    }
    // Handles an (de)activation request from an item, honoring the list's role:
    // multi-select for checkbox lists, single-select for listbox/radiogroup/menu.
    private activateOrSelectItem(item: MdcListItemDirective, active: boolean) {
        let activeIndexes: number | number[] = -1;
        if (!active) {
            if (this._role === 'group' || !this._role)
                activeIndexes = <number[]>this._items!.toArray().map((v, i) => v.active && v !== item ? i : null).filter(i => i != null);
            else if (this._role === 'listbox' || this._role === 'radiogroup' || this._role === 'menu')
                activeIndexes = this._items!.toArray().findIndex(i => i.active && i !== item);
        } else {
            if (this._role === 'group' || !this._role)
                activeIndexes = <number[]>this._items!.toArray().map((v, i) => v.active || v === item ? i : null).filter(i => i != null);
            else if (this._role === 'listbox' || this._role === 'radiogroup' || this._role === 'menu')
                activeIndexes = this._items!.toArray().findIndex(i => i === item);
        }
        if (this._role === 'group' || this._role === 'listbox' || this._role === 'radiogroup' || this._role === 'menu')
            this.foundation?.setSelectedIndex(activeIndexes);
        this.updateItemSelections(activeIndexes);
        this.cdRef.detectChanges();
    }
    // Current selection in the shape the role requires: one index for single-select
    // roles, an index array for checkbox lists (and, when !forFoundation, plain lists).
    private getSelection(forFoundation = true): number | number[] {
        if (this._role === 'listbox' || this._role === 'radiogroup' || this._role === 'menu')
            return this._items!.toArray().findIndex(i => i.active);
        if (this._role === 'group')
            return <number[]>this._items!.toArray().map((v, i) => v.active ? i : null).filter(i => i != null);
        return forFoundation ? -1 : <number[]>this._items!.toArray().map((v, i) => v.active ? i : null).filter(i => i != null);
    }
    /** @internal */
    getSelectedItem() {
        if (this._role === 'listbox' || this._role === 'radiogroup' || this._role === 'menu')
            return this._items!.find(i => i.active);
        return null;
    }
    /** @internal */
    @HostBinding('attr.role') get _role() {
        if (this._function === MdcListFunction.menu)
            return 'menu';
        if (this._function === MdcListFunction.select)
            return 'listbox';
        if (this._selectionMode === 'single' || this._selectionMode === 'active')
            return 'listbox';
        if (this._checkboxes && this._checkboxes.length > 0)
            return 'group';
        if (this._radios && this._radios.length > 0)
            return 'radiogroup';
        return null;
    }
    /** @internal */
    @HostBinding('attr.aria-hidden') get _ariaHidden() {
        return (this._hidden && this._function === MdcListFunction.menu) ? 'true' : null;
    }
    /** @internal */
    @HostBinding('attr.aria-orientation') get _ariaOrientation() {
        return this._function === MdcListFunction.menu ? 'vertical' : null;
    }
    /** @internal */
    @HostBinding('class.mdc-menu__items') get _isMenu() {
        return this._function === MdcListFunction.menu;
    }
    /** @internal */
    @HostBinding('attr.tabindex') get _tabindex() {
        // the root of a menu should be focusable
        return this._function === MdcListFunction.menu ? "-1" : null;
    }
    /** @internal */
    _setFunction(val: MdcListFunction) {
        this._function = val;
        this.foundation?.setSingleSelection(this._role === 'listbox');
        this.updateItems();
    }
    /**
     * When this input is defined and does not have value false, the list will be styled more
     * compact.
     */
    @Input() @HostBinding('class.mdc-list--dense')
    get dense() {
        return this._dense;
    }
    set dense(val: boolean) {
        this._dense = asBoolean(val);
    }
    static ngAcceptInputType_dense: boolean | '';
    /**
     * When set to `single` or `active`, the list will act as a single-selection-list.
     * This enables the enter and space keys for selecting/deselecting a list item,
     * and sets the appropriate accessibility options.
     * When not set, the list will not act as a single-selection-list.
     *
     * When using `single`, the active selection is announced with `aria-selected`
     * attributes on the list elements. When using `active`, the active selection
     * is announced with `aria-current`. See [WAI-ARIA aria-current](https://www.w3.org/TR/wai-aria-1.1/#aria-current)
     * article for recommendations on usage.
     *
     * The selectionMode is ignored when there are embedded checkbox or radio inputs inside the list, in which case
     * those inputs will trigger selection of list items.
     */
    @Input()
    get selectionMode() {
        return this._selectionMode;
    }
    set selectionMode(val: 'single' | 'active' | null) {
        if (val !== this._selectionMode) {
            if (val === 'single' || val === 'active')
                this._selectionMode = val;
            else
                this._selectionMode = null;
            this.updateItems();
            if (this.foundation) {
                this.foundation.setSingleSelection(this._role === 'listbox');
                this.foundation.setSelectedIndex(this.getSelection());
                this.updateItemSelections(this.getSelection(false));
            }
        }
    }
    static ngAcceptInputType_selectionMode: 'single' | 'active' | '' | null;
    /**
     * When this input is defined and does not have value false, the list will be made
     * non-interactive. Users will not be able to interact with list items, and the styling will
     * reflect this (e.g. by not adding ripples to the items).
     */
    @Input() @HostBinding('class.mdc-list--non-interactive')
    get nonInteractive() {
        return this._nonInteractive;
    }
    set nonInteractive(val: boolean) {
        let newValue = asBoolean(val);
        if (newValue !== this._nonInteractive) {
            this._nonInteractive = newValue;
            this.updateItems();
        }
    }
    static ngAcceptInputType_nonInteractive: boolean | '';
    /**
     * When this input is defined and does not have value false, focus will wrap from last to
     * first and vice versa when using keyboard navigation through list items.
     */
    @Input()
    get wrapFocus() {
        return this._wrapFocus;
    }
    set wrapFocus(val: boolean) {
        this._wrapFocus = asBoolean(val);
        this.foundation?.setWrapFocus(this._wrapFocus);
    }
    static ngAcceptInputType_wrapFocus: boolean | '';
    /**
     * When this input is defined and does not have value false, elements with directive <code>mdcListItemGraphic</code>
     * will be styled for avatars: large, circular elements that lend themselves well to contact images, profile pictures, etc.
     */
    @Input() @HostBinding('class.mdc-list--avatar-list')
    get avatarList() {
        return this._avatar;
    }
    set avatarList(val: boolean) {
        this._avatar = asBoolean(val);
    }
    static ngAcceptInputType_avatarList: boolean | '';
    /** @internal */
    @HostListener('focusin', ['$event']) _onFocusIn(event: FocusEvent) {
        if (this.foundation && !this._nonInteractive) {
            this.foundation.setSelectedIndex(this.getSelection());
            const index = this.getListItemIndex(event as {target: EventTarget});
            this.foundation.handleFocusIn(event, index);
        }
    }
    /** @internal */
    @HostListener('focusout', ['$event']) _onFocusOut(event: FocusEvent) {
        if (this.foundation && !this._nonInteractive) {
            this.foundation.setSelectedIndex(this.getSelection());
            const index = this.getListItemIndex(event as {target: EventTarget});
            this.foundation.handleFocusOut(event, index);
        }
    }
    /** @internal */
    @HostListener('keydown', ['$event']) _onKeydown(event: KeyboardEvent) {
        if (this.foundation && !this._nonInteractive) {
            this.foundation.setSelectedIndex(this.getSelection());
            const index = this.getListItemIndex(event as {target: EventTarget});
            const onRoot = this.getItem(index)?._elm.nativeElement === event.target;
            this.foundation.handleKeydown(event, onRoot, index);
            if (this._role === 'listbox')
                this.updateItemSelections(this.foundation!.getSelectedIndex());
        }
    }
    /** @internal */
    @HostListener('click', ['$event']) _onClick(event: MouseEvent) {
        if (this.foundation && !this._nonInteractive) {
            this.foundation.setSelectedIndex(this.getSelection());
            const index = this.getListItemIndex(event as {target: EventTarget});
            // only toggle radio/checkbox input if it's not already toggled from the event:
            // (fix: use optional chaining for _input instead of non-null assertions,
            // which crashed for items without an embedded input)
            const inputElement = this.getItem(index)?._getCheckbox()?._input?._elm.nativeElement ||
                this.getItem(index)?._getRadio()?._input?._elm.nativeElement;
            const toggleInput = event.target !== inputElement;
            this.foundation.handleClick(index, toggleInput);
            if (this._role === 'listbox')
                this.updateItemSelections(this.foundation!.getSelectedIndex());
        }
    }
    /** @internal */
    getItem(index: number): MdcListItemDirective | null {
        if (index >= 0 && index < this._items!.length)
            return this._items!.toArray()[index];
        return null;
    }
    /** @internal */
    getItems(): MdcListItemDirective[] {
        return this._items?.toArray() || [];
    }
    /** @internal */
    getItemByElement(element: Element): MdcListItemDirective | null {
        return this._items?.find(i => i._elm.nativeElement === element) || null;
    }
    // Walks up from the event target to find which list item (if any) it occurred in.
    private getListItemIndex(evt: {target: EventTarget}) {
        let eventTarget: Element | null = evt.target as Element;
        const itemElements = this._items!.map(item => <Element>item._elm.nativeElement);
        while (eventTarget && eventTarget !== this._elm.nativeElement) {
            const index = itemElements.findIndex(e => e === eventTarget);
            if (index !== -1)
                return index;
            eventTarget = eventTarget.parentElement;
        }
        return -1;
    }
}
/**
* Directive for a header inside a list group (<code>mdcListGroup</code>) directive.
*/
@Directive({
    selector: '[mdcListGroupSubHeader]'
})
export class MdcListGroupSubHeaderDirective {
    /** @internal Applies the mandatory MDC subheader style to the host element. */
    @HostBinding('class.mdc-list-group__subheader') readonly _cls = true;
}
/**
* Directive for a material designed list group, grouping several `mdcList` lists.
* List groups should contain elements with `mdcListGroupSubHeader`,
* and `mdcList` directives. Lists may be separated by `mdcListSeparator` directives.
*/
@Directive({
    selector: '[mdcListGroup]'
})
export class MdcListGroupDirective {
    /** @internal Applies the mandatory MDC list-group style to the host element. */
    @HostBinding('class.mdc-list-group') readonly _cls = true;
}
/** All list-related directives in this file, for convenient NgModule declaration/export. */
export const LIST_DIRECTIVES = [
    MdcListDividerDirective,
    MdcListItemDirective,
    MdcListItemTextDirective,
    MdcListItemPrimaryTextDirective,
    MdcListItemSecondaryTextDirective,
    MdcListItemGraphicDirective,
    MdcListItemMetaDirective,
    MdcListDirective,
    MdcListGroupSubHeaderDirective,
    MdcListGroupDirective
];
import { mat4, vec4, vec3, vec2 } from '@tlaukkan/tsm';
import { AvVolume, EVolumeType, EVolumeContext, AABB, matMultiplyPoint } from '@aardvarkxr/aardvark-shared';
const createRay = require( 'ray-aabb' );
/** An AvVolume paired with the transform that places the volume in universe space. */
export interface TransformedVolume extends AvVolume
{
	universeFromVolume: mat4;
}
/**
 * Returns the point that lies `radius` units from `origin` along the
 * direction from `origin` toward `dest`.
 */
function closestPointWithinRadius( origin: vec3, dest: vec3, radius: number )
{
	// Unit direction from origin toward dest.
	const dir = new vec3( [ dest.x - origin.x, dest.y - origin.y, dest.z - origin.z ] ).normalize();
	// Step 'radius' along that direction starting at the origin.
	return new vec3( [
		origin.x + dir.x * radius,
		origin.y + dir.y * radius,
		origin.z + dir.z * radius ] );
}
/**
 * Sphere/sphere intersection test. Returns whether the two spheres overlap
 * and, if so, a point on the segment between their centers.
 */
function spheresIntersect( v1: TransformedVolume, v2: TransformedVolume ) : [ boolean, vec3 | null ]
{
	// Centers in universe space (origin of each volume transformed out).
	let v1Center = new vec3( v1.universeFromVolume.multiplyVec4( new vec4( [ 0, 0, 0, 1 ] ) ).xyz );
	let v2Center = new vec3( v2.universeFromVolume.multiplyVec4( new vec4( [ 0, 0, 0, 1 ] ) ).xyz );
	// Scale each radius by the length of a transformed unit vector, so the
	// transform's scale is accounted for (assumes uniform scale).
	let v1ScaledRadius = v1.universeFromVolume.multiplyVec4( new vec4( [ 1, 0, 0, 0 ] ) )
		.length() * v1.radius;
	let v2ScaledRadius = v2.universeFromVolume.multiplyVec4( new vec4( [ 1, 0, 0, 0 ] ) )
		.length() * v2.radius;
	let dist = vec3.distance( v1Center, v2Center );
	if( dist > ( v1ScaledRadius + v2ScaledRadius ) )
	{
		return [ false, null ];
	}
	else
	{
		// Distance from v1's center toward v2's nearest surface, capped at
		// v1's own radius and floored at zero when the spheres overlap deeply.
		let range = Math.max( 0, Math.min( v1ScaledRadius, dist - v2ScaledRadius ) );
		return [ true, closestPointWithinRadius( v1Center, v2Center, range ) ];
	}
}
/** Center point of an AABB (midpoint of min and max on each axis). */
function boxCenter( aabb: AABB )
{
	// bugfix: this previously returned (max - min), which is the box
	// *extent* (its size), not its center.
	return new vec3( [
		( aabb.xMin + aabb.xMax ) / 2,
		( aabb.yMin + aabb.yMax ) / 2,
		( aabb.zMin + aabb.zMax ) / 2,
	] );
}
/**
 * Sphere/AABB intersection test, performed in the box's local space.
 * Returns whether they overlap and, if so, the sphere center clamped into
 * the box, transformed back to universe space.
 */
function sphereBoxIntersect( sphere: TransformedVolume, box: TransformedVolume ) : [ boolean, vec3 | null ]
{
	if( !box.aabb )
	{
		return [ false, null ];
	}
	let boxFromUniverse = box.universeFromVolume.copy( new mat4() ).inverse();
	let boxFromSphere = mat4.product( boxFromUniverse, sphere.universeFromVolume, new mat4() );
	let sphereCenter = boxFromSphere.multiplyVec4( new vec4( [ 0, 0, 0, 1 ] ) );
	// Radius scaled by the combined transform (assumes uniform scale).
	let sphereScaledRadius = boxFromSphere.multiplyVec4( new vec4( [ 1, 0, 0, 0 ] ) )
		.length() * sphere.radius;
	// Per-axis distance from the sphere center to the box (0 when inside the slab).
	let xDist = Math.max( Math.max( box.aabb.xMin - sphereCenter.x, sphereCenter.x - box.aabb.xMax ), 0 );
	let yDist = Math.max( Math.max( box.aabb.yMin - sphereCenter.y, sphereCenter.y - box.aabb.yMax ), 0 );
	let zDist = Math.max( Math.max( box.aabb.zMin - sphereCenter.z, sphereCenter.z - box.aabb.zMax ), 0 );
	//console.log( xDist, yDist, zDist, sphereScaledRadius, sphereCenter, box.aabb );
	// TODO: This is wrong in the face of non-uniform scale of the box. I think each axis needs
	// to be compared indendently in that case.
	if ( ( xDist * xDist + yDist * yDist + zDist * zDist ) <= ( sphereScaledRadius * sphereScaledRadius ) )
	{
		// Clamp the sphere center onto/into the box to get the contact point.
		let intersectionInBox = new vec3( sphereCenter.xyz );
		if( sphereCenter.x < box.aabb.xMin )
		{
			intersectionInBox.x = box.aabb.xMin;
		}
		else if( sphereCenter.x > box.aabb.xMax )
		{
			intersectionInBox.x = box.aabb.xMax;
		}
		if( sphereCenter.y < box.aabb.yMin )
		{
			intersectionInBox.y = box.aabb.yMin;
		}
		else if( sphereCenter.y > box.aabb.yMax )
		{
			intersectionInBox.y = box.aabb.yMax;
		}
		if( sphereCenter.z < box.aabb.zMin )
		{
			intersectionInBox.z = box.aabb.zMin;
		}
		else if( sphereCenter.z > box.aabb.zMax )
		{
			intersectionInBox.z = box.aabb.zMax;
		}
		return [ true, matMultiplyPoint( box.universeFromVolume, intersectionInBox ) ];
	}
	else
	{
		return [ false, null ];
	}
}
/**
 * Box/box intersection test. Returns whether the boxes overlap and, if so,
 * the center of the (approximate) overlap region in universe space.
 */
function boxBoxIntersect( box1: TransformedVolume, box2: TransformedVolume ) : [ boolean, vec3 | null ]
{
	if( !box1.aabb || !box2.aabb )
	{
		return [ false, null ];
	}
	// TODO: For now do the rough "turn one box into an AABB in the other's space" approach.
	// Eventually this should do the actual unaligned box intersection
	let box1FromUniverse = box1.universeFromVolume.copy( new mat4() ).inverse();
	let box1FromBox2 = mat4.product( box1FromUniverse, box2.universeFromVolume, new mat4() );
	// The eight corners of box2's AABB, to be re-bounded in box1's space.
	let box2Points =
	[
		new vec4( [ box2.aabb.xMin, box2.aabb.yMin, box2.aabb.zMin, 1 ] ),
		new vec4( [ box2.aabb.xMin, box2.aabb.yMin, box2.aabb.zMax, 1 ] ),
		new vec4( [ box2.aabb.xMin, box2.aabb.yMax, box2.aabb.zMin, 1 ] ),
		new vec4( [ box2.aabb.xMin, box2.aabb.yMax, box2.aabb.zMax, 1 ] ),
		new vec4( [ box2.aabb.xMax, box2.aabb.yMin, box2.aabb.zMin, 1 ] ),
		new vec4( [ box2.aabb.xMax, box2.aabb.yMin, box2.aabb.zMax, 1 ] ),
		new vec4( [ box2.aabb.xMax, box2.aabb.yMax, box2.aabb.zMin, 1 ] ),
		new vec4( [ box2.aabb.xMax, box2.aabb.yMax, box2.aabb.zMax, 1 ] ),
	];
	// fix: initialize the accumulators explicitly instead of relying on
	// reading uninitialized locals through '??' (a definite-assignment error
	// under strict compilation).
	let xMin = Infinity;
	let xMax = -Infinity;
	let yMin = Infinity;
	let yMax = -Infinity;
	let zMin = Infinity;
	let zMax = -Infinity;
	for( let point of box2Points )
	{
		let pointInBox1 = box1FromBox2.multiplyVec4( point );
		xMin = Math.min( pointInBox1.x, xMin );
		xMax = Math.max( pointInBox1.x, xMax );
		yMin = Math.min( pointInBox1.y, yMin );
		yMax = Math.max( pointInBox1.y, yMax );
		zMin = Math.min( pointInBox1.z, zMin );
		zMax = Math.max( pointInBox1.z, zMax );
	}
	//console.log( xMin, xMax, yMin, yMax, zMin, zMax, box1.aabb );
	if ( xMax < box1.aabb.xMin || xMin > box1.aabb.xMax ||
		yMax < box1.aabb.yMin || yMin > box1.aabb.yMax ||
		zMax < box1.aabb.zMin || zMin > box1.aabb.zMax )
	{
		return [ false, null ];
	}
	// bugfix: the intersection point was previously computed as (max - min),
	// which is the overlap's *size*, not a point inside it. Return the center
	// of the overlap region instead.
	let ix = ( Math.max( xMin, box1.aabb.xMin ) + Math.min( xMax, box1.aabb.xMax ) ) / 2;
	let iy = ( Math.max( yMin, box1.aabb.yMin ) + Math.min( yMax, box1.aabb.yMax ) ) / 2;
	let iz = ( Math.max( zMin, box1.aabb.zMin ) + Math.min( zMax, box1.aabb.zMax ) ) / 2;
	return [ true, matMultiplyPoint( box1.universeFromVolume,
		new vec3( [ ix, iy, iz ] ) ) ];
}
/**
 * Sphere/ray intersection test, performed in the ray's space (where the ray
 * is the +X axis from the origin). Returns the first hit point in universe
 * space, or the ray origin if it starts inside the sphere.
 *
 * NOTE(review): s.radius is not scaled by the sphere's transform here,
 * unlike spheresIntersect — confirm ray tests only see unscaled spheres.
 */
function sphereRayIntersect( s: TransformedVolume, r: TransformedVolume ) : [ boolean, vec3 | null ]
{
	let rayFromUniverse = new mat4( r.universeFromVolume.all() ).inverse();
	let rayFromSphere = mat4.product( rayFromUniverse, s.universeFromVolume, new mat4() );
	let center = new vec3( rayFromSphere.multiplyVec4( new vec4( [ 0, 0, 0, 1 ] ),
		new vec4() ).xyz );
	let negCenter = new vec3( [ -center.x, -center.y, -center.z ] );
	// Quadratic for |t*X - center|^2 = radius^2 with a unit direction:
	// discriminant/4 = (X . -center)^2 - (|center|^2 - r^2)
	let rayDotCenter = vec3.dot( vec3.right, negCenter );
	let centerDist2 = vec3.dot( negCenter, negCenter );
	if( centerDist2 <= s.radius * s.radius )
	{
		// The ray origin is inside the sphere; report the origin itself as
		// the intersection point (consistent with boxRayIntersect).
		return [ true, matMultiplyPoint( r.universeFromVolume, new vec3( [ 0, 0, 0 ] ) ) ];
	}
	let disSq = rayDotCenter * rayDotCenter - ( centerDist2 - s.radius * s.radius );
	if ( disSq < 0 )
	{
		return [ false, null ];
	}
	let dis = -rayDotCenter - Math.sqrt( disSq );
	if( dis < 0 )
	{
		// bugfix: the sphere is entirely behind the ray origin; the previous
		// code reported a hit at a negative distance along the ray.
		return [ false, null ];
	}
	return [ true, matMultiplyPoint( r.universeFromVolume, new vec3( [ dis, 0, 0 ] ) ) ];
}
/**
 * Box/ray intersection test, performed in the box's space. Returns the first
 * hit point in universe space, or the ray origin if it starts inside the box.
 */
function boxRayIntersect( b: TransformedVolume, r: TransformedVolume ) : [ boolean, vec3 | null ]
{
	// fix: guard against boxes without an AABB (mirrors sphereBoxIntersect,
	// which has the same check; previously this dereferenced b.aabb blindly).
	if( !b.aabb )
	{
		return [ false, null ];
	}
	let boxFromUniverse = new mat4( b.universeFromVolume.all() ).inverse();
	let boxFromRay = mat4.product( boxFromUniverse, r.universeFromVolume, new mat4() );
	let start = new vec3( boxFromRay.multiplyVec4( new vec4( [ 0, 0, 0, 1 ] ) ).xyz );
	let dir = new vec3( boxFromRay.multiplyVec4( new vec4( [ 1, 0, 0, 0 ] ) ).xyz ).normalize();
	if( start.x >= b.aabb.xMin && start.x <= b.aabb.xMax
		&& start.y >= b.aabb.yMin && start.y <= b.aabb.yMax
		&& start.z >= b.aabb.zMin && start.z <= b.aabb.zMax )
	{
		// start point of the ray is inside the box. Intersection point
		// is the ray origin in this case, rather than some random point on the edge
		// of the box.
		return [ true, matMultiplyPoint( b.universeFromVolume, start ) ];
	}
	let normal = [ 0, 0, 0 ];
	let ray = createRay( start.xyz, dir.xyz );
	let res = ray.intersects( [ [ b.aabb.xMin, b.aabb.yMin, b.aabb.zMin ],
		[ b.aabb.xMax, b.aabb.yMax, b.aabb.zMax ] ], normal );
	if( res === false )
	{
		return [ false, null ];
	}
	else
	{
		// res is the distance along the ray; walk that far from the start point.
		let scaledDir = new vec3( [ dir.x * res, dir.y * res, dir.z * res ] );
		let ptInBox = start.add( scaledDir );
		let pt = matMultiplyPoint( b.universeFromVolume, ptInBox );
		return [ true, pt ];
	}
}
/**
 * Extracts a ray (origin, unit direction) from a transform. A ray volume is
 * the +X half-axis from its origin, so the origin is the transformed zero
 * point and the direction is the transformed X axis, normalized.
 */
export function rayFromMatrix( m: mat4 )
{
	const origin = new vec3( m.multiplyVec4( new vec4( [ 0, 0, 0, 1 ] ) ).xyz );
	const direction = new vec3( m.multiplyVec4( new vec4( [ 1, 0, 0, 0 ] ) ).xyz ).normalize();
	return [ origin, direction ];
}
/**
 * Ray/ray intersection test, performed in r0's space (where r0 is the +X
 * axis from the origin). Returns the intersection point in universe space.
 */
function rayRayIntersect( r0: TransformedVolume, r1: TransformedVolume ) : [ boolean, vec3 | null ]
{
	let r0FromUniverse = new mat4( r0.universeFromVolume.all() ).inverse();
	let r0FromR1 = mat4.product( r0FromUniverse, r1.universeFromVolume, new mat4() );
	let [ s1, d1 ] = rayFromMatrix( r0FromR1 );
	if( d1.equals( vec3.right, 0.001 ) )
	{
		// bugfix: same direction only means the rays are *parallel*. They are
		// coincident only if r1's origin also lies on r0's axis; the previous
		// code reported any parallel pair as coincident.
		if( Math.abs( s1.y ) < 0.001 && Math.abs( s1.z ) < 0.001 )
		{
			return [ true, new vec3( r0.universeFromVolume.multiplyVec4( new vec4( [ 0, 0, 0, 1 ] ) ).xyz ) ];
		}
		return [ false, null ];
	}
	let s1_2d = new vec2( s1.xy );
	let d1_2d = new vec2( d1.xy );
	let t1 = -s1_2d.y / d1_2d.y;
	if( t1 < 0 )
	{
		// rays don't intersect in 2d because of r1
		return [ false, null ];
	}
	let x = s1_2d.x + d1_2d.x * t1;
	if( x < 0 )
	{
		// rays don't intersect in 2d because of r0
		// (bugfix: this branch previously fell through without returning)
		return [ false, null ];
	}
	// now we know our t value and can compute the theoretical point of
	// intersection for ray 1
	let line = new vec3( [ d1.x * t1, d1.y * t1, d1.z * t1 ] );
	let p1 = vec3.sum( s1, line, new vec3() );
	if( !( p1.x >= 0 && Math.abs( p1.y ) < 0.001 && Math.abs( p1.z ) < 0.001 ) )
	{
		return [ false, null ];
	}
	else
	{
		// bugfix: p1 is expressed in r0's volume space, so transform it with
		// r0's matrix (the previous code used r1's).
		return [ true, matMultiplyPoint( r0.universeFromVolume, p1 ) ];
	}
}
/**
 * True when a volume participates in the given context: either side being
 * `Always` matches everything; otherwise the contexts must be equal.
 * A volume with no explicit context defaults to `Always`.
 */
export function volumeMatchesContext( v: TransformedVolume, context: EVolumeContext )
{
	const effectiveContext = v.context ?? EVolumeContext.Always;
	if( context == EVolumeContext.Always || effectiveContext == EVolumeContext.Always )
	{
		return true;
	}
	return context == effectiveContext;
}
/**
 * Intersects two transformed volumes for the given context. Returns a
 * [hit, point] pair; the point (universe space) is non-null only on a hit.
 * Dispatches to the type-pair-specific helpers above after normalizing the
 * argument order by volume type. Skeleton volumes must be expanded before
 * calling this; Empty never intersects; Infinite intersects everything.
 */
export function volumesIntersect( v1: TransformedVolume, v2: TransformedVolume, context: EVolumeContext )
	: [ boolean, vec3 | null ]
{
	if( v1.type == EVolumeType.Skeleton || v2.type == EVolumeType.Skeleton )
	{
		throw new Error( "Skeleton volumes cannot be intersected directly. Call getHandVolumes to get an"
			+ " array of volumes before you can pass those to volumesIntersect" );
	}
	if( !volumeMatchesContext( v1, context ) || !volumeMatchesContext( v2, context ) )
	{
		return [ false, null ];
	}
	if( v1.type == EVolumeType.Empty || v2.type == EVolumeType.Empty )
	{
		// empty volumes don't intersect with anything, including infinite volumes
		return [ false, null ];
	}
	if( v1.type == EVolumeType.Infinite || v2.type == EVolumeType.Infinite )
	{
		return [ true, new vec3( [ 0, 0, 0 ]) ];
	}
	// Sort the pair by type so the switch below only needs one ordering per pair.
	let va: TransformedVolume, vb: TransformedVolume;
	if( v1.type <= v2.type )
	{
		va = v1;
		vb = v2;
	}
	else
	{
		va = v2;
		vb = v1;
	}
	// we only have to deal with matching with types >= our own now. The order is:
	//   Sphere = 0,
	//   ModelBox = 1,
	//   AABB = 1,
	//   Infinite = 3,
	//   Empty = 4,
	//   Ray = 5, // ray is always down the positive X axis from the origin
	// NOTE(review): ModelBox and AABB are both listed as 1 above — confirm
	// against EVolumeType's actual declared values.
	switch( va.type )
	{
		case EVolumeType.Sphere:
			switch( vb.type )
			{
				case EVolumeType.Sphere:
					return spheresIntersect( va, vb );
				case EVolumeType.AABB:
					return sphereBoxIntersect( va, vb );
				case EVolumeType.Ray:
					return sphereRayIntersect( va, vb );
				default:
					return [ false, null ];
			}
		case EVolumeType.AABB:
			switch( vb.type )
			{
				case EVolumeType.AABB:
					return boxBoxIntersect( va, vb );
				case EVolumeType.Ray:
					return boxRayIntersect( va, vb );
				default:
					return [ false, null ];
			}
		case EVolumeType.Ray:
			switch( vb.type )
			{
				case EVolumeType.Ray:
					return rayRayIntersect( va, vb );
				default:
					return [ false, null ];
			}
		default:
			return [ false, null ];
	}
}
import { ApolloServer, gql } from "apollo-server-micro";
import { versionManagementEnabled } from "./db";
import dbDriver from "../../src/database/drivers";
import ModuleManager from "../../src/managers/Module";
import VersionManager from "../../src/managers/Version";
import { privateConfig } from "../../src/config";
import auth0 from "../../src/auth0";
import "../../src/webhooks";
import url from "native-url";
// GraphQL schema (SDL) for the dashboard API. The template literal below is
// runtime data parsed by gql, so keep explanatory comments out here and keep
// the SDL in sync with the resolvers defined further down in this file.
const typeDefs = gql`
  scalar Date
  type Query {
    dashboard: DashboardInfo!
    userByEmail(email: String): User
    groups(name: String): [Group!]!
    siteSettings: SiteSettings!
  }
  type Mutation {
    updateApplicationSettings(
      group: String!
      application: String!
      settings: ApplicationSettingsInput!
    ): ApplicationSettings
    updateGroupSettings(
      group: String!
      settings: GroupSettingsInput!
    ): GroupSettings
    publishVersion(
      group: String!
      application: String!
      version: String!
    ): ApplicationVersion!
    setRemoteVersion(
      group: String!
      application: String!
      remote: String!
      version: String
    ): Application!
    updateUser(user: UserInput!): User!
    updateSiteSettings(settings: SiteSettingsInput): SiteSettings!
    addMetric(
      group: String!
      application: String
      name: String!
      date: String!
      value: Float!
      url: String
      q1: Float
      q2: Float
      q3: Float
      max: Float
      min: Float
    ): Boolean!
    updateMetric(
      group: String!
      application: String
      name: String!
      date: String!
      value: Float!
      url: String
      q1: Float
      q2: Float
      q3: Float
      max: Float
      min: Float
    ): Boolean!
  }
  enum WebhookEventType {
    updateApplication
    deleteApplication
    updateApplicationVersion
    deleteApplicationVersion
  }
  type Webhook {
    event: WebhookEventType!
    url: String!
  }
  type Token {
    key: String!
    value: String!
  }
  type SiteSettings {
    webhooks: [Webhook]
    tokens: [Token]
  }
  input WebhookInput {
    event: WebhookEventType!
    url: String!
  }
  input TokenInput {
    key: String!
    value: String!
  }
  input SiteSettingsInput {
    webhooks: [WebhookInput]
    tokens: [TokenInput]
  }
  input MetadataInput {
    name: String!
    value: String!
  }
  input TrackedURLVariantInput {
    name: String!
    search: String
    new: Boolean
  }
  input TrackedURLInput {
    url: String!
    metadata: [MetadataInput]
    variants: [TrackedURLVariantInput!]!
  }
  input GroupSettingsInput {
    trackedURLs: [TrackedURLInput]
  }
  input ApplicationSettingsInput {
    trackedURLs: [TrackedURLInput]
  }
  type DashboardInfo {
    versionManagementEnabled: Boolean!
  }
  type Dependency {
    name: String!
    type: String!
    version: String!
  }
  type Remote {
    internalName: String!
    name: String!
  }
  type ApplicationVersion {
    environment: String!
    version: String!
    latest: Boolean!
    posted: String!
    remote: String!
    remotes: [Remote!]!
    dependencies: [Dependency]!
    overrides: [Override!]!
    modules(name: String): [Module!]!
    consumes: [Consume!]!
  }
  type ApplicationOverride {
    version: String!
    application: Application!
    name: String!
  }
  type Override {
    id: ID!
    application: Application!
    version: String
    name: String!
  }
  type Consume {
    consumingApplication: Application!
    application: Application
    name: String!
    usedIn: [FileLocation!]!
  }
  type FileLocation {
    file: String!
    url: String
  }
  type MetricValue {
    url: String
    name: String!
    date: Date!
    value: Float!
  }
  type Module {
    id: ID!
    application: Application!
    name: String!
    file: String
    requires: [String!]!
    consumedBy: [Consume]!
    metadata: [Metadata!]!
    tags: [String!]!
  }
  type TrackedURL {
    url: String!
    variants: [TrackedURLVariant!]!
    metadata: [Metadata!]!
  }
  type ApplicationSettings {
    trackedURLs: [TrackedURL]
  }
  type TrackedURLVariant {
    name: String!
    search: String!
    new: Boolean!
  }
  type Application {
    id: String!
    name: String!
    group: String!
    metadata: [Metadata!]!
    tags: [String!]!
    metrics(names: [String!]): [MetricValue!]!
    overrides: [ApplicationOverride!]!
    versions(environment: String, latest: Boolean): [ApplicationVersion!]!
    settings: ApplicationSettings
  }
  type GroupSettings {
    trackedURLs: [TrackedURL]
  }
  type Group {
    id: String!
    name: String!
    metadata: [Metadata!]!
    applications(id: String): [Application!]!
    metrics(names: [String!]): [MetricValue!]!
    settings: GroupSettings
  }
  type Metadata {
    name: String!
    value: String!
  }
  input UserInput {
    email: String!
    name: String!
    groups: [String!]
    defaultGroup: String!
  }
  type User {
    id: String!
    email: String!
    name: String!
    groups: [String!]
    defaultGroup: String!
  }
  type Versions {
    versions: [String!]!
    latest: String!
    override: [Dependency!]
  }
`;
/**
 * GraphQL resolver map for the dashboard API.
 *
 * Every resolver that touches the database first awaits `dbDriver.setup()`,
 * which is assumed to be idempotent (safe to call on every request).
 *
 * Cleanups versus the previous revision: removed dead empty `if/else`
 * blocks, removed a leftover debug `console.log`, awaited the metric write
 * calls so failures surface as GraphQL errors, and annotated previously
 * implicit-`any` parameters.
 */
const resolvers = {
  Query: {
    // Static dashboard feature flags.
    dashboard: () => {
      return {
        versionManagementEnabled: versionManagementEnabled(),
      };
    },
    userByEmail: async (_: any, { email }: any) => {
      await dbDriver.setup();
      return dbDriver.user_findByEmail(email);
    },
    // With a `name` argument returns a 0/1-element list; otherwise all groups.
    groups: async (_: any, { name }: any, ctx: any) => {
      await dbDriver.setup();
      if (name) {
        const found = await dbDriver.group_findByName(name);
        return found ? [found] : [];
      }
      return dbDriver.group_findAll();
    },
    siteSettings: () => {
      return dbDriver.siteSettings_get();
    },
  },
  Mutation: {
    // Replaces the stored settings of an application and echoes them back.
    updateApplicationSettings: async (
      _: any,
      { group, application, settings }: any
    ) => {
      await dbDriver.setup();
      const app = await dbDriver.application_find(application);
      app.settings = settings;
      await dbDriver.application_update(app);
      return settings;
    },
    // Replaces the stored settings of a group and echoes them back.
    updateGroupSettings: async (_: any, { group, settings }: any) => {
      await dbDriver.setup();
      const grp = await dbDriver.group_find(group);
      grp.settings = settings;
      await dbDriver.group_update(grp);
      return settings;
    },
    // Records a metric sample against an application (or a group when no
    // application id is given).
    addMetric: async (
      _: any,
      { group, application, date, name, value, url }: any
    ) => {
      await dbDriver.setup();
      // Await the write so a failed insert rejects the mutation instead of
      // being silently dropped (previous code was fire-and-forget).
      await dbDriver.application_addMetrics(application, {
        date: new Date(Date.parse(date)),
        id: application ? application : group,
        type: application ? "application" : "group",
        name,
        value,
        url,
        //TODO add extra keys
      });
      return true;
    },
    updateMetric: async (
      _: any,
      { group, application, date, name, value, url }: any
    ) => {
      await dbDriver.setup();
      // NOTE(review): the first argument is `application` even when the
      // metric targets a group — confirm group_updateMetric keys off the
      // payload's `id`/`type` rather than this argument.
      await dbDriver.group_updateMetric(application, {
        id: application ? application : group,
        type: application ? "application" : "group",
        name,
        value,
        url,
        //TODO add extra keys
      });
      return true;
    },
    publishVersion: async (_: any, { group, application, version }: any) => {
      return VersionManager.publishVersion(group, application, version);
    },
    setRemoteVersion: async (
      _: any,
      { group, application, remote, version }: any
    ) => {
      return VersionManager.setRemoteVersion(
        group,
        application,
        remote,
        version
      );
    },
    // Upserts a user keyed by email, then returns the stored record.
    updateUser: async (_: any, { user }: any) => {
      await dbDriver.setup();
      await dbDriver.user_update({
        id: user.email,
        ...user,
      });
      return dbDriver.user_find(user.email);
    },
    updateSiteSettings: async (_: any, { settings }: any) => {
      await dbDriver.setup();
      await dbDriver.siteSettings_update(settings);
      return dbDriver.siteSettings_get();
    },
  },
  Application: {
    // Lists versions, optionally restricted by environment. Stashes the
    // environment on the context for child resolvers (see Module.consumedBy).
    // NOTE(review): any defined `latest` argument (true OR false) filters to
    // latest-only versions — confirm `latest: false` is never expected to
    // return non-latest versions.
    versions: async ({ id }: any, { environment, latest }: any, ctx: any) => {
      ctx.environment = environment;
      await dbDriver.setup();
      let found = await dbDriver.applicationVersion_findAll(id, environment);
      if (latest !== undefined) {
        found = found.filter(({ latest }: any) => latest);
      }
      return found;
    },
    // Metrics for one application, optionally filtered by metric name.
    metrics: async ({ id }: any, { names }: any, ctx: any) => {
      await dbDriver.setup();
      const metrics = await dbDriver.application_getMetrics(id);
      return names
        ? metrics.filter(({ name }: any) => names.includes(name))
        : metrics;
    },
  },
  Consume: {
    consumingApplication: async (parent: any, args: any, ctx: any) => {
      await dbDriver.setup();
      return dbDriver.application_find(parent.consumingApplicationID);
    },
    application: async (parent: any, args: any, ctx: any) => {
      await dbDriver.setup();
      return dbDriver.application_find(parent.applicationID);
    },
  },
  Module: {
    // Relies on ctx.group / ctx.environment stashed by parent resolvers.
    consumedBy: async (parent: any, args: any, ctx: any) => {
      await dbDriver.setup();
      return ModuleManager.getConsumedBy(
        ctx.group,
        ctx.environment,
        parent.applicationID,
        parent.name
      );
    },
  },
  ApplicationVersion: {
    // Optionally narrows the version's module list to one module by name.
    modules: async ({ modules }: any, { name }: any) => {
      return name
        ? modules.filter(({ name: moduleName }: any) => name === moduleName)
        : modules;
    },
  },
  ApplicationOverride: {
    application: async ({ name }: { name: string }) => {
      await dbDriver.setup();
      return dbDriver.application_find(name);
    },
  },
  Group: {
    // Metrics for one group, optionally filtered by metric name.
    metrics: async ({ id }: any, { names }: any, ctx: any) => {
      await dbDriver.setup();
      const metrics = await dbDriver.group_getMetrics(id);
      return names
        ? metrics.filter(({ name }: any) => names.includes(name))
        : metrics;
    },
    // Stashes the group id on the context for child resolvers, then lists
    // either all applications in the group or the single requested one.
    applications: async ({ id }: any, { id: applicationId }: any, ctx: any) => {
      ctx.group = id;
      await dbDriver.setup();
      if (!applicationId) {
        return dbDriver.application_findInGroups([id]);
      }
      const found = await dbDriver.application_find(applicationId);
      return found ? [found] : [];
    },
  },
};
// Apollo server wired to the schema (typeDefs) and resolver map above,
// exposed as a request handler mounted at /api/graphql.
const apolloServer = new ApolloServer({ typeDefs, resolvers });
const apolloHandler = apolloServer.createHandler({
  path: "/api/graphql",
});
/**
 * Adapts a Connect/Express-style middleware `fn(req, res, callback)` to a
 * Promise: resolves with the value passed to the callback, or rejects when
 * that value is an Error instance.
 */
function runMiddleware(req: any, res: any, fn: any) {
  return new Promise((resolve, reject) => {
    const done = (outcome: any) =>
      outcome instanceof Error ? reject(outcome) : resolve(outcome);
    fn(req, res, done);
  });
}
/**
 * CORS middleware: sets permissive cross-origin response headers, then
 * either short-circuits an OPTIONS pre-flight with a 200 response or hands
 * the request on to `next(req, res)`.
 */
const allowCors = async (req: any, res: any, next: any) => {
  const corsHeaders: Array<[string, any]> = [
    ["Access-Control-Allow-Credentials", true],
    ["Access-Control-Allow-Origin", "*"],
    // another common pattern
    // res.setHeader('Access-Control-Allow-Origin', req.headers.origin);
    ["Access-Control-Allow-Methods", "GET,OPTIONS,PATCH,DELETE,POST,PUT"],
    [
      "Access-Control-Allow-Headers",
      "X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version",
    ],
  ];
  for (const [name, value] of corsHeaders) {
    res.setHeader(name, value);
  }
  if (req.method !== "OPTIONS") {
    return next(req, res);
  }
  res.status(200).end();
};
/**
 * POSTs a GraphQL query to this service's own /api/graphql endpoint to read
 * the configured API tokens, forwarding the caller's headers.
 *
 * Fixes: the previous query document was malformed — `query { { … } }` has
 * an extra brace level and is rejected by any spec-compliant GraphQL server.
 * Also annotates the previously implicit-`any` `headers` parameter.
 *
 * @param headers Headers to forward (merged under JSON content-type headers).
 * @returns The raw fetch Promise; the caller is responsible for parsing.
 */
const fetchToken = (headers: any) => {
  return fetch(url.resolve(privateConfig.EXTERNAL_URL, "api/graphql"), {
    method: "POST",
    headers: {
      ...headers,
      "Content-Type": "application/json",
      Accept: "application/json",
    },
    body: JSON.stringify({
      query: `query {
        siteSettings {
          tokens {
            key
            value
          }
        }
      }`,
    }),
  });
};
/**
 * Reads the API token list from the stored site settings.
 * Resolves to `false` when the list exists but is empty; otherwise resolves
 * to whatever `tokens` holds (which may be undefined when never configured).
 */
const checkForTokens = async () => {
  const settings = await dbDriver.siteSettings_get();
  const tokens = settings.tokens;
  const isEmptyList = Array.isArray(tokens) && tokens.length === 0;
  return isEmptyList ? false : tokens;
};
/**
 * Next.js API route for /api/graphql: applies CORS (which may short-circuit
 * an OPTIONS pre-flight), resolves an auth0 session in production, performs
 * a token check, then delegates to the Apollo handler.
 */
async function handler(req: any, res: any) {
  await runMiddleware(req, res, allowCors);
  // @ts-expect-error ts-migrate(2554) FIXME: Expected 1 arguments, but got 0.
  let session: { noAuth: boolean; user: {} } = false;
  if (process.env.NODE_ENV === "production") {
    session = await auth0.getSession(req);
  }
  // Either the configured token list, or false when the list is empty.
  const tokens = await checkForTokens();
  if (
    !tokens ||
    req?.headers?.Authorization?.find((token) => tokens.includes(token))
  ) {
    // NOTE(review): Node lowercases incoming header names, so
    // req.headers.Authorization is normally undefined; were a raw
    // 'Authorization' key present its value would be a string, which has no
    // .find method — confirm this branch's intent.
    session = {
      user: {},
      noAuth: false,
    };
  }
  // True when the ?token= query parameter matches a configured token.
  const hasValidToken =
    tokens &&
    tokens.some((token) => {
      return req.query.token === token;
    });
  if (process.env.NODE_ENV === "production") {
    console.log("has valid token", hasValidToken);
  }
  // @ts-expect-error ts-migrate(2339) FIXME: Property 'INTERNAL_TOKEN' does not exist on type '... Remove this comment to see the full error message
  if (!hasValidToken) {
    // NOTE(review): the 401 rejection below is commented out, so requests
    // without a valid token still reach Apollo — authentication is
    // effectively disabled here; confirm whether that is intentional.
    // // @ts-expect-error ts-migrate(2339) FIXME: Property 'user' does not exist on type '{ noAuth: ... Remove this comment to see the full error message
    // if (!session || !session.user) {
    //   // @ts-expect-error ts-migrate(2339) FIXME: Property 'noAuth' does not exist on type '{ noAuth... Remove this comment to see the full error message
    //   if (!session.noAuth) {
    //     res.status(401).json({
    //       errors: [
    //         {
    //           message: "Unauthorized",
    //           extensions: { code: "UNAUTHENTICATED" },
    //         },
    //       ],
    //     });
    //   }
    // }
  }
  console.log("runMiddleware");
  await runMiddleware(req, res, apolloHandler);
}
// Next.js API route config: disable the built-in body parser so the Apollo
// handler can consume the raw request stream itself.
export const config = {
  api: {
    bodyParser: false,
  },
};
export default handler;
import Point from '../geometry/Point';
import { CURSOR } from '../../util/Constants';
import Rectangle from '../geometry/Rectangle';
import { contains } from '../../util/mathUtils';
import { setOpacity } from '../../util/styleUtils';
import ElbowEdgeHandler from './ElbowEdgeHandler';
import CellState from '../cell/CellState';
import Cell from '../cell/Cell';
import InternalMouseEvent from '../event/InternalMouseEvent';
/**
 * Handler for segmented (orthogonal) edges. Extends ElbowEdgeHandler with
 * per-segment bends: a virtual handle sits at the midpoint of every segment
 * and dragging it moves that horizontal/vertical segment as a whole.
 */
class EdgeSegmentHandler extends ElbowEdgeHandler {
  constructor(state: CellState) {
    super(state);
  }
  // Preview waypoints (graph coordinates, unscaled/untranslated) maintained
  // while a segment handle is being dragged.
  points: Point[] = [];
  /**
   * Returns the current absolute points.
   * For straight edges — 2 points, or 3 near-collinear points within a
   * scale-dependent tolerance — a pair of coincident midpoints is inserted
   * so a virtual middle handle exists for moving the whole edge.
   */
  getCurrentPoints() {
    let pts = this.state.absolutePoints;
    // Special case for straight edges where we add a virtual middle handle for moving the edge
    const tol = Math.max(1, this.graph.view.scale);
    if (
      (pts.length === 2 && pts[0] && pts[1]) ||
      (pts.length === 3 &&
        pts[0] &&
        pts[1] &&
        pts[2] &&
        ((Math.abs(pts[0].x - pts[1].x) < tol && Math.abs(pts[1].x - pts[2].x) < tol) ||
          (Math.abs(pts[0].y - pts[1].y) < tol && Math.abs(pts[1].y - pts[2].y) < tol)))
    ) {
      const cx = pts[0].x + (pts[pts.length - 1]!.x - pts[0].x) / 2;
      const cy = pts[0].y + (pts[pts.length - 1]!.y - pts[0].y) / 2;
      pts = [pts[0], new Point(cx, cy), new Point(cx, cy), pts[pts.length - 1]];
    }
    return pts;
  }
  /**
   * Updates the given preview state taking into account the state of the constraint handler.
   * Terminal drags defer to the superclass; segment drags snap the dragged
   * segment's shared coordinate (x for vertical, y for horizontal) to the
   * pointer and return the interior waypoints.
   */
  getPreviewPoints(point: Point) {
    if (this.isSource || this.isTarget) {
      return super.getPreviewPoints(point);
    }
    const pts = this.getCurrentPoints();
    let last = this.convertPoint(pts[0]!.clone(), false);
    point = this.convertPoint(point.clone(), false);
    let result: Point[] = [];
    for (let i = 1; i < pts.length; i += 1) {
      const pt = this.convertPoint(pts[i]!.clone(), false);
      // Move both endpoints of the dragged segment along its free axis.
      if (i === this.index) {
        if (Math.round(last.x - pt.x) === 0) {
          last.x = point.x;
          pt.x = point.x;
        }
        if (Math.round(last.y - pt.y) === 0) {
          last.y = point.y;
          pt.y = point.y;
        }
      }
      if (i < pts.length - 1) {
        result.push(pt);
      }
      last = pt;
    }
    // Replaces single point that intersects with source or target
    if (result.length === 1) {
      const source = this.state.getVisibleTerminalState(true);
      const target = this.state.getVisibleTerminalState(false);
      const scale = this.state.view.getScale();
      const tr = this.state.view.getTranslate();
      const x = result[0].x * scale + tr.x;
      const y = result[0].y * scale + tr.y;
      if (
        (source != null && contains(source, x, y)) ||
        (target != null && contains(target, x, y))
      ) {
        result = [point, point];
      }
    }
    return result;
  }
  /**
   * Overridden to perform optimization of the edge style result.
   * Merges collinear adjacent segments, handles the straight-line and
   * straight-to-routed special cases, then re-runs the edge style on the
   * preview state.
   */
  updatePreviewState(
    edge: CellState,
    point: Point,
    terminalState: CellState,
    me: InternalMouseEvent
  ): void {
    super.updatePreviewState(edge, point, terminalState, me);
    // Checks and corrects preview by running edge style again
    if (!this.isSource && !this.isTarget) {
      point = this.convertPoint(point.clone(), false);
      const pts = edge.absolutePoints;
      let pt0 = pts[0] as Point;
      let pt1 = pts[1] as Point;
      let result = [];
      for (let i = 2; i < pts.length; i += 1) {
        const pt2 = pts[i] as Point;
        // Merges adjacent segments only if more than 2 to allow for straight edges
        if (
          (Math.round(pt0.x - pt1.x) !== 0 || Math.round(pt1.x - pt2.x) !== 0) &&
          (Math.round(pt0.y - pt1.y) !== 0 || Math.round(pt1.y - pt2.y) !== 0)
        ) {
          result.push(this.convertPoint(pt1.clone(), false));
        }
        pt0 = pt1;
        pt1 = pt2;
      }
      const source = this.state.getVisibleTerminalState(true);
      const target = this.state.getVisibleTerminalState(false);
      const rpts = this.state.absolutePoints;
      const end = pts[pts.length - 1];
      // A straight line is represented by 3 handles
      if (
        result.length === 0 &&
        pts[0] &&
        end &&
        (Math.round(pts[0].x - end.x) === 0 || Math.round(pts[0].y - end.y) === 0)
      ) {
        result = [point, point];
      }
      // Handles special case of transitions from straight vertical to routed
      else if (
        pts.length === 5 &&
        result.length === 2 &&
        source != null &&
        target != null &&
        rpts != null &&
        Math.round(rpts[0]!.x - rpts[rpts.length - 1]!.x) === 0
      ) {
        const view = this.graph.getView();
        const scale = view.getScale();
        const tr = view.getTranslate();
        let y0 = view.getRoutingCenterY(source) / scale - tr.y;
        // Use fixed connection point y-coordinate if one exists
        const sc = this.graph.getConnectionConstraint(edge, source, true);
        if (sc != null) {
          const pt = this.graph.getConnectionPoint(source, sc);
          if (pt != null) {
            this.convertPoint(pt, false);
            y0 = pt.y;
          }
        }
        let ye = view.getRoutingCenterY(target) / scale - tr.y;
        // Use fixed connection point y-coordinate if one exists
        const tc = this.graph.getConnectionConstraint(edge, target, false);
        if (tc) {
          const pt = this.graph.getConnectionPoint(target, tc);
          if (pt != null) {
            this.convertPoint(pt, false);
            ye = pt.y;
          }
        }
        result = [new Point(point.x, y0), new Point(point.x, ye)];
      }
      this.points = result;
      // LATER: Check if points and result are different
      edge.view.updateFixedTerminalPoints(edge, source, target);
      edge.view.updatePoints(edge, this.points, source, target);
      edge.view.updateFloatingTerminalPoints(edge, source, target);
    }
  }
  /**
   * Overridden to merge edge segments: collinear interior waypoints are
   * collapsed into the edge geometry before delegating the actual connect
   * to the superclass inside a single model transaction.
   */
  connect(
    edge: Cell,
    terminal: Cell,
    isSource: boolean,
    isClone: boolean,
    me: InternalMouseEvent
  ) {
    const model = this.graph.getDataModel();
    let geo = edge.getGeometry();
    let result: Point[] | null = null;
    // Merges adjacent edge segments
    if (geo != null && geo.points != null && geo.points.length > 0) {
      const pts = this.abspoints;
      let pt0 = pts[0];
      let pt1 = pts[1];
      result = [];
      for (let i = 2; i < pts.length; i += 1) {
        const pt2 = pts[i];
        // Merges adjacent segments only if more than 2 to allow for straight edges
        if (
          pt0 &&
          pt1 &&
          pt2 &&
          (Math.round(pt0.x - pt1.x) !== 0 || Math.round(pt1.x - pt2.x) !== 0) &&
          (Math.round(pt0.y - pt1.y) !== 0 || Math.round(pt1.y - pt2.y) !== 0)
        ) {
          result.push(this.convertPoint(pt1.clone(), false));
        }
        pt0 = pt1;
        pt1 = pt2;
      }
    }
    this.graph.batchUpdate(() => {
      if (result != null) {
        geo = edge.getGeometry();
        if (geo != null) {
          geo = geo.clone();
          geo.points = result;
          model.setGeometry(edge, geo);
        }
      }
      edge = super.connect(edge, terminal, isSource, isClone, me);
    });
    return edge;
  }
  /**
   * Returns no tooltips.
   */
  getTooltipForNode(node: Element): string | null {
    return null;
  }
  /**
   * Starts a drag: makes the grabbed segment handle fully opaque
   * (unless a terminal, rather than a segment, is being dragged).
   */
  start(x: number, y: number, index: number) {
    super.start(x, y, index);
    if (
      this.bends != null &&
      this.bends[index] != null &&
      !this.isSource &&
      !this.isTarget
    ) {
      setOpacity(this.bends[index].node, 100);
    }
  }
  /**
   * Adds custom bends for the center of each segment: a terminal handle at
   * each end plus one virtual handle per segment in between.
   */
  createBends() {
    const bends = [];
    // Source
    let bend = this.createHandleShape(0);
    this.initBend(bend);
    bend.setCursor(CURSOR.TERMINAL_HANDLE);
    bends.push(bend);
    const pts = this.getCurrentPoints();
    // Waypoints (segment handles)
    if (this.graph.isCellBendable(this.state.cell)) {
      if (this.points == null) {
        this.points = [];
      }
      for (let i = 0; i < pts.length - 1; i += 1) {
        bend = this.createVirtualBend();
        bends.push(bend);
        // A vertical segment (equal x) gets a horizontal resize cursor.
        let horizontal = Math.round(pts[i]!.x - pts[i + 1]!.x) === 0;
        // Special case where dy is 0 as well
        if (Math.round(pts[i]!.y - pts[i + 1]!.y) === 0 && i < pts.length - 2) {
          horizontal = Math.round(pts[i]!.x - pts[i + 2]!.x) === 0;
        }
        bend.setCursor(horizontal ? 'col-resize' : 'row-resize');
        this.points.push(new Point(0, 0));
      }
    }
    // Target
    bend = this.createHandleShape(pts.length);
    this.initBend(bend);
    bend.setCursor(CURSOR.TERMINAL_HANDLE);
    bends.push(bend);
    return bends;
  }
  /**
   * Overridden to invoke <refresh> before the redraw.
   */
  redraw() {
    this.refresh();
    super.redraw();
  }
  /**
   * Updates the position of the custom bends: each segment handle is
   * re-centered on the midpoint of its segment.
   */
  redrawInnerBends(p0: Point, pe: Point) {
    if (this.graph.isCellBendable(this.state.cell)) {
      const pts = this.getCurrentPoints();
      if (pts != null && pts.length > 1) {
        let straight = false;
        // Puts handle in the center of straight edges
        if (
          pts.length === 4 &&
          pts[0] &&
          pts[1] &&
          pts[2] &&
          pts[3] &&
          Math.round(pts[1].x - pts[2].x) === 0 &&
          Math.round(pts[1].y - pts[2].y) === 0
        ) {
          straight = true;
          if (Math.round(pts[0].y - pts[pts.length - 1]!.y) === 0) {
            const cx = pts[0].x + (pts[pts.length - 1]!.x - pts[0].x) / 2;
            pts[1] = new Point(cx, pts[1].y);
            pts[2] = new Point(cx, pts[2].y);
          } else {
            const cy = pts[0].y + (pts[pts.length - 1]!.y - pts[0].y) / 2;
            pts[1] = new Point(pts[1].x, cy);
            pts[2] = new Point(pts[2].x, cy);
          }
        }
        for (let i = 0; i < pts.length - 1; i += 1) {
          if (this.bends[i + 1] != null) {
            p0 = pts[i] as Point;
            pe = pts[i + 1] as Point;
            const pt = new Point(p0.x + (pe.x - p0.x) / 2, p0.y + (pe.y - p0.y) / 2);
            const b = this.bends[i + 1].bounds as Rectangle;
            this.bends[i + 1].bounds = new Rectangle(
              Math.floor(pt.x - b.width / 2),
              Math.floor(pt.y - b.height / 2),
              b.width,
              b.height
            );
            this.bends[i + 1].redraw();
            if (this.manageLabelHandle) {
              this.checkLabelHandle(this.bends[i + 1].bounds as Rectangle);
            }
          }
        }
        // For the straight-edge case the two coincident middle handles are
        // shown at the virtual-bend opacity.
        if (straight) {
          setOpacity(this.bends[1].node, this.virtualBendOpacity);
          setOpacity(this.bends[3].node, this.virtualBendOpacity);
        }
      }
    }
  }
}
// Default export of the segment-edge handler.
export default EdgeSegmentHandler;
import * as parser from "fast-xml-parser";
import * as he from 'he';
import * as cheerio from 'cheerio';
import * as iconv from 'iconv-lite';
import { URL } from "url";
import { isString, isArray, isNumber } from "util";
import { Entry, Summary } from "./content";
import * as crypto from 'crypto';
import { CheerioAPI, Cheerio, Element } from "cheerio";
/**
 * True when `s` is a primitive string or number, i.e. something directly
 * usable as text content. Uses `typeof` checks instead of the deprecated
 * `util.isString`/`util.isNumber` helpers (Node DEP0047-era APIs slated for
 * removal), with identical semantics.
 */
function isStringified(s: any) {
  return typeof s === 'string' || typeof s === 'number';
}
/**
 * Ranks a <link> element's attribute object so the best candidate can be
 * selected: rel="alternate" (1) beats a missing/falsy rel (0), which beats
 * any other rel (-1); an absent attribute object ranks lowest (-2).
 */
function order(attr: any) {
  if (!attr) {
    return -2;
  }
  const rel = attr.rel;
  if (rel === 'alternate') {
    return 1;
  }
  return rel ? -1 : 0;
}
/**
 * Extracts a usable URL from a feed's <link> value, which the XML parser
 * may deliver as a string/number, an attribute object, a CDATA wrapper, or
 * an array of candidates. Arrays are reduced to the candidate with the
 * highest `order` score (rel preference). Returns undefined when no textual
 * value can be found.
 */
function parseLink(link: any) {
  if (isArray(link) && link.length > 0) {
    // Keep the single best candidate by rel-preference score.
    link = link.reduce((a, b) => order(a.__attr) > order(b.__attr) ? a : b);
  }
  let ans;
  if (isStringified(link)) {
    ans = link;
  } else if (isStringified(link.__attr?.href)) {
    ans = link.__attr.href;
  } else if (isStringified(link.__text)) {
    ans = link.__text;
  } else if ('__cdata' in link) {
    // CDATA may be a single chunk or an array of fragments.
    if (isStringified(link.__cdata)) {
      ans = link.__cdata;
    } else if(isArray(link.__cdata)) {
      ans = link.__cdata.join('');
    }
  }
  return ans;
}
/**
 * Serializes a parsed XML node object back into an HTML string.
 * `__attr` holds the attribute map and `__text` the text payload; every
 * other key that does not start with `__` is a child element (or an array
 * of child elements), serialized recursively in key order.
 */
function dom2html(name: string, node: any) {
  const isTextual = (v: any) => typeof v === 'string' || typeof v === 'number';
  if (isTextual(node)) {
    return `<${name}>${node}</${name}>`;
  }
  let html = `<${name}`;
  if ('__attr' in node) {
    for (const key in node.__attr) {
      html += ` ${key}="${node.__attr[key]}"`;
    }
  }
  html += '>';
  if (isTextual(node.__text)) {
    html += node.__text;
  }
  for (const key in node) {
    if (key.startsWith('__')) {
      continue;
    }
    const value = node[key];
    const children = Array.isArray(value) ? value : [value];
    for (const child of children) {
      html += dom2html(key, child);
    }
  }
  return html + `</${name}>`;
}
/**
 * Pulls the text payload out of a parsed XML node: accepts a bare
 * string/number, a `__text` node, a `__cdata` wrapper (single chunk or
 * array of fragments), or — as a stopgap — re-serializes an html-typed
 * subtree back to markup. Returns undefined when nothing textual is found.
 */
function extractText(content: any) {
  let ans;
  if (isStringified(content)) {
    ans = content;
  } else if (isStringified(content.__text)) {
    ans = content.__text;
  } else if ('__cdata' in content) {
    if (isStringified(content.__cdata)) {
      ans = content.__cdata;
    } else if(isArray(content.__cdata)) {
      ans = content.__cdata.join('');
    }
  } else if (content.__attr?.type === 'html') {
    // XXX: temporary solution. convert dom object to html string.
    ans = dom2html('html', content);
  }
  return ans;
}
/**
 * Converts one parsed feed item into an Entry.
 *
 * - Resolves the entry link against `baseURL`.
 * - Derives a stable id: sha256(baseURL + (id | guid | link)); returns
 *   undefined when the hashed id is in `exclude` (already-seen entries).
 * - Picks content from content / content:encoded / description / summary,
 *   falling back to the title.
 * - Sanitizes the HTML: absolutizes <a href> and <img src>, strips <img>
 *   height attributes and removes all <script> tags.
 * - Falls back to "now" when no date field is present.
 *
 * @throws Error when a required field (id, title, content) is missing or a
 *         present date field cannot be parsed.
 */
function parseEntry(dom: any, baseURL: string, exclude: Set<string>): Entry | undefined {
  let link;
  if (dom.link) {
    link = parseLink(dom.link);
  } else if (dom.source) {
    link = dom.source;
  }
  // Absolutize the link; anything non-textual is treated as missing.
  if (isStringified(link)) {
    link = new URL(link, baseURL).href;
  } else {
    link = undefined;
  }
  let id;
  if (dom.id) {
    id = extractText(dom.id);
  } else if (dom.guid) {
    id = extractText(dom.guid);
  } else {
    id = link;
  }
  if (!isStringified(id)) {
    throw new Error("Feed Format Error: Entry Missing ID");
  }
  // Hash with the feed URL so ids are unique across feeds.
  id = crypto.createHash("sha256").update(baseURL + id).digest('hex');
  if (exclude.has(id)) {
    return undefined;
  }
  let title;
  if ('title' in dom) {
    title = extractText(dom.title);
  }
  if (!isStringified(title)) {
    throw new Error("Feed Format Error: Entry Missing Title");
  }
  title = he.decode(title);
  // Content preference order: content > content:encoded > description >
  // summary > title.
  let content;
  if ('content' in dom) {
    content = extractText(dom.content);
  } else if ("content:encoded" in dom) {
    content = extractText(dom["content:encoded"]);
  } else if ('description' in dom) {
    content = extractText(dom.description);
  } else if ('summary' in dom) {
    content = extractText(dom.summary);
  } else {
    content = title;
  }
  if (!isStringified(content)) {
    throw new Error("Feed Format Error: Entry Missing Content");
  }
  content = he.decode(content);
  const $ = cheerio.load(content);
  // Rewrite anchor targets to absolute URLs; ignore unparsable values.
  $('a').each((_, ele) => {
    const $ele = $(ele);
    const href = $ele.attr('href');
    if (href) {
      try {
        $ele.attr('href', new URL(href, baseURL).href);
      } catch {}
    }
  });
  // Same for images; also drop fixed heights so they scale in the reader.
  $('img').each((_, ele) => {
    const $ele = $(ele);
    const src = $ele.attr('src');
    if (src) {
      try {
        $ele.attr('src', new URL(src, baseURL).href);
      } catch {}
    }
    $ele.removeAttr('height');
  });
  $('script').remove();
  content = $.html();
  // Date preference order: published > pubDate > updated > dc:date.
  let date;
  if (dom.published) {
    date = dom.published;
  } else if (dom.pubDate) {
    date = dom.pubDate;
  } else if (dom.updated) {
    date = dom.updated;
  } else if (dom["dc:date"]) {
    date = dom["dc:date"];
  }
  if (!isStringified(date)) {
    date = new Date().getTime();
  } else {
    date = new Date(date).getTime();
  }
  if (isNaN(date)) {
    throw new Error("Feed Format Error: Invalid Date");
  }
  return new Entry(id, title, content, date, link, false);
}
/**
 * Parses a raw RSS/Atom/RDF document into entries plus a feed summary using
 * fast-xml-parser. Entries whose hashed ids appear in `exclude` are skipped.
 * @throws Error on structurally invalid feeds (missing title/link/etc.).
 */
export function parseXML(xml: string, exclude: Set<string>): [Entry[], Summary] {
  // Re-decode using the encoding declared in the XML prolog (the input
  // arrives as 'binary'-encoded text). NOTE(review): the greedy regex
  // assumes a simple single-line prolog — confirm against unusual prologs.
  const match = xml.match(/<\?xml.*encoding="(\S+)".*\?>/);
  xml = iconv.decode(Buffer.from(xml, 'binary'), match ? match[1]: 'utf-8');
  const dom = parser.parse(xml, {
    attributeNamePrefix: "",
    attrNodeName: "__attr",
    textNodeName: "__text",
    cdataTagName: "__cdata",
    cdataPositionChar: "",
    ignoreAttributes: false,
    parseAttributeValue: true,
  });
  // Locate the feed root across the RSS 2.0 / Atom / RSS 1.0 (RDF) layouts.
  let feed;
  if (dom.rss) {
    if (dom.rss.channel) {
      feed = dom.rss.channel;
    } else if (dom.rss.feed) {
      feed = dom.rss.feed;
    }
  } else if (dom.channel) {
    feed = dom.channel;
  } else if (dom.feed) {
    feed = dom.feed;
  } else if (dom["rdf:RDF"]) {
    feed = dom["rdf:RDF"];
  }
  if (!feed) {
    throw new Error('Feed Format Error');
  }
  let title;
  if ('title' in feed) {
    title = extractText(feed.title);
  } else if (feed.channel?.title !== undefined) {
    title = extractText(feed.channel.title);
  }
  if (!isStringified(title)) {
    throw new Error('Feed Format Error: Missing Title');
  }
  title = he.decode(title);
  let link: any;
  if (feed.link) {
    link = parseLink(feed.link);
  } else if (feed.channel?.link) {
    link = parseLink(feed.channel.link);
  }
  if (!isStringified(link)) {
    throw new Error('Feed Format Error: Missing Link');
  }
  // Normalize protocol-relative and bare links to http.
  if (!link.match(/^https?:\/\//)) {
    if (link.match(/^\/\//)) {
      link = 'http:' + link;
    } else {
      link = 'http://' + link;
    }
  }
  // Items live under <item> (RSS/RDF) or <entry> (Atom); a single item is
  // delivered as a bare object rather than an array.
  let items: any;
  if (feed.item) {
    items = feed.item;
  } else if (feed.entry) {
    items = feed.entry;
  }
  if (!items) {
    items = [];
  } else if (!isArray(items)) {
    items = [items];
  }
  const entries: Entry[] = [];
  for (const item of items) {
    const entry = parseEntry(item, link, exclude);
    if (entry) {
      entries.push(entry);
    }
  }
  const summary = new Summary(link, title);
  return [entries, summary];
}
/**
 * Picks the best URL out of a set of <link> elements: any element with
 * rel="alternate" overrides the current pick (the last such wins);
 * otherwise the first element yielding a non-empty value is kept. The URL
 * is read from the href attribute, falling back to the text content.
 */
function getLink($link: Cheerio<Element>): string {
  let chosen = '';
  $link.each((_, ele) => {
    const $ele = cheerio.default(ele);
    const isAlternate = $ele.attr('rel') === 'alternate';
    if (chosen && !isAlternate) {
      return;
    }
    chosen = $ele.attr('href') || $ele.text();
  });
  return chosen;
}
/**
 * Rewrites `attr` on every element matched by `selector` to an absolute URL
 * resolved against `base`. Values that cannot be parsed as URLs are left
 * untouched.
 */
function resolveAttr($: CheerioAPI, base: string, selector: string, attr: string) {
  $(selector).each((_, ele) => {
    const $ele = $(ele);
    const raw = $ele.attr(attr);
    if (!raw) {
      return;
    }
    try {
      $ele.attr(attr, new URL(raw, base).href);
    } catch {
      // keep the original value when it is not a resolvable URL
    }
  });
}
/**
 * Sanitizes an HTML fragment: rewrites a/img/video/audio URLs to absolute
 * form against `base` and removes all <script> tags. Note that cheerio
 * wraps the fragment in a full <html> document on output.
 */
function resolveRelativeLinks(content: string, base: string): string {
  const $ = cheerio.load(content);
  resolveAttr($, base, 'a', 'href');
  resolveAttr($, base, 'img', 'src');
  resolveAttr($, base, 'video', 'src');
  resolveAttr($, base, 'audio', 'src');
  $('script').remove();
  return $.html();
}
// https://www.rssboard.org/rss-2-0
/**
 * Parses an RSS 2.0 document (already loaded into cheerio) into entries
 * plus a feed summary. Entry ids are sha256(base + (guid | link)); content
 * falls back through content/content:encoded, description, then title.
 * @throws Error when an item has neither guid nor link.
 */
function parseRSS($dom: CheerioAPI): [Entry[], Summary] {
  const feedTitle = $dom('channel > title').text();
  const base = getLink($dom('channel > link'));
  const entries: Entry[] = [];
  $dom('channel > item').each((_, ele) => {
    const $item = $dom(ele);
    const link = getLink($item.find('link'));
    const id = $item.find('guid').text() || link;
    if (!id) {
      throw new Error('Feed Format Error: Entry Missing ID');
    }
    const description = $item.find('description').text();
    const encoded =
      $item.find('content').text() || $item.find('content\\:encoded').text();
    const title = $item.find('title').text() || description || encoded;
    const content = resolveRelativeLinks(encoded || description || title, base);
    const pubDate = $item.find('pubDate').text();
    const timestamp = pubDate ? new Date(pubDate).getTime() : new Date().getTime();
    const hash = crypto.createHash("sha256").update(base + id).digest('hex');
    entries.push(new Entry(hash, title, content, timestamp, link, false));
  });
  return [entries, new Summary(base, feedTitle)];
}
// https://validator.w3.org/feed/docs/rss1.html
/**
 * Parses an RSS 1.0 (RDF) document into entries plus a feed summary.
 * Every item must carry a link; ids are sha256(base + link) and the
 * description doubles as both title and content fallback.
 * @throws Error when an item has no link.
 */
function parseRDF($dom: CheerioAPI): [Entry[], Summary] {
  const feedTitle = $dom('channel > title').text();
  const base = getLink($dom('channel > link'));
  const entries: Entry[] = [];
  $dom('rdf\\:RDF > item').each((_, ele) => {
    const $item = $dom(ele);
    const link = getLink($item.find('link'));
    if (!link) {
      throw new Error('Feed Format Error: Entry Missing Link');
    }
    const description = $item.find('description').text();
    const title = $item.find('title').text() || description;
    const content = resolveRelativeLinks(description || title, base);
    const rawDate = $item.find('dc\\:date').text();
    const timestamp = rawDate ? new Date(rawDate).getTime() : new Date().getTime();
    const id = crypto.createHash("sha256").update(base + link).digest('hex');
    entries.push(new Entry(id, title, content, timestamp, link, false));
  });
  return [entries, new Summary(base, feedTitle)];
}
// https://tools.ietf.org/html/rfc4287
function parseAtom($dom: CheerioAPI): [Entry[], Summary] {
const title = $dom('feed > title').text();
const base = getLink($dom('feed > link'));
const summary = new Summary(base, title);
const entries: Entry[] = [];
$dom('feed > entry').each((_, ele) => {
const $ele = $dom(ele);
let id = $ele.find('id').text();
let title = $ele.find('title').text();
let summary = $ele.find('summary').text();
let content = $ele.find('content').text();
let date: string | number = $ele.find('published').text();
let link = getLink($ele.find('link'));
id = id || link;
title = title || summary || content;
content = content || summary || title;
date = date ? new Date(date).getTime() : new Date().getTime();
if (!id) {
throw new Error('Feed Format Error: Entry Missing ID');
}
id = crypto.createHash("sha256").update(base + id).digest('hex');
content = resolveRelativeLinks(content, base);
entries.push(new Entry(id, title, content, date, link, false));
});
return [entries, summary];
}
/**
 * Cheerio-based feed parser: decodes the document per its declared XML
 * encoding, then dispatches on the root element name to the RSS 2.0 /
 * RSS 1.0 (RDF) / Atom parser.
 * @throws Error for unrecognized root elements.
 */
export function parseXML2(xml: string): [Entry[], Summary] {
  // NOTE(review): the greedy regex assumes a simple single-line prolog —
  // confirm against feeds with unusual prologs.
  const match = xml.match(/<\?xml.*encoding="(\S+)".*\?>/);
  xml = iconv.decode(Buffer.from(xml, 'binary'), match ? match[1]: 'utf-8');
  const $dom = cheerio.load(xml, {xmlMode: true});
  // NOTE(review): assumes the first child of the root is the document
  // element — a leading comment/doctype node could break this; confirm.
  const root = $dom.root().children()[0].name;
  switch (root) {
    case 'rss':
      return parseRSS($dom);
    case 'rdf:RDF':
      return parseRDF($dom);
    case 'feed':
      return parseAtom($dom);
    default:
      throw new Error('Unsupported format: ' + root);
  }
}
import {
IExecuteFunctions,
} from 'n8n-core';
import {
IDataObject,
INode,
INodeExecutionData,
INodeType,
INodeTypeDescription,
NodeOperationError,
} from 'n8n-workflow';
import {
get,
isEqual,
isObject,
lt,
merge,
pick,
reduce,
set,
unset,
} from 'lodash';
const {
NodeVM,
} = require('vm2');
export class ItemLists implements INodeType {
description: INodeTypeDescription = {
displayName: 'Item Lists',
name: 'itemLists',
icon: 'file:itemLists.svg',
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Helper for working with lists of items and transforming arrays',
defaults: {
name: 'Item Lists',
},
inputs: ['main'],
outputs: ['main'],
credentials: [],
properties: [
{
displayName: 'Resource',
name: 'resource',
type: 'hidden',
options: [
{
name: 'Item List',
value: 'itemList',
},
],
default: 'itemList',
},
{
displayName: 'Operation',
name: 'operation',
type: 'options',
options: [
{
name: 'Split Out Items',
value: 'splitOutItems',
description: 'Turn a list inside item(s) into separate items',
},
{
name: 'Aggregate Items',
value: 'aggregateItems',
description: 'Merge fields into a single new item',
},
{
name: 'Remove Duplicates',
value: 'removeDuplicates',
description: 'Remove extra items that are similar',
},
{
name: 'Sort',
value: 'sort',
description: 'Change the item order',
},
{
name: 'Limit',
value: 'limit',
description: 'Remove items if there are too many',
},
],
default: 'splitOutItems',
},
// Split out items - Fields
{
displayName: 'Field To Split Out',
name: 'fieldToSplitOut',
type: 'string',
default: '',
required: true,
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'splitOutItems',
],
},
},
description: 'The name of the input field to break out into separate items',
},
{
displayName: 'Include',
name: 'include',
type: 'options',
options: [
{
name: 'No Other Fields',
value: 'noOtherFields',
},
{
name: 'All Other Fields',
value: 'allOtherFields',
},
{
name: 'Selected Other Fields',
value: 'selectedOtherFields',
},
],
default: 'noOtherFields',
description: 'Whether to copy any other fields into the new items',
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'splitOutItems',
],
},
},
},
{
displayName: 'Fields To Include',
name: 'fieldsToInclude',
type: 'fixedCollection',
typeOptions: {
multipleValues: true,
},
placeholder: 'Add Field To Include',
default: {},
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'splitOutItems',
],
include: [
'selectedOtherFields',
],
},
},
options: [
{
displayName: '',
name: 'fields',
values: [
{
displayName: 'Field Name',
name: 'fieldName',
type: 'string',
default: '',
description: 'A field in the input items to aggregate together',
},
],
},
],
},
{
displayName: 'Fields To Aggregate',
name: 'fieldsToAggregate',
type: 'fixedCollection',
typeOptions: {
multipleValues: true,
},
placeholder: 'Add Field To Aggregate',
default: {},
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'aggregateItems',
],
},
},
options: [
{
displayName: '',
name: 'fieldToAggregate',
values: [
{
displayName: 'Input Field Name',
name: 'fieldToAggregate',
type: 'string',
default: '',
description: 'The name of a field in the input items to aggregate together',
},
{
displayName: 'Rename Field',
name: 'renameField',
type: 'boolean',
default: false,
description: 'Whether to give the field a different name in the output',
},
{
displayName: 'Output Field Name',
name: 'outputFieldName',
displayOptions: {
show: {
renameField: [
true,
],
},
},
type: 'string',
default: '',
description: 'The name of the field to put the aggregated data in. Leave blank to use the input field name',
},
],
},
],
},
// Remove duplicates - Fields
{
displayName: 'Compare',
name: 'compare',
type: 'options',
options: [
{
name: 'All Fields',
value: 'allFields',
},
{
name: 'All Fields Except',
value: 'allFieldsExcept',
},
{
name: 'Selected Fields',
value: 'selectedFields',
},
],
default: 'allFields',
description: 'The fields of the input items to compare to see if they are the same',
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'removeDuplicates',
],
},
},
},
{
displayName: 'Fields To Exclude',
name: 'fieldsToExclude',
type: 'fixedCollection',
typeOptions: {
multipleValues: true,
},
placeholder: 'Add Field To Exclude',
default: {},
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'removeDuplicates',
],
compare: [
'allFieldsExcept',
],
},
},
options: [
{
displayName: '',
name: 'fields',
values: [
{
displayName: 'Field Name',
name: 'fieldName',
type: 'string',
default: '',
description: 'A field in the input to exclude from the comparison',
},
],
},
],
},
{
displayName: 'Fields To Compare',
name: 'fieldsToCompare',
type: 'fixedCollection',
typeOptions: {
multipleValues: true,
},
placeholder: 'Add Field To Exclude',
default: {},
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'removeDuplicates',
],
compare: [
'selectedFields',
],
},
},
options: [
{
displayName: '',
name: 'fields',
values: [
{
displayName: 'Field Name',
name: 'fieldName',
type: 'string',
default: '',
description: 'A field in the input to add to the comparison',
},
],
},
],
},
// Sort - Fields
{
displayName: 'Type',
name: 'type',
type: 'options',
options: [
{
name: 'Simple',
value: 'simple',
},
{
name: 'Random',
value: 'random',
},
{
name: 'Code',
value: 'code',
},
],
default: 'simple',
description: 'The fields of the input items to compare to see if they are the same',
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'sort',
],
},
},
},
{
displayName: 'Fields To Sort By',
name: 'sortFieldsUi',
type: 'fixedCollection',
typeOptions: {
multipleValues: true,
},
placeholder: 'Add Field To Sort By',
options: [
{
displayName: '',
name: 'sortField',
values: [
{
displayName: 'Field Name',
name: 'fieldName',
type: 'string',
required: true,
default: '',
description: 'The field to sort by',
},
{
displayName: 'Order',
name: 'order',
type: 'options',
options: [
{
name: 'Ascending',
value: 'ascending',
},
{
name: 'Descending',
value: 'descending',
},
],
default: 'ascending',
description: 'The order to sort by',
},
],
},
],
default: {},
description: 'The fields of the input items to compare to see if they are the same',
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'sort',
],
type: [
'simple',
],
},
},
},
{
displayName: 'Code',
name: 'code',
type: 'string',
typeOptions: {
alwaysOpenEditWindow: true,
editor: 'code',
rows: 10,
},
default: `// The two items to compare are in the variables a and b
// Access the fields in a.json and b.json
// Return -1 if a should go before b
// Return 1 if b should go before a
// Return 0 if there's no difference
fieldName = 'myField';
if (a.json[fieldName] < b.json[fieldName]) {
return -1;
}
if (a.json[fieldName] > b.json[fieldName]) {
return 1;
}
return 0;`,
description: 'Javascript code to determine the order of any two items',
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'sort',
],
type: [
'code',
],
},
},
},
// Limit - Fields
{
displayName: 'Max Items',
name: 'maxItems',
type: 'number',
typeOptions: {
minValue: 1,
},
default: 1,
description: 'If there are more items than this number, some are removed',
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'limit',
],
},
},
},
{
displayName: 'Keep',
name: 'keep',
type: 'options',
options: [
{
name: 'First Items',
value: 'firstItems',
},
{
name: 'Last Items',
value: 'lastItems',
},
],
default: 'firstItems',
description: 'When removing items, whether to keep the ones at the start or the ending',
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'limit',
],
},
},
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Field',
default: {},
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'removeDuplicates',
],
compare: [
'allFieldsExcept',
'selectedFields',
],
},
},
options: [
{
displayName: 'Remove Other Fields',
name: 'removeOtherFields',
type: 'boolean',
default: false,
description: 'Whether to remove any fields that are not being compared. If disabled, will keep the values from the first of the duplicates',
},
{
displayName: 'Disable Dot Notation',
name: 'disableDotNotation',
type: 'boolean',
default: false,
description: 'Whether to disallow referencing child fields using `parent.child` in the field name',
},
],
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Field',
default: {},
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'sort',
],
type: [
'simple',
],
},
},
options: [
{
displayName: 'Disable Dot Notation',
name: 'disableDotNotation',
type: 'boolean',
default: false,
description: 'Whether to disallow referencing child fields using `parent.child` in the field name',
},
],
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Field',
default: {},
displayOptions: {
show: {
resource: [
'itemList',
],
operation: [
'splitOutItems',
'aggregateItems',
],
},
},
options: [
{
displayName: 'Disable Dot Notation',
name: 'disableDotNotation',
type: 'boolean',
displayOptions: {
show: {
'/operation': [
'splitOutItems',
'aggregateItems',
],
},
},
default: false,
description: 'Whether to disallow referencing child fields using `parent.child` in the field name',
},
{
displayName: 'Destination Field Name',
name: 'destinationFieldName',
type: 'string',
displayOptions: {
show: {
'/operation': [
'splitOutItems',
],
},
},
default: '',
description: 'The field in the output under which to put the split field contents',
},
{
displayName: 'Merge Lists',
name: 'mergeLists',
type: 'boolean',
displayOptions: {
show: {
'/operation': [
'aggregateItems',
],
},
},
default: false,
description: 'If the field to aggregate is a list, whether to merge the output into a single flat list (rather than a list of lists)',
},
{
displayName: 'Keep Missing And Null Values',
name: 'keepMissing',
type: 'boolean',
displayOptions: {
show: {
'/operation': [
'aggregateItems',
],
},
},
default: false,
description: 'Whether to add a null entry to the aggregated list when there is a missing or null value',
},
],
},
],
};
/**
 * Executes the selected Item Lists operation over all incoming items.
 *
 * Supported operations (resource 'itemList'):
 * - splitOutItems:    turn each element of a list field into its own item
 * - aggregateItems:   collect field(s) from all items into one list item
 * - removeDuplicates: drop items whose compared fields equal a previous item's
 * - sort:             order items by fields, randomly, or via user code
 * - limit:            keep only the first/last N items
 *
 * @returns the transformed item list wrapped by prepareOutputData
 * @throws NodeOperationError on unknown resource/operation or invalid parameters
 */
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	const items = this.getInputData();
	const length = (items.length as unknown) as number;
	const returnData: INodeExecutionData[] = [];
	const resource = this.getNodeParameter('resource', 0) as string;
	const operation = this.getNodeParameter('operation', 0) as string;
	if (resource === 'itemList') {
		if (operation === 'splitOutItems') {
			// Split Out Items: emit one output item per element of the list field.
			for (let i = 0; i < length; i++) {
				const fieldToSplitOut = this.getNodeParameter('fieldToSplitOut', i) as string;
				// NOTE(review): reads this option with item index 0 while the next line uses i — confirm the asymmetry is intended.
				const disableDotNotation = this.getNodeParameter('options.disableDotNotation', 0, false) as boolean;
				const destinationFieldName = this.getNodeParameter('options.destinationFieldName', i, '') as string;
				const include = this.getNodeParameter('include', i) as string;
				let arrayToSplit;
				if (disableDotNotation === false) {
					arrayToSplit = get(items[i].json, fieldToSplitOut);
				} else {
					arrayToSplit = items[i].json[fieldToSplitOut as string];
				}
				if (arrayToSplit === undefined) {
					// Hint at the dot-notation option when the field name looks nested.
					if (fieldToSplitOut.includes('.') && disableDotNotation === true) {
						throw new NodeOperationError(this.getNode(), `Couldn't find the field '${fieldToSplitOut}' in the input data`, { description: `If you're trying to use a nested field, make sure you turn off 'disable dot notation' in the node options` });
					} else {
						throw new NodeOperationError(this.getNode(), `Couldn't find the field '${fieldToSplitOut}' in the input data`);
					}
				}
				if (!Array.isArray(arrayToSplit)) {
					throw new NodeOperationError(this.getNode(), `The provided field '${fieldToSplitOut}' is not an array`);
				} else {
					for (const element of arrayToSplit) {
						let newItem = {};
						if (include === 'selectedOtherFields') {
							// Copy only the explicitly selected fields (never the split field itself).
							const fieldsToInclude = (this.getNodeParameter('fieldsToInclude.fields', i, []) as [{ fieldName: string }]).map(field => field.fieldName);
							if (!fieldsToInclude.length) {
								throw new NodeOperationError(this.getNode(), 'No fields specified', { description: 'Please add a field to include' });
							}
							newItem = {
								...fieldsToInclude.reduce((prev, field) => {
									if (field === fieldToSplitOut) {
										return prev;
									}
									let value;
									if (disableDotNotation === false) {
										value = get(items[i].json, field);
									} else {
										value = items[i].json[field as string];
									}
									prev = { ...prev, [field as string]: value, };
									return prev;
								}, {}),
							};
						} else if (include === 'allOtherFields') {
							// Copy every top-level field, then remove the split field.
							const keys = Object.keys(items[i].json);
							newItem = {
								...keys.reduce((prev, field) => {
									let value;
									if (disableDotNotation === false) {
										value = get(items[i].json, field);
									} else {
										value = items[i].json[field as string];
									}
									prev = { ...prev, [field as string]: value, };
									return prev;
								}, {}),
							};
							unset(newItem, fieldToSplitOut);
						}
						// Object elements are spread into the item root unless a destination field was given.
						if (typeof element === 'object' && include === 'noOtherFields' && destinationFieldName === '') {
							newItem = { ...newItem, ...element };
						} else {
							newItem = { ...newItem, [destinationFieldName as string || fieldToSplitOut as string]: element };
						}
						returnData.push({ json: newItem });
					}
				}
			}
			return this.prepareOutputData(returnData);
		} else if (operation === 'aggregateItems') {
			// Aggregate Items: collect the selected field(s) of all items into one output item.
			const disableDotNotation = this.getNodeParameter('options.disableDotNotation', 0, false) as boolean;
			const mergeLists = this.getNodeParameter('options.mergeLists', 0, false) as boolean;
			const fieldsToAggregate = this.getNodeParameter('fieldsToAggregate.fieldToAggregate', 0, []) as [{ fieldToAggregate: string, renameField: boolean, outputFieldName: string }];
			const keepMissing = this.getNodeParameter('options.keepMissing', 0, false) as boolean;
			if (!fieldsToAggregate.length) {
				throw new NodeOperationError(this.getNode(), 'No fields specified', { description: 'Please add a field to aggregate' });
			}
			// Validate that every requested field exists in at least one item.
			for (const { fieldToAggregate } of fieldsToAggregate) {
				let found = false;
				for (const item of items) {
					if (fieldToAggregate === '') {
						throw new NodeOperationError(this.getNode(), 'Field to aggregate is blank', { description: 'Please add a field to aggregate' });
					}
					if (disableDotNotation === false) {
						if (get(item.json, fieldToAggregate) !== undefined) {
							found = true;
						}
					} else if (item.json.hasOwnProperty(fieldToAggregate)) {
						found = true;
					}
				}
				if (found === false && disableDotNotation && fieldToAggregate.includes('.')) {
					throw new NodeOperationError(this.getNode(), `Couldn't find the field '${fieldToAggregate}' in the input data`, { description: `If you're trying to use a nested field, make sure you turn off 'disable dot notation' in the node options` });
				} else if (found === false && keepMissing === false) {
					throw new NodeOperationError(this.getNode(), `Couldn't find the field '${fieldToAggregate}' in the input data`);
				}
			}
			let newItem: INodeExecutionData;
			newItem = { json: {} };
			// tslint:disable-next-line: no-any
			const values: { [key: string]: any } = {};
			const outputFields: string[] = [];
			for (const { fieldToAggregate, outputFieldName, renameField } of fieldsToAggregate) {
				const field = (renameField) ? outputFieldName : fieldToAggregate;
				if (outputFields.includes(field)) {
					throw new NodeOperationError(this.getNode(), `The '${field}' output field is used more than once`, { description: `Please make sure each output field name is unique` });
				} else {
					outputFields.push(field);
				}
				// When dot notation is active, default the output name to the last path segment.
				const getFieldToAggregate = () => ((disableDotNotation === false && fieldToAggregate.includes('.')) ? fieldToAggregate.split('.').pop() : fieldToAggregate);
				const _outputFieldName = (outputFieldName) ? (outputFieldName) : getFieldToAggregate() as string;
				if (fieldToAggregate !== '') {
					values[_outputFieldName] = [];
					for (let i = 0; i < length; i++) {
						if (disableDotNotation === false) {
							let value = get(items[i].json, fieldToAggregate);
							if (!keepMissing) {
								// Skip null/undefined values and strip nulls out of list values.
								if (Array.isArray(value)) {
									value = value.filter(value => value !== null);
								} else if (value === null || value === undefined) {
									continue;
								}
							}
							if (Array.isArray(value) && mergeLists) {
								values[_outputFieldName].push(...value);
							} else {
								values[_outputFieldName].push(value);
							}
						} else {
							let value = items[i].json[fieldToAggregate];
							if (!keepMissing) {
								if (Array.isArray(value)) {
									value = value.filter(value => value !== null);
								} else if (value === null || value === undefined) {
									continue;
								}
							}
							if (Array.isArray(value) && mergeLists) {
								values[_outputFieldName].push(...value);
							} else {
								values[_outputFieldName].push(value);
							}
						}
					}
				}
			}
			// Write the collected lists into the single output item.
			for (const key of Object.keys(values)) {
				if (disableDotNotation === false) {
					set(newItem.json, key, values[key]);
				} else {
					newItem.json[key] = values[key];
				}
			}
			returnData.push(newItem);
			return this.prepareOutputData(returnData);
		} else if (operation === 'removeDuplicates') {
			// Remove Duplicates: keep only the first of each group of equal items.
			const compare = this.getNodeParameter('compare', 0) as string;
			const disableDotNotation = this.getNodeParameter('options.disableDotNotation', 0, false) as boolean;
			const removeOtherFields = this.getNodeParameter('options.removeOtherFields', 0, false) as boolean;
			// Build the union of keys across all items (flattened to dotted paths unless disabled).
			let keys = (disableDotNotation) ? Object.keys(items[0].json) : Object.keys(flattenKeys(items[0].json));
			for (const item of items) {
				for (const key of (disableDotNotation) ? Object.keys(item.json) : Object.keys(flattenKeys(item.json))) {
					if (!keys.includes(key)) {
						keys.push(key);
					}
				}
			}
			if (compare === 'allFieldsExcept') {
				const fieldsToExclude = (this.getNodeParameter('fieldsToExclude.fields', 0, []) as [{ fieldName: string }]).map(field => field.fieldName);
				if (!fieldsToExclude.length) {
					throw new NodeOperationError(this.getNode(), 'No fields specified. Please add a field to exclude from comparison');
				}
				if (disableDotNotation === false) {
					keys = Object.keys(flattenKeys(items[0].json));
				}
				keys = keys.filter(key => !fieldsToExclude.includes(key));
			// NOTE(review): plain `if` rather than `else if` — harmless because the `compare` values are mutually exclusive.
			} if (compare === 'selectedFields') {
				const fieldsToCompare = (this.getNodeParameter('fieldsToCompare.fields', 0, []) as [{ fieldName: string }]).map(field => field.fieldName);
				if (!fieldsToCompare.length) {
					throw new NodeOperationError(this.getNode(), 'No fields specified. Please add a field to compare on');
				}
				if (disableDotNotation === false) {
					keys = Object.keys(flattenKeys(items[0].json));
				}
				keys = fieldsToCompare.map(key => (key.trim()));
			}
			// This solution is O(nlogn)
			// add original index to the items
			const newItems = items.map((item, index) => ({ json: { ...item['json'], __INDEX: index, }, } as INodeExecutionData));
			//sort items using the compare keys
			newItems.sort((a, b) => {
				let result = 0;
				for (const key of keys) {
					let equal;
					if (disableDotNotation === false) {
						equal = isEqual(get(a.json, key), get(b.json, key));
					} else {
						equal = isEqual(a.json[key], b.json[key]);
					}
					if (!equal) {
						let lessThan;
						if (disableDotNotation === false) {
							lessThan = lt(get(a.json, key), get(b.json, key));
						} else {
							lessThan = lt(a.json[key], b.json[key]);
						}
						result = lessThan ? -1 : 1;
						break;
					}
				}
				return result;
			});
			// Validate the compared fields: present on every item and of one consistent type.
			for (const key of keys) {
				// tslint:disable-next-line: no-any
				let type: any = undefined;
				for (const item of newItems) {
					if (key === '') {
						throw new NodeOperationError(this.getNode(), `Name of field to compare is blank`);
					}
					const value = ((!disableDotNotation) ? get(item.json, key) : item.json[key]);
					if (value === undefined && disableDotNotation && key.includes('.')) {
						throw new NodeOperationError(this.getNode(), `'${key}' field is missing from some input items`, { description: `If you're trying to use a nested field, make sure you turn off 'disable dot notation' in the node options` });
					} else if (value === undefined) {
						throw new NodeOperationError(this.getNode(), `'${key}' field is missing from some input items`);
					}
					if (type !== undefined && value !== undefined && type !== typeof value) {
						throw new NodeOperationError(this.getNode(), `'${key}' isn't always the same type`, { description: 'The type of this field varies between items' });
					} else {
						type = typeof value;
					}
				}
			}
			// collect the original indexes of items to be removed
			const removedIndexes: number[] = [];
			let temp = newItems[0];
			// After sorting, duplicates are adjacent: keep the first of each run.
			for (let index = 1; index < newItems.length; index++) {
				if (compareItems(newItems[index], temp, keys, disableDotNotation, this.getNode())) {
					removedIndexes.push(newItems[index].json.__INDEX as unknown as number);
				} else {
					temp = newItems[index];
				}
			}
			let data = items.filter((_, index) => !removedIndexes.includes(index));
			if (removeOtherFields) {
				data = data.map(item => ({ json: pick(item.json, ...keys) }));
			}
			// return the filtered items
			return this.prepareOutputData(data);
		} else if (operation === 'sort') {
			// Sort: order the items by fields, randomly, or with user-provided code.
			let newItems = [...items];
			const type = this.getNodeParameter('type', 0) as string;
			const disableDotNotation = this.getNodeParameter('options.disableDotNotation', 0, false) as boolean;
			if (type === 'random') {
				shuffleArray(newItems);
				return this.prepareOutputData(newItems);
			}
			if (type === 'simple') {
				const sortFieldsUi = this.getNodeParameter('sortFieldsUi', 0) as IDataObject;
				const sortFields = sortFieldsUi.sortField as Array<{
					fieldName: string;
					order: 'ascending' | 'descending'
				}>;
				if (!sortFields || !sortFields.length) {
					throw new NodeOperationError(this.getNode(), 'No sorting specified. Please add a field to sort by');
				}
				// Each sort field must exist in at least one item.
				for (const { fieldName } of sortFields) {
					let found = false;
					for (const item of items) {
						if (disableDotNotation === false) {
							if (get(item.json, fieldName) !== undefined) {
								found = true;
							}
						} else if (item.json.hasOwnProperty(fieldName)) {
							found = true;
						}
					}
					if (found === false && disableDotNotation && fieldName.includes('.')) {
						throw new NodeOperationError(this.getNode(), `Couldn't find the field '${fieldName}' in the input data`, { description: `If you're trying to use a nested field, make sure you turn off 'disable dot notation' in the node options` });
					} else if (found === false) {
						throw new NodeOperationError(this.getNode(), `Couldn't find the field '${fieldName}' in the input data`);
					}
				}
				const sortFieldsWithDirection = sortFields.map(field => ({ name: field.fieldName, dir: field.order === 'ascending' ? 1 : -1 }));
				// Multi-key comparison; string values are compared case-insensitively.
				newItems.sort((a, b) => {
					let result = 0;
					for (const field of sortFieldsWithDirection) {
						let equal;
						if (disableDotNotation === false) {
							const _a = (typeof get(a.json, field.name) === 'string') ? (get(a.json, field.name) as string).toLowerCase() : get(a.json, field.name);
							const _b = (typeof get(b.json, field.name) === 'string') ? (get(b.json, field.name) as string).toLowerCase() : get(b.json, field.name);
							equal = isEqual(_a, _b);
						} else {
							const _a = (typeof a.json[field.name as string] === 'string') ? (a.json[field.name as string] as string).toLowerCase() : a.json[field.name as string];
							const _b = (typeof b.json[field.name as string] === 'string') ? (b.json[field.name as string] as string).toLowerCase() : b.json[field.name as string];
							equal = isEqual(_a, _b);
						}
						if (!equal) {
							let lessThan;
							if (disableDotNotation === false) {
								const _a = (typeof get(a.json, field.name) === 'string') ? (get(a.json, field.name) as string).toLowerCase() : get(a.json, field.name);
								const _b = (typeof get(b.json, field.name) === 'string') ? (get(b.json, field.name) as string).toLowerCase() : get(b.json, field.name);
								lessThan = lt(_a, _b);
							} else {
								const _a = (typeof a.json[field.name as string] === 'string') ? (a.json[field.name as string] as string).toLowerCase() : a.json[field.name as string];
								const _b = (typeof b.json[field.name as string] === 'string') ? (b.json[field.name as string] as string).toLowerCase() : b.json[field.name as string];
								lessThan = lt(_a, _b);
							}
							if (lessThan) {
								result = -1 * field.dir;
							} else {
								result = 1 * field.dir;
							}
							break;
						}
					}
					return result;
				});
			} else {
				// type === 'code': run the user's comparator inside a NodeVM sandbox.
				const code = this.getNodeParameter('code', 0) as string;
				// Require at least one `return` so the comparator can produce a value.
				const regexCheck = /\breturn\b/g.exec(code);
				if (regexCheck && regexCheck.length) {
					const sandbox = {
						newItems,
					};
					const mode = this.getMode();
					const options = {
						console: (mode === 'manual') ? 'redirect' : 'inherit',
						sandbox,
					};
					const vm = new NodeVM(options);
					newItems = (await vm.run(`
					module.exports = async function() {
						newItems.sort( (a,b) => {
							${code}
						})
						return newItems;
					}()`, __dirname));
				} else {
					throw new NodeOperationError(this.getNode(), `Sort code doesn't return. Please add a 'return' statement to your code`);
				}
			}
			return this.prepareOutputData(newItems);
		} else if (operation === 'limit') {
			// Limit: keep at most maxItems items, from the start or the end.
			let newItems = items;
			const maxItems = this.getNodeParameter('maxItems', 0) as number;
			const keep = this.getNodeParameter('keep', 0) as string;
			if (maxItems > items.length) {
				return this.prepareOutputData(newItems);
			}
			if (keep === 'firstItems') {
				newItems = items.slice(0, maxItems);
			} else {
				newItems = items.slice(items.length - maxItems, items.length);
			}
			return this.prepareOutputData(newItems);
		} else {
			throw new NodeOperationError(this.getNode(), `Operation '${operation}' is not recognized`);
		}
	} else {
		throw new NodeOperationError(this.getNode(), `Resource '${resource}' is not recognized`);
	}
}
}
/**
 * Returns true when both items hold equal values for every compared key.
 * With dot notation enabled (disableDotNotation === false) the keys may
 * address nested fields, e.g. 'parent.child'.
 */
const compareItems = (obj: INodeExecutionData, obj2: INodeExecutionData, keys: string[], disableDotNotation: boolean, node: INode) => {
	for (const key of keys) {
		const left = disableDotNotation ? obj.json[key as string] : get(obj.json, key);
		const right = disableDotNotation ? obj2.json[key as string] : get(obj2.json, key);
		// A single differing field makes the items distinct.
		if (!isEqual(left, right)) {
			return false;
		}
	}
	return true;
};
/**
 * Flattens a nested object into a single level whose keys are the dot-joined
 * paths of the original leaves, e.g. {a: {b: 1}} becomes {'a.b': 1}.
 */
const flattenKeys = (obj: {}, path: string[] = []): {} => {
	if (!isObject(obj)) {
		// Leaf value: emit it under the accumulated dotted path.
		return { [path.join('.')]: obj };
	}
	const flattened = {};
	for (const [key, value] of Object.entries(obj)) {
		merge(flattened, flattenKeys(value, [...path, key]));
	}
	return flattened;
};
// tslint:disable-next-line: no-any
const shuffleArray = (array: any[]) => {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
}; | the_stack |
import { GeoCoordinates, mercatorProjection, sphereProjection } from "@here/harp-geoutils";
import { MapView, MapViewEventNames, MapViewOptions, MapViewUtils } from "@here/harp-mapview";
import * as TestUtils from "@here/harp-test-utils/lib/WebGLStub";
import * as chai from "chai";
import * as chaiAsPromised from "chai-as-promised";
import * as sinon from "sinon";
import * as THREE from "three";
// Enable promise-aware assertions (e.g. expect(...).to.be.rejected).
chai.use(chaiAsPromised);
// Needed for using expect(...).true for example
const { expect } = chai;
import { MapControls } from "../lib/MapControls";
// Node's `global` is used below to shim browser APIs when running outside a browser.
declare const global: any;
describe("MapControls", function () {
    // Dimensions reported by the stubbed canvas/DOM element.
    const DEFAULT_CANVAS_WIDTH = 800;
    const DEFAULT_CANVAS_HEIGHT = 600;
    let sandbox: sinon.SinonSandbox;
    let domElement: any;
    // True when tests run under Node (mocha CLI) rather than a browser.
    const inNodeContext = typeof window === "undefined";
    let canvas: HTMLCanvasElement;
    let mapViewOptions: MapViewOptions;
    let mapView: MapView;
    let mapControls: MapControls;
    let camera: THREE.Camera;
    let updateStub: sinon.SinonStub<any>;
    let lookAtStub: sinon.SinonStub<any>;
    let orbitAroundScreenPointSpy: sinon.SinonSpy<any>;
    // Event handlers registered by MapControls, keyed by event name, so tests
    // can invoke them directly to simulate user input.
    const eventMap: Map<string, EventListener> = new Map();
function wheel(delta: number) {
const mouseWheelHandler = eventMap.get("wheel")!;
mouseWheelHandler({
offsetX: 0,
offsetY: 0,
delta,
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any);
}
function dblClick() {
const mouseDblClickHandler = eventMap.get("dblclick")!;
mouseDblClickHandler({ clientX: 0, clientY: 0 } as any);
}
function dblTap() {
const touchStartHandler = eventMap.get("touchstart")!;
const touchEndHandler = eventMap.get("touchend")!;
const fakeTouchEvent = {
touches: [],
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any;
touchStartHandler(fakeTouchEvent);
touchEndHandler(fakeTouchEvent);
}
function mouseMove(button: number, x: number, y: number) {
eventMap.get("mousedown")!({
clientX: 0,
clientY: 0,
button,
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any);
if (inNodeContext) {
const moveHandler = eventMap.get("mousemove");
// If interaction is disabled, move handler may not even be installed.
if (!moveHandler) {
return;
}
moveHandler({
clientX: x,
clientY: y,
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any);
eventMap.get("mouseup")!({
clientX: x,
clientY: y,
button,
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any);
} else {
window.dispatchEvent(new MouseEvent("mousemove", { clientX: x, clientY: y }));
window.dispatchEvent(new MouseEvent("mouseup", { clientX: x, clientY: y, button }));
}
}
function touchMove(touchCount: number, x: number, y: number) {
const initTouches = new Array();
initTouches.length = touchCount;
initTouches.fill({ clientX: 0, clientY: 0 });
const endTouches = new Array();
endTouches.length = touchCount;
endTouches.fill({ clientX: x, clientY: y });
eventMap.get("touchstart")!({
touches: initTouches,
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any);
eventMap.get("touchmove")!({
touches: endTouches,
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any);
eventMap.get("touchend")!({
touches: endTouches,
preventDefault: () => {
/*noop*/
},
stopPropagation: () => {
/*noop*/
}
} as any);
}
    before(function () {
        // Under Node there is no browser environment, so shim the minimal
        // browser APIs that MapControls relies on (rAF, performance.now, window).
        if (inNodeContext) {
            const theGlobal: any = global;
            theGlobal.requestAnimationFrame = (callback: (time: DOMHighResTimeStamp) => void) => {
                setTimeout(callback, 0, 1);
            };
            let time = 0;
            // Deterministic clock: each call advances by a fixed step.
            theGlobal.performance = {
                now: () => {
                    // Time in ms, i.e. 20ms gives us a FPS of 50.
                    return (time += 20);
                }
            };
            // Fake window captures handlers into eventMap so tests can call them.
            (global as any).window = {
                addEventListener: (eventName: string, func: EventListener) => {
                    eventMap.set(eventName, func);
                },
                removeEventListener: () => {
                    /* noop */
                }
            };
        }
    });
    beforeEach(function () {
        sandbox = sinon.createSandbox();
        // Fake canvas element: records added listeners in eventMap so tests can
        // invoke them directly, and reports a fixed size.
        domElement = {
            addEventListener: (eventName: string, func: EventListener) => {
                eventMap.set(eventName, func);
            },
            removeEventListener: (eventName: string, func: EventListener) => {
                eventMap.delete(eventName);
            },
            getBoundingClientRect: sandbox.stub().callsFake(() => {
                return {
                    left: 0,
                    top: 0,
                    width: DEFAULT_CANVAS_WIDTH,
                    height: DEFAULT_CANVAS_HEIGHT
                };
            }),
            style: { width: `${DEFAULT_CANVAS_WIDTH}`, height: `${DEFAULT_CANVAS_HEIGHT}` },
            clientWidth: DEFAULT_CANVAS_WIDTH,
            clientHeight: DEFAULT_CANVAS_HEIGHT
        } as any;
        // Stubbed MapView wired to the fake canvas and a real perspective camera.
        mapView = sandbox.createStubInstance(MapView) as any;
        sandbox.stub(mapView, "renderer").get(() => ({ domElement }));
        updateStub = mapView.update as any;
        lookAtStub = mapView.lookAt as any;
        orbitAroundScreenPointSpy = sandbox.spy(MapViewUtils, "orbitAroundScreenPoint");
        sandbox.stub(mapView, "projection").get(() => {
            return mercatorProjection;
        });
        sandbox.stub(mapView, "target").get(() => {
            return GeoCoordinates.fromDegrees(0, 0);
        });
        sandbox.stub(mapView, "tilt").get(() => {
            return 0;
        });
        mapView.minZoomLevel = 0;
        mapView.maxZoomLevel = 20;
        camera = new THREE.PerspectiveCamera(40);
        sandbox.stub(mapView, "camera").get(() => camera);
        // Discard update() calls made during setup.
        updateStub.resetHistory();
    });
afterEach(function () {
sandbox.restore();
eventMap.clear();
});
after(function () {
if (inNodeContext) {
delete global.requestAnimationFrame;
delete global.performance;
delete global.window;
}
});
    describe("on object creation", function () {
        let maxZoom: number;
        let minZoom: number;
        let minCameraHeight: number;
        beforeEach(function () {
            // Distinctive, non-default limits so the constructor's copies can be
            // told apart from defaults.
            maxZoom = 10;
            minZoom = 5;
            minCameraHeight = 100;
            sandbox.stub(mapView, "maxZoomLevel").get(() => maxZoom);
            sandbox.stub(mapView, "minZoomLevel").get(() => minZoom);
            sandbox.stub(mapView, "minCameraHeight").get(() => minCameraHeight);
            sandbox.stub(mapView, "projection").get(() => mercatorProjection);
            mapControls = new MapControls(mapView);
        });
        it("initializes camera property using value from constructor param", function () {
            expect(mapControls.camera).to.be.equals(camera);
        });
        it("initializes domElement property using value from constructor param", function () {
            expect(mapControls.domElement).to.be.equals(domElement);
        });
        it("initializes minZoomLevel property using value from constructor param", function () {
            expect(mapControls.minZoomLevel).to.be.equals(minZoom);
        });
        it("initializes maxZoomLevel property using value from constructor param", function () {
            expect(mapControls.maxZoomLevel).to.be.equals(maxZoom);
        });
        it("initializes minCameraHeight property using value from constructor param", function () {
            expect(mapControls.minCameraHeight).to.be.equals(minCameraHeight);
        });
    });
    // Pointer-move handlers must request a MapView re-render via update().
    it("correctly updates mapView on mouse move", function () {
        const controls = new MapControls(mapView);
        sandbox.stub(controls, "dispatchEvent");
        sandbox.stub(controls as any, "getPointerPosition").returns({ x: 0, y: 0 });
        expect(updateStub.callCount).to.be.equal(0);
        (controls as any).mouseMove({
            preventDefault: sandbox.stub(),
            stopPropagation: sandbox.stub()
        });
        expect(updateStub.callCount).to.be.equal(1);
    });
    it("correctly updates mapView on touch move", function () {
        const controls = new MapControls(mapView);
        // Pretend a multi-touch gesture is already in progress.
        (controls as any).m_touchState.touches = { length: 5 };
        sandbox.stub(controls as any, "updateTouches");
        sandbox.stub(controls, "dispatchEvent");
        sandbox.stub(controls as any, "getPointerPosition").returns({ x: 0, y: 0 });
        expect(updateStub.callCount).to.be.equal(0);
        (controls as any).touchMove({
            touches: [],
            preventDefault: sandbox.stub(),
            stopPropagation: sandbox.stub()
        });
        expect(updateStub.callCount).to.be.equal(1);
    });
    // dispose() must unregister every event handler it installed.
    it("dispose", function () {
        const controls = new MapControls(mapView);
        controls.dispose();
        expect(controls.eventTypes.length).to.be.equal(0, `events not removed.`);
    });
    describe("zoomOnTargetPosition", function () {
        // Positions the camera to look at geo (0, 0) with the given pitch; the
        // distance comes from zoomLevel when provided, otherwise 1e6 world units.
        function resetCamera(pitch: number, zoomLevel?: number) {
            const target = GeoCoordinates.fromDegrees(0, 0);
            const heading = 0;
            const distance = zoomLevel
                ? MapViewUtils.calculateDistanceFromZoomLevel(mapView, zoomLevel)
                : 1e6;
            MapViewUtils.getCameraRotationAtTarget(
                mapView.projection,
                target,
                -heading,
                pitch,
                camera.quaternion
            );
            MapViewUtils.getCameraPositionFromTargetCoordinates(
                target,
                distance,
                -heading,
                pitch,
                mapView.projection,
                camera.position
            );
            camera.updateMatrixWorld(true);
        }
        // Derives the current zoom level back from the camera-to-target distance.
        function computeZoomLevel() {
            const distance = MapViewUtils.getTargetAndDistance(mapView.projection, camera).distance;
            return MapViewUtils.calculateZoomLevelFromDistance(mapView, distance);
        }
        // Run the same assertions against both supported projections.
        for (const { projName, projection } of [
            { projName: "mercator", projection: mercatorProjection },
            { projName: "sphere", projection: sphereProjection }
        ]) {
            describe(`${projName} projection`, function () {
                beforeEach(function () {
                    const worldTarget = projection.projectPoint(
                        GeoCoordinates.fromDegrees(0, 0),
                        new THREE.Vector3()
                    );
                    sandbox.stub(mapView, "projection").get(() => projection);
                    sandbox.stub(mapView, "focalLength").get(() => 2000);
                    sandbox.stub(mapView, "minZoomLevel").get(() => 1);
                    sandbox.stub(mapView, "maxZoomLevel").get(() => 20);
                    sandbox.stub(mapView, "worldTarget").get(() => {
                        return worldTarget;
                    });
                    mapControls = new MapControls(mapView);
                });
                for (const pitch of [0, 45]) {
                    it(`camera is moved along view direction (pitch ${pitch})`, function () {
                        resetCamera(pitch);
                        mapControls.zoomOnTargetPosition(0, 0, 10);
                        const initWorldDir = mapView.worldTarget
                            .clone()
                            .sub(camera.position)
                            .normalize();
                        mapControls.zoomOnTargetPosition(0, 0, 11);
                        const endWorldDir = mapView.worldTarget
                            .clone()
                            .sub(camera.position)
                            .normalize();
                        // Zooming on the view center must not change the view direction.
                        expect(initWorldDir.dot(endWorldDir)).closeTo(1, 1e-5);
                    });
                    it(`camera target is recomputed (pitch ${pitch})`, function () {
                        resetCamera(pitch, 5);
                        mapControls.maxTiltAngle = 90;
                        mapControls.zoomOnTargetPosition(0, 0.1, 6);
                        const oldTarget = MapViewUtils.getTargetAndDistance(projection, camera)
                            .target;
                        const expAzimuth = MapViewUtils.extractSphericalCoordinatesFromLocation(
                            mapView,
                            camera,
                            projection.unprojectPoint(oldTarget)
                        ).azimuth;
                        mapControls.zoomOnTargetPosition(0, 0.2, 7);
                        const newTarget = MapViewUtils.getTargetAndDistance(projection, camera)
                            .target;
                        const actualAzimuth = MapViewUtils.extractSphericalCoordinatesFromLocation(
                            mapView,
                            camera,
                            projection.unprojectPoint(newTarget)
                        ).azimuth;
                        // The azimuth to the recomputed target must be unchanged.
                        expect(actualAzimuth).to.be.closeTo(expAzimuth, 1e-5);
                    });
                    it(`zoom target stays at the same screen coords (pitch ${pitch})`, function () {
                        resetCamera(pitch);
                        const initZoomTarget = MapViewUtils.rayCastWorldCoordinates(
                            mapView,
                            0.5,
                            0.5
                        );
                        mapControls.zoomOnTargetPosition(0.5, 0.5, 10);
                        const endZoomTarget = MapViewUtils.rayCastWorldCoordinates(
                            mapView,
                            0.5,
                            0.5
                        );
                        expect(initZoomTarget).to.not.equal(undefined);
                        expect(endZoomTarget).to.not.equal(undefined);
                        expect(initZoomTarget!.distanceTo(endZoomTarget!)).to.be.closeTo(0, 1);
                    });
                    it(`zl is applied even if target is not valid (pitch ${pitch})`, function () {
                        const eps = 1e-5;
                        // (1, 1) is the bottom-right corner, where the zoom ray may
                        // miss the map; the zoom level must still be applied.
                        resetCamera(pitch, 3);
                        mapControls.maxTiltAngle = 90;
                        {
                            const expectedZl = 2;
                            mapControls.zoomOnTargetPosition(1, 1, expectedZl);
                            const actualZl = computeZoomLevel();
                            expect(actualZl).closeTo(expectedZl, eps);
                        }
                        resetCamera(pitch, 3);
                        {
                            const expectedZl = 4;
                            mapControls.zoomOnTargetPosition(1, 1, expectedZl);
                            const actualZl = computeZoomLevel();
                            expect(actualZl).closeTo(expectedZl, eps);
                        }
                    });
                }
            });
        }
    });
describe("enable/disable interactions", function () {
    const initialZoomLevel = 15;
    beforeEach(function () {
        // Place the camera on the z-axis looking at the origin.
        const cameraPosition = new THREE.Vector3(0, 0, 10);
        camera.position.set(0, 0, 10);
        camera.lookAt(new THREE.Vector3(0, 0, 0));
        camera.updateMatrixWorld(true);
        (camera as THREE.PerspectiveCamera).far = cameraPosition.length();
        (camera as THREE.PerspectiveCamera).updateProjectionMatrix();
        mapControls = new MapControls(mapView);
        // Disable inertia so each simulated interaction takes effect
        // immediately instead of being animated over several frames.
        mapControls.inertiaEnabled = false;
        sandbox.stub(mapView, "zoomLevel").get(() => {
            return initialZoomLevel;
        });
        const worldTarget = mapView.projection.projectPoint(
            GeoCoordinates.fromDegrees(0, 0),
            new THREE.Vector3()
        );
        sandbox.stub(mapView, "worldTarget").get(() => {
            return worldTarget;
        });
        sandbox.stub(mapView, "focalLength").get(() => {
            return 100;
        });
        // needed to get the initial zoom level from MapView.
        mapControls["assignZoomAfterTouchZoomRender"]();
        expect(mapControls.zoomLevelTargeted).to.equal(initialZoomLevel);
    });
    // Each interaction kind is checked in three configurations: fully
    // enabled, disabled via its specific flag, and disabled via the
    // general `enabled` flag. Interactions must only have an effect when
    // both the specific and the general flag are set.
    for (const { enabled, allEnabled, suffix } of [
        { enabled: true, allEnabled: true, suffix: "enabled" },
        { enabled: false, allEnabled: true, suffix: "disabled with specific flag" },
        { enabled: true, allEnabled: false, suffix: "disabled with general flag" }
    ]) {
        it(`zoom interactions can be ${suffix}`, function () {
            mapControls.zoomEnabled = enabled;
            mapControls.enabled = allEnabled;
            const isEnabled = allEnabled && enabled;
            // Programmatic zoom, mouse wheel, double click and double tap
            // must all either change or keep the targeted zoom level.
            mapControls.setZoomLevel(initialZoomLevel + 1);
            expect(mapControls.zoomLevelTargeted - initialZoomLevel !== 0).to.equal(isEnabled);
            mapControls.setZoomLevel(initialZoomLevel - 1);
            expect(mapControls.zoomLevelTargeted - initialZoomLevel !== 0).to.equal(isEnabled);
            wheel(1);
            expect(mapControls.zoomLevelTargeted - initialZoomLevel !== 0).to.equal(isEnabled);
            wheel(-1);
            expect(mapControls.zoomLevelTargeted - initialZoomLevel !== 0).to.equal(isEnabled);
            dblClick();
            expect(mapControls.zoomLevelTargeted - initialZoomLevel !== 0).to.equal(isEnabled);
            dblTap();
            expect(mapControls.zoomLevelTargeted - initialZoomLevel !== 0).to.equal(isEnabled);
        });
        it(`pan interactions can be ${suffix}`, function () {
            const initX = camera.position.x;
            const initY = camera.position.y;
            mapControls.panEnabled = enabled;
            mapControls.enabled = allEnabled;
            const isEnabled = allEnabled && enabled;
            // Both mouse drag and single-finger touch drag must move (or not
            // move) the camera in the x/y plane.
            mouseMove(0, domElement.clientWidth / 3, domElement.clientHeight / 3);
            expect(camera.position.x - initX !== 0).equals(isEnabled);
            expect(camera.position.y - initY !== 0).equals(isEnabled);
            touchMove(1, domElement.clientWidth / 3, domElement.clientHeight / 3);
            expect(camera.position.x - initX !== 0).equals(isEnabled);
            expect(camera.position.y - initY !== 0).equals(isEnabled);
        });
        it(`tilt interactions can be ${suffix}`, function () {
            lookAtStub.resetHistory();
            orbitAroundScreenPointSpy.resetHistory();
            mapControls.tiltEnabled = enabled;
            mapControls.enabled = allEnabled;
            const isEnabled = allEnabled && enabled;
            // Tilt toggle, right-button mouse drag and three-finger touch
            // drag must all go through orbitAroundScreenPoint (or not).
            mapControls.toggleTilt();
            expect(orbitAroundScreenPointSpy.called).to.equal(isEnabled);
            mouseMove(2, domElement.clientWidth / 3, domElement.clientHeight / 3);
            expect(orbitAroundScreenPointSpy.called).to.equal(isEnabled);
            touchMove(3, domElement.clientWidth / 3, domElement.clientHeight / 3);
            expect(orbitAroundScreenPointSpy.called).to.equal(isEnabled);
        });
    }
});
describe("toggletilt with inertia", () => {
    beforeEach(function () {
        // This test runs a non mocked version of MapView, hence we need to mock some other
        // methods to get it working correctly.
        const clearColorStub: sinon.SinonStub = sandbox.stub();
        sandbox
            .stub(THREE, "WebGLRenderer")
            .returns(TestUtils.getWebGLRendererStub(sandbox, clearColorStub));
        sandbox
            .stub(THREE, "WebGL1Renderer")
            .returns(TestUtils.getWebGLRendererStub(sandbox, clearColorStub));
        // Minimal fake canvas: MapView only needs the dimensions and the
        // event-listener registration hooks.
        canvas = ({
            clientWidth: 1,
            clientHeight: 1,
            addEventListener: sinon.stub(),
            removeEventListener: sinon.stub()
        } as unknown) as HTMLCanvasElement;
        mapViewOptions = {
            canvas,
            // Both options cause the `addDataSource` method to be called, which we can't
            // `await` on because it is called in the constructor, but we can disable them being
            // added.
            addBackgroundDatasource: false,
            enablePolarDataSource: false
        };
        mapView = new MapView(mapViewOptions);
        mapControls = new MapControls(mapView);
    });
    afterEach(() => {
        // Needed to clear any `setTimeout` calls which might rely on our global stubs.
        mapView.dispose();
    });
    it("toggle tilt reaches configured tilt angle and 0", async () => {
        mapControls.inertiaEnabled = true;
        let resolvePromise: (value: unknown) => void;
        // Resolves the currently pending promise with the map's tilt
        // (converted to radians) once a camera movement has finished.
        const checkReachedTarget = () => {
            const mapViewTiltRad = THREE.MathUtils.degToRad(mapView.tilt);
            resolvePromise(mapViewTiltRad);
        };
        mapView.addEventListener(MapViewEventNames.MovementFinished, checkReachedTarget);
        const tiltCamera = new Promise(resolve => {
            resolvePromise = resolve;
        });
        // Tilt to `mapControls.tiltAngle`
        mapControls.toggleTilt();
        await expect(tiltCamera).to.eventually.be.closeTo(
            mapControls.tiltAngle,
            Number.EPSILON
        );
        const tiltBackToZero = new Promise(resolve => {
            resolvePromise = resolve;
        });
        // Tilt back to 0
        mapControls.toggleTilt();
        await expect(tiltBackToZero).to.eventually.be.closeTo(0, Number.EPSILON);
    });
});
}); | the_stack |
import { RGBA } from "./types";
/**
 * Named color palette. All channels are normalized to [0, 1]
 * (see the {@link RGBA} type), alpha is always 1.
 */
export const COLORS = {
  // Dark-to-light gray ramp (darkest first).
  DARK: { r: 0.03, g: 0.03, b: 0.04, a: 1 },
  DARK1: { r: 0.07, g: 0.07, b: 0.08, a: 1 },
  DARK2: { r: 0.1, g: 0.1, b: 0.12, a: 1 },
  DARK3: { r: 0.14, g: 0.14, b: 0.16, a: 1 },
  DARK4: { r: 0.18, g: 0.18, b: 0.2, a: 1 },
  DARK5: { r: 0.21, g: 0.21, b: 0.24, a: 1 },
  DARK6: { r: 0.25, g: 0.25, b: 0.28, a: 1 },
  DARK7: { r: 0.29, g: 0.29, b: 0.32, a: 1 },
  DARK8: { r: 0.33, g: 0.33, b: 0.36, a: 1 },
  DARK9: { r: 0.38, g: 0.38, b: 0.4, a: 1 },
  // Light shades and grays.
  LIGHT: { r: 1.0, g: 1.0, b: 1.0, a: 1 },
  LIGHT1: { r: 0.94, g: 0.94, b: 0.94, a: 1 },
  LIGHT2: { r: 0.79, g: 0.79, b: 0.8, a: 1 },
  GRAY: { r: 0.62, g: 0.62, b: 0.64, a: 1 },
  GRAY2: { r: 0.18, g: 0.17, b: 0.2, a: 1 },
  // Hues: suffix "L1" is a lighter variant, suffix "1" a darker variant
  // of the unsuffixed base color.
  MAGENTAL1: { r: 0.88, g: 0.37, b: 0.98, a: 1 },
  MAGENTA: { r: 0.78, g: 0.24, b: 0.92, a: 1 },
  MAGENTA1: { r: 0.69, g: 0.14, b: 0.84, a: 1 },
  PURPLEL1: { r: 0.6, g: 0.53, b: 1.0, a: 1 },
  PURPLE: { r: 0.49, g: 0.42, b: 1.0, a: 1 },
  PURPLE1: { r: 0.41, g: 0.35, b: 0.96, a: 1 },
  BLUEL1: { r: 0.27, g: 0.65, b: 1.0, a: 1 },
  BLUE: { r: 0.14, g: 0.56, b: 1.0, a: 1 },
  BLUE1: { r: 0.06, g: 0.44, b: 0.95, a: 1 },
  TEALL1: { r: 0.16, g: 0.75, b: 0.82, a: 1 },
  TEAL: { r: 0.0, g: 0.66, b: 0.76, a: 1 },
  TEAL1: { r: 0.0, g: 0.56, b: 0.68, a: 1 },
  GREENL1: { r: 0.1, g: 0.74, b: 0.54, a: 1 },
  GREEN: { r: 0.0, g: 0.64, b: 0.46, a: 1 },
  GREEN1: { r: 0.0, g: 0.53, b: 0.41, a: 1 },
  LIMEL1: { r: 0.42, g: 0.84, b: 0.44, a: 1 },
  LIME: { r: 0.29, g: 0.76, b: 0.32, a: 1 },
  LIME1: { r: 0.19, g: 0.68, b: 0.29, a: 1 },
  YELLOWL1: { r: 0.96, g: 0.83, b: 0.35, a: 1 },
  YELLOW: { r: 0.97, g: 0.75, b: 0.0, a: 1 },
  YELLOW1: { r: 0.92, g: 0.66, b: 0.0, a: 1 },
  ORANGEL1: { r: 0.99, g: 0.54, b: 0.26, a: 1 },
  ORANGE: { r: 0.97, g: 0.42, b: 0.11, a: 1 },
  ORANGE1: { r: 0.9, g: 0.33, b: 0.04, a: 1 },
  REDL1: { r: 1.0, g: 0.42, b: 0.51, a: 1 },
  RED: { r: 0.96, g: 0.29, b: 0.4, a: 1 },
  RED1: { r: 0.86, g: 0.21, b: 0.33, a: 1 },
  RED2: { r: 1.0, g: 0.49, b: 0.59, a: 1 },
};
/**
 * Maps semantic class ids (keyed as strings) to display colors.
 * Classes without an explicit entry should use `defaultColor`.
 */
export const SEMANTIC_CLASS_TO_COLOR_MAP = {
  "1": COLORS.BLUE, // CAR
  "2": COLORS.TEAL, // HUMAN
  "3": COLORS.PURPLE, // BIKE
  "4": COLORS.PURPLE1, // MOTORCYCLE
  "5": COLORS.BLUE, // TRUCK
  "6": COLORS.BLUE, // BUS
  "7": COLORS.BLUE, // SCHOOL_BUS
  "8": COLORS.RED, // EMV
  "9": COLORS.BLUE, // TRAIN
  "10": COLORS.BLUE, // ANIMAL
  "2000": COLORS.LIME, // STATIC_UNKNOWN
  defaultColor: COLORS.LIME,
};
/**
 * Hue-saturation-luminance color.
 * All three components are normalized to [0, 1] (see rgbToHsl/hslToRgb).
 */
export type HSL = {
  h: number;
  s: number;
  l: number;
};
/**
 * "Rotates" the given color around the HSL color wheel by an amount between 0 and 1
 * Similar to CSS hue-rotate() function: https://www.quackit.com/css/functions/css_hue-rotate_function.cfm
 */
export function rotateHue(color: RGBA, amount: number): RGBA {
  const hsl = rgbToHsl(color);
  // Adding 1 before the modulo keeps the result non-negative for small
  // negative offsets; the original alpha channel is preserved.
  const rotated = hslToRgb({ h: (hsl.h + amount + 1) % 1, s: hsl.s, l: hsl.l });
  return { r: rotated.r, g: rotated.g, b: rotated.b, a: color.a };
}
/**
 * Converts an RGBA color into a hue-saturation-luminance (HSL) color.
 *
 * Input channels are expected in [0, 1]; the resulting `h`, `s` and `l`
 * are also in [0, 1]. The alpha channel is ignored.
 */
export function rgbToHsl(rgbColor: RGBA): HSL {
  const { r, g, b } = rgbColor;
  const max = Math.max(r, g, b);
  const min = Math.min(r, g, b);
  // Luminance is the midpoint of the extreme channels.
  const l = (max + min) / 2;
  let h = 0;
  // Initialized to 0 (the original uninitialized `let s;` was an implicit
  // `any` under strict mode); overwritten in both branches below.
  let s = 0;
  if (Math.abs(max - min) <= 0.001) {
    h = s = 0; // achromatic: channels (nearly) equal, hue is undefined
  } else {
    const d = max - min;
    s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
    // Hue depends on which channel dominates; each case yields a value in
    // [0, 6) that is normalized to [0, 1) below.
    switch (max) {
      case r:
        h = (g - b) / d + (g < b ? 6 : 0);
        break;
      case g:
        h = (b - r) / d + 2;
        break;
      case b:
        h = (r - g) / d + 4;
        break;
    }
    h /= 6;
  }
  return { h, s, l };
}
/**
 * Converts an HSL color into a RGBA color.
 *
 * The returned alpha channel is always 1.
 */
export function hslToRgb(color: HSL): RGBA {
  const { h, s, l } = color;
  // Zero saturation is a pure gray: every channel equals the luminance.
  if (s === 0) {
    return { r: l, g: l, b: l, a: 1 };
  }
  // Intermediate values q (upper bound) and p (lower bound) used by the
  // per-channel interpolation helper.
  const q = l < 0.5 ? l * (1 + s) : l + s - l * s;
  const p = 2 * l - q;
  return {
    r: hueToColorComponent(p, q, h + 1 / 3),
    g: hueToColorComponent(p, q, h),
    b: hueToColorComponent(p, q, h - 1 / 3),
    a: 1
  };
}
/**
 * Helper for hslToRgb: maps a (possibly out-of-range) hue offset to a
 * single color channel by interpolating between the bounds p and q.
 */
function hueToColorComponent(p: number, q: number, tRaw: number): number {
  // Wrap the offset into [0, 1).
  const t = (tRaw + 1) % 1;
  return t < 1 / 6
    ? p + (q - p) * 6 * t // rising edge
    : t < 1 / 2
    ? q // plateau at the upper bound
    : t < 2 / 3
    ? p + (q - p) * (2 / 3 - t) * 6 // falling edge
    : p; // floor at the lower bound
}
/**
 * Samples a colormap.
 * @param colormap 256 entry rgb array (each entry is [r, g, b] in [0, 1])
 * @param x value between 0-1 that linearly interpolates the colormap;
 *          values outside that range are clamped
 * @returns RGBA color (alpha is always 1)
 */
export function interpolateColormap(colormap: number[][], x: number): RGBA {
  // Clamp into [0, 1] using a local instead of reassigning the parameter.
  const t = Math.max(0.0, Math.min(1.0, x));
  // Indices of the two neighbouring entries and the blend factor between them.
  const a = Math.floor(t * 255.0);
  const b = Math.min(255, a + 1); // guard the upper edge (t === 1)
  const f = t * 255.0 - a;
  return {
    r: colormap[a][0] + (colormap[b][0] - colormap[a][0]) * f,
    g: colormap[a][1] + (colormap[b][1] - colormap[a][1]) * f,
    b: colormap[a][2] + (colormap[b][2] - colormap[a][2]) * f,
    a: 1.0
  };
}
/**
 * "Viridis" colormap: 256 [r, g, b] rows, channels in [0, 1],
 * suitable as the `colormap` argument of interpolateColormap().
 */
export const VIRIDIS_COLORMAP = [
  [0.267004, 0.004874, 0.329415],
  [0.268510, 0.009605, 0.335427],
  [0.269944, 0.014625, 0.341379],
  [0.271305, 0.019942, 0.347269],
  [0.272594, 0.025563, 0.353093],
  [0.273809, 0.031497, 0.358853],
  [0.274952, 0.037752, 0.364543],
  [0.276022, 0.044167, 0.370164],
  [0.277018, 0.050344, 0.375715],
  [0.277941, 0.056324, 0.381191],
  [0.278791, 0.062145, 0.386592],
  [0.279566, 0.067836, 0.391917],
  [0.280267, 0.073417, 0.397163],
  [0.280894, 0.078907, 0.402329],
  [0.281446, 0.084320, 0.407414],
  [0.281924, 0.089666, 0.412415],
  [0.282327, 0.094955, 0.417331],
  [0.282656, 0.100196, 0.422160],
  [0.282910, 0.105393, 0.426902],
  [0.283091, 0.110553, 0.431554],
  [0.283197, 0.115680, 0.436115],
  [0.283229, 0.120777, 0.440584],
  [0.283187, 0.125848, 0.444960],
  [0.283072, 0.130895, 0.449241],
  [0.282884, 0.135920, 0.453427],
  [0.282623, 0.140926, 0.457517],
  [0.282290, 0.145912, 0.461510],
  [0.281887, 0.150881, 0.465405],
  [0.281412, 0.155834, 0.469201],
  [0.280868, 0.160771, 0.472899],
  [0.280255, 0.165693, 0.476498],
  [0.279574, 0.170599, 0.479997],
  [0.278826, 0.175490, 0.483397],
  [0.278012, 0.180367, 0.486697],
  [0.277134, 0.185228, 0.489898],
  [0.276194, 0.190074, 0.493001],
  [0.275191, 0.194905, 0.496005],
  [0.274128, 0.199721, 0.498911],
  [0.273006, 0.204520, 0.501721],
  [0.271828, 0.209303, 0.504434],
  [0.270595, 0.214069, 0.507052],
  [0.269308, 0.218818, 0.509577],
  [0.267968, 0.223549, 0.512008],
  [0.266580, 0.228262, 0.514349],
  [0.265145, 0.232956, 0.516599],
  [0.263663, 0.237631, 0.518762],
  [0.262138, 0.242286, 0.520837],
  [0.260571, 0.246922, 0.522828],
  [0.258965, 0.251537, 0.524736],
  [0.257322, 0.256130, 0.526563],
  [0.255645, 0.260703, 0.528312],
  [0.253935, 0.265254, 0.529983],
  [0.252194, 0.269783, 0.531579],
  [0.250425, 0.274290, 0.533103],
  [0.248629, 0.278775, 0.534556],
  [0.246811, 0.283237, 0.535941],
  [0.244972, 0.287675, 0.537260],
  [0.243113, 0.292092, 0.538516],
  [0.241237, 0.296485, 0.539709],
  [0.239346, 0.300855, 0.540844],
  [0.237441, 0.305202, 0.541921],
  [0.235526, 0.309527, 0.542944],
  [0.233603, 0.313828, 0.543914],
  [0.231674, 0.318106, 0.544834],
  [0.229739, 0.322361, 0.545706],
  [0.227802, 0.326594, 0.546532],
  [0.225863, 0.330805, 0.547314],
  [0.223925, 0.334994, 0.548053],
  [0.221989, 0.339161, 0.548752],
  [0.220057, 0.343307, 0.549413],
  [0.218130, 0.347432, 0.550038],
  [0.216210, 0.351535, 0.550627],
  [0.214298, 0.355619, 0.551184],
  [0.212395, 0.359683, 0.551710],
  [0.210503, 0.363727, 0.552206],
  [0.208623, 0.367752, 0.552675],
  [0.206756, 0.371758, 0.553117],
  [0.204903, 0.375746, 0.553533],
  [0.203063, 0.379716, 0.553925],
  [0.201239, 0.383670, 0.554294],
  [0.199430, 0.387607, 0.554642],
  [0.197636, 0.391528, 0.554969],
  [0.195860, 0.395433, 0.555276],
  [0.194100, 0.399323, 0.555565],
  [0.192357, 0.403199, 0.555836],
  [0.190631, 0.407061, 0.556089],
  [0.188923, 0.410910, 0.556326],
  [0.187231, 0.414746, 0.556547],
  [0.185556, 0.418570, 0.556753],
  [0.183898, 0.422383, 0.556944],
  [0.182256, 0.426184, 0.557120],
  [0.180629, 0.429975, 0.557282],
  [0.179019, 0.433756, 0.557430],
  [0.177423, 0.437527, 0.557565],
  [0.175841, 0.441290, 0.557685],
  [0.174274, 0.445044, 0.557792],
  [0.172719, 0.448791, 0.557885],
  [0.171176, 0.452530, 0.557965],
  [0.169646, 0.456262, 0.558030],
  [0.168126, 0.459988, 0.558082],
  [0.166617, 0.463708, 0.558119],
  [0.165117, 0.467423, 0.558141],
  [0.163625, 0.471133, 0.558148],
  [0.162142, 0.474838, 0.558140],
  [0.160665, 0.478540, 0.558115],
  [0.159194, 0.482237, 0.558073],
  [0.157729, 0.485932, 0.558013],
  [0.156270, 0.489624, 0.557936],
  [0.154815, 0.493313, 0.557840],
  [0.153364, 0.497000, 0.557724],
  [0.151918, 0.500685, 0.557587],
  [0.150476, 0.504369, 0.557430],
  [0.149039, 0.508051, 0.557250],
  [0.147607, 0.511733, 0.557049],
  [0.146180, 0.515413, 0.556823],
  [0.144759, 0.519093, 0.556572],
  [0.143343, 0.522773, 0.556295],
  [0.141935, 0.526453, 0.555991],
  [0.140536, 0.530132, 0.555659],
  [0.139147, 0.533812, 0.555298],
  [0.137770, 0.537492, 0.554906],
  [0.136408, 0.541173, 0.554483],
  [0.135066, 0.544853, 0.554029],
  [0.133743, 0.548535, 0.553541],
  [0.132444, 0.552216, 0.553018],
  [0.131172, 0.555899, 0.552459],
  [0.129933, 0.559582, 0.551864],
  [0.128729, 0.563265, 0.551229],
  [0.127568, 0.566949, 0.550556],
  [0.126453, 0.570633, 0.549841],
  [0.125394, 0.574318, 0.549086],
  [0.124395, 0.578002, 0.548287],
  [0.123463, 0.581687, 0.547445],
  [0.122606, 0.585371, 0.546557],
  [0.121831, 0.589055, 0.545623],
  [0.121148, 0.592739, 0.544641],
  [0.120565, 0.596422, 0.543611],
  [0.120092, 0.600104, 0.542530],
  [0.119738, 0.603785, 0.541400],
  [0.119512, 0.607464, 0.540218],
  [0.119423, 0.611141, 0.538982],
  [0.119483, 0.614817, 0.537692],
  [0.119699, 0.618490, 0.536347],
  [0.120081, 0.622161, 0.534946],
  [0.120638, 0.625828, 0.533488],
  [0.121380, 0.629492, 0.531973],
  [0.122312, 0.633153, 0.530398],
  [0.123444, 0.636809, 0.528763],
  [0.124780, 0.640461, 0.527068],
  [0.126326, 0.644107, 0.525311],
  [0.128087, 0.647749, 0.523491],
  [0.130067, 0.651384, 0.521608],
  [0.132268, 0.655014, 0.519661],
  [0.134692, 0.658636, 0.517649],
  [0.137339, 0.662252, 0.515571],
  [0.140210, 0.665859, 0.513427],
  [0.143303, 0.669459, 0.511215],
  [0.146616, 0.673050, 0.508936],
  [0.150148, 0.676631, 0.506589],
  [0.153894, 0.680203, 0.504172],
  [0.157851, 0.683765, 0.501686],
  [0.162016, 0.687316, 0.499129],
  [0.166383, 0.690856, 0.496502],
  [0.170948, 0.694384, 0.493803],
  [0.175707, 0.697900, 0.491033],
  [0.180653, 0.701402, 0.488189],
  [0.185783, 0.704891, 0.485273],
  [0.191090, 0.708366, 0.482284],
  [0.196571, 0.711827, 0.479221],
  [0.202219, 0.715272, 0.476084],
  [0.208030, 0.718701, 0.472873],
  [0.214000, 0.722114, 0.469588],
  [0.220124, 0.725509, 0.466226],
  [0.226397, 0.728888, 0.462789],
  [0.232815, 0.732247, 0.459277],
  [0.239374, 0.735588, 0.455688],
  [0.246070, 0.738910, 0.452024],
  [0.252899, 0.742211, 0.448284],
  [0.259857, 0.745492, 0.444467],
  [0.266941, 0.748751, 0.440573],
  [0.274149, 0.751988, 0.436601],
  [0.281477, 0.755203, 0.432552],
  [0.288921, 0.758394, 0.428426],
  [0.296479, 0.761561, 0.424223],
  [0.304148, 0.764704, 0.419943],
  [0.311925, 0.767822, 0.415586],
  [0.319809, 0.770914, 0.411152],
  [0.327796, 0.773980, 0.406640],
  [0.335885, 0.777018, 0.402049],
  [0.344074, 0.780029, 0.397381],
  [0.352360, 0.783011, 0.392636],
  [0.360741, 0.785964, 0.387814],
  [0.369214, 0.788888, 0.382914],
  [0.377779, 0.791781, 0.377939],
  [0.386433, 0.794644, 0.372886],
  [0.395174, 0.797475, 0.367757],
  [0.404001, 0.800275, 0.362552],
  [0.412913, 0.803041, 0.357269],
  [0.421908, 0.805774, 0.351910],
  [0.430983, 0.808473, 0.346476],
  [0.440137, 0.811138, 0.340967],
  [0.449368, 0.813768, 0.335384],
  [0.458674, 0.816363, 0.329727],
  [0.468053, 0.818921, 0.323998],
  [0.477504, 0.821444, 0.318195],
  [0.487026, 0.823929, 0.312321],
  [0.496615, 0.826376, 0.306377],
  [0.506271, 0.828786, 0.300362],
  [0.515992, 0.831158, 0.294279],
  [0.525776, 0.833491, 0.288127],
  [0.535621, 0.835785, 0.281908],
  [0.545524, 0.838039, 0.275626],
  [0.555484, 0.840254, 0.269281],
  [0.565498, 0.842430, 0.262877],
  [0.575563, 0.844566, 0.256415],
  [0.585678, 0.846661, 0.249897],
  [0.595839, 0.848717, 0.243329],
  [0.606045, 0.850733, 0.236712],
  [0.616293, 0.852709, 0.230052],
  [0.626579, 0.854645, 0.223353],
  [0.636902, 0.856542, 0.216620],
  [0.647257, 0.858400, 0.209861],
  [0.657642, 0.860219, 0.203082],
  [0.668054, 0.861999, 0.196293],
  [0.678489, 0.863742, 0.189503],
  [0.688944, 0.865448, 0.182725],
  [0.699415, 0.867117, 0.175971],
  [0.709898, 0.868751, 0.169257],
  [0.720391, 0.870350, 0.162603],
  [0.730889, 0.871916, 0.156029],
  [0.741388, 0.873449, 0.149561],
  [0.751884, 0.874951, 0.143228],
  [0.762373, 0.876424, 0.137064],
  [0.772852, 0.877868, 0.131109],
  [0.783315, 0.879285, 0.125405],
  [0.793760, 0.880678, 0.120005],
  [0.804182, 0.882046, 0.114965],
  [0.814576, 0.883393, 0.110347],
  [0.824940, 0.884720, 0.106217],
  [0.835270, 0.886029, 0.102646],
  [0.845561, 0.887322, 0.099702],
  [0.855810, 0.888601, 0.097452],
  [0.866013, 0.889868, 0.095953],
  [0.876168, 0.891125, 0.095250],
  [0.886271, 0.892374, 0.095374],
  [0.896320, 0.893616, 0.096335],
  [0.906311, 0.894855, 0.098125],
  [0.916242, 0.896091, 0.100717],
  [0.926106, 0.897330, 0.104071],
  [0.935904, 0.898570, 0.108131],
  [0.945636, 0.899815, 0.112838],
  [0.955300, 0.901065, 0.118128],
  [0.964894, 0.902323, 0.123941],
  [0.974417, 0.903590, 0.130215],
  [0.983868, 0.904867, 0.136897],
  [0.993248, 0.906157, 0.143936]
];
/**
 * "Inferno" colormap: 256 [r, g, b] rows, channels in [0, 1],
 * suitable as the `colormap` argument of interpolateColormap().
 */
export const INFERNO_COLORMAP = [
  [0.001462, 0.000466, 0.013866],
  [0.002267, 0.001270, 0.018570],
  [0.003299, 0.002249, 0.024239],
  [0.004547, 0.003392, 0.030909],
  [0.006006, 0.004692, 0.038558],
  [0.007676, 0.006136, 0.046836],
  [0.009561, 0.007713, 0.055143],
  [0.011663, 0.009417, 0.063460],
  [0.013995, 0.011225, 0.071862],
  [0.016561, 0.013136, 0.080282],
  [0.019373, 0.015133, 0.088767],
  [0.022447, 0.017199, 0.097327],
  [0.025793, 0.019331, 0.105930],
  [0.029432, 0.021503, 0.114621],
  [0.033385, 0.023702, 0.123397],
  [0.037668, 0.025921, 0.132232],
  [0.042253, 0.028139, 0.141141],
  [0.046915, 0.030324, 0.150164],
  [0.051644, 0.032474, 0.159254],
  [0.056449, 0.034569, 0.168414],
  [0.061340, 0.036590, 0.177642],
  [0.066331, 0.038504, 0.186962],
  [0.071429, 0.040294, 0.196354],
  [0.076637, 0.041905, 0.205799],
  [0.081962, 0.043328, 0.215289],
  [0.087411, 0.044556, 0.224813],
  [0.092990, 0.045583, 0.234358],
  [0.098702, 0.046402, 0.243904],
  [0.104551, 0.047008, 0.253430],
  [0.110536, 0.047399, 0.262912],
  [0.116656, 0.047574, 0.272321],
  [0.122908, 0.047536, 0.281624],
  [0.129285, 0.047293, 0.290788],
  [0.135778, 0.046856, 0.299776],
  [0.142378, 0.046242, 0.308553],
  [0.149073, 0.045468, 0.317085],
  [0.155850, 0.044559, 0.325338],
  [0.162689, 0.043554, 0.333277],
  [0.169575, 0.042489, 0.340874],
  [0.176493, 0.041402, 0.348111],
  [0.183429, 0.040329, 0.354971],
  [0.190367, 0.039309, 0.361447],
  [0.197297, 0.038400, 0.367535],
  [0.204209, 0.037632, 0.373238],
  [0.211095, 0.037030, 0.378563],
  [0.217949, 0.036615, 0.383522],
  [0.224763, 0.036405, 0.388129],
  [0.231538, 0.036405, 0.392400],
  [0.238273, 0.036621, 0.396353],
  [0.244967, 0.037055, 0.400007],
  [0.251620, 0.037705, 0.403378],
  [0.258234, 0.038571, 0.406485],
  [0.264810, 0.039647, 0.409345],
  [0.271347, 0.040922, 0.411976],
  [0.277850, 0.042353, 0.414392],
  [0.284321, 0.043933, 0.416608],
  [0.290763, 0.045644, 0.418637],
  [0.297178, 0.047470, 0.420491],
  [0.303568, 0.049396, 0.422182],
  [0.309935, 0.051407, 0.423721],
  [0.316282, 0.053490, 0.425116],
  [0.322610, 0.055634, 0.426377],
  [0.328921, 0.057827, 0.427511],
  [0.335217, 0.060060, 0.428524],
  [0.341500, 0.062325, 0.429425],
  [0.347771, 0.064616, 0.430217],
  [0.354032, 0.066925, 0.430906],
  [0.360284, 0.069247, 0.431497],
  [0.366529, 0.071579, 0.431994],
  [0.372768, 0.073915, 0.432400],
  [0.379001, 0.076253, 0.432719],
  [0.385228, 0.078591, 0.432955],
  [0.391453, 0.080927, 0.433109],
  [0.397674, 0.083257, 0.433183],
  [0.403894, 0.085580, 0.433179],
  [0.410113, 0.087896, 0.433098],
  [0.416331, 0.090203, 0.432943],
  [0.422549, 0.092501, 0.432714],
  [0.428768, 0.094790, 0.432412],
  [0.434987, 0.097069, 0.432039],
  [0.441207, 0.099338, 0.431594],
  [0.447428, 0.101597, 0.431080],
  [0.453651, 0.103848, 0.430498],
  [0.459875, 0.106089, 0.429846],
  [0.466100, 0.108322, 0.429125],
  [0.472328, 0.110547, 0.428334],
  [0.478558, 0.112764, 0.427475],
  [0.484789, 0.114974, 0.426548],
  [0.491022, 0.117179, 0.425552],
  [0.497257, 0.119379, 0.424488],
  [0.503493, 0.121575, 0.423356],
  [0.509730, 0.123769, 0.422156],
  [0.515967, 0.125960, 0.420887],
  [0.522206, 0.128150, 0.419549],
  [0.528444, 0.130341, 0.418142],
  [0.534683, 0.132534, 0.416667],
  [0.540920, 0.134729, 0.415123],
  [0.547157, 0.136929, 0.413511],
  [0.553392, 0.139134, 0.411829],
  [0.559624, 0.141346, 0.410078],
  [0.565854, 0.143567, 0.408258],
  [0.572081, 0.145797, 0.406369],
  [0.578304, 0.148039, 0.404411],
  [0.584521, 0.150294, 0.402385],
  [0.590734, 0.152563, 0.400290],
  [0.596940, 0.154848, 0.398125],
  [0.603139, 0.157151, 0.395891],
  [0.609330, 0.159474, 0.393589],
  [0.615513, 0.161817, 0.391219],
  [0.621685, 0.164184, 0.388781],
  [0.627847, 0.166575, 0.386276],
  [0.633998, 0.168992, 0.383704],
  [0.640135, 0.171438, 0.381065],
  [0.646260, 0.173914, 0.378359],
  [0.652369, 0.176421, 0.375586],
  [0.658463, 0.178962, 0.372748],
  [0.664540, 0.181539, 0.369846],
  [0.670599, 0.184153, 0.366879],
  [0.676638, 0.186807, 0.363849],
  [0.682656, 0.189501, 0.360757],
  [0.688653, 0.192239, 0.357603],
  [0.694627, 0.195021, 0.354388],
  [0.700576, 0.197851, 0.351113],
  [0.706500, 0.200728, 0.347777],
  [0.712396, 0.203656, 0.344383],
  [0.718264, 0.206636, 0.340931],
  [0.724103, 0.209670, 0.337424],
  [0.729909, 0.212759, 0.333861],
  [0.735683, 0.215906, 0.330245],
  [0.741423, 0.219112, 0.326576],
  [0.747127, 0.222378, 0.322856],
  [0.752794, 0.225706, 0.319085],
  [0.758422, 0.229097, 0.315266],
  [0.764010, 0.232554, 0.311399],
  [0.769556, 0.236077, 0.307485],
  [0.775059, 0.239667, 0.303526],
  [0.780517, 0.243327, 0.299523],
  [0.785929, 0.247056, 0.295477],
  [0.791293, 0.250856, 0.291390],
  [0.796607, 0.254728, 0.287264],
  [0.801871, 0.258674, 0.283099],
  [0.807082, 0.262692, 0.278898],
  [0.812239, 0.266786, 0.274661],
  [0.817341, 0.270954, 0.270390],
  [0.822386, 0.275197, 0.266085],
  [0.827372, 0.279517, 0.261750],
  [0.832299, 0.283913, 0.257383],
  [0.837165, 0.288385, 0.252988],
  [0.841969, 0.292933, 0.248564],
  [0.846709, 0.297559, 0.244113],
  [0.851384, 0.302260, 0.239636],
  [0.855992, 0.307038, 0.235133],
  [0.860533, 0.311892, 0.230606],
  [0.865006, 0.316822, 0.226055],
  [0.869409, 0.321827, 0.221482],
  [0.873741, 0.326906, 0.216886],
  [0.878001, 0.332060, 0.212268],
  [0.882188, 0.337287, 0.207628],
  [0.886302, 0.342586, 0.202968],
  [0.890341, 0.347957, 0.198286],
  [0.894305, 0.353399, 0.193584],
  [0.898192, 0.358911, 0.188860],
  [0.902003, 0.364492, 0.184116],
  [0.905735, 0.370140, 0.179350],
  [0.909390, 0.375856, 0.174563],
  [0.912966, 0.381636, 0.169755],
  [0.916462, 0.387481, 0.164924],
  [0.919879, 0.393389, 0.160070],
  [0.923215, 0.399359, 0.155193],
  [0.926470, 0.405389, 0.150292],
  [0.929644, 0.411479, 0.145367],
  [0.932737, 0.417627, 0.140417],
  [0.935747, 0.423831, 0.135440],
  [0.938675, 0.430091, 0.130438],
  [0.941521, 0.436405, 0.125409],
  [0.944285, 0.442772, 0.120354],
  [0.946965, 0.449191, 0.115272],
  [0.949562, 0.455660, 0.110164],
  [0.952075, 0.462178, 0.105031],
  [0.954506, 0.468744, 0.099874],
  [0.956852, 0.475356, 0.094695],
  [0.959114, 0.482014, 0.089499],
  [0.961293, 0.488716, 0.084289],
  [0.963387, 0.495462, 0.079073],
  [0.965397, 0.502249, 0.073859],
  [0.967322, 0.509078, 0.068659],
  [0.969163, 0.515946, 0.063488],
  [0.970919, 0.522853, 0.058367],
  [0.972590, 0.529798, 0.053324],
  [0.974176, 0.536780, 0.048392],
  [0.975677, 0.543798, 0.043618],
  [0.977092, 0.550850, 0.039050],
  [0.978422, 0.557937, 0.034931],
  [0.979666, 0.565057, 0.031409],
  [0.980824, 0.572209, 0.028508],
  [0.981895, 0.579392, 0.026250],
  [0.982881, 0.586606, 0.024661],
  [0.983779, 0.593849, 0.023770],
  [0.984591, 0.601122, 0.023606],
  [0.985315, 0.608422, 0.024202],
  [0.985952, 0.615750, 0.025592],
  [0.986502, 0.623105, 0.027814],
  [0.986964, 0.630485, 0.030908],
  [0.987337, 0.637890, 0.034916],
  [0.987622, 0.645320, 0.039886],
  [0.987819, 0.652773, 0.045581],
  [0.987926, 0.660250, 0.051750],
  [0.987945, 0.667748, 0.058329],
  [0.987874, 0.675267, 0.065257],
  [0.987714, 0.682807, 0.072489],
  [0.987464, 0.690366, 0.079990],
  [0.987124, 0.697944, 0.087731],
  [0.986694, 0.705540, 0.095694],
  [0.986175, 0.713153, 0.103863],
  [0.985566, 0.720782, 0.112229],
  [0.984865, 0.728427, 0.120785],
  [0.984075, 0.736087, 0.129527],
  [0.983196, 0.743758, 0.138453],
  [0.982228, 0.751442, 0.147565],
  [0.981173, 0.759135, 0.156863],
  [0.980032, 0.766837, 0.166353],
  [0.978806, 0.774545, 0.176037],
  [0.977497, 0.782258, 0.185923],
  [0.976108, 0.789974, 0.196018],
  [0.974638, 0.797692, 0.206332],
  [0.973088, 0.805409, 0.216877],
  [0.971468, 0.813122, 0.227658],
  [0.969783, 0.820825, 0.238686],
  [0.968041, 0.828515, 0.249972],
  [0.966243, 0.836191, 0.261534],
  [0.964394, 0.843848, 0.273391],
  [0.962517, 0.851476, 0.285546],
  [0.960626, 0.859069, 0.298010],
  [0.958720, 0.866624, 0.310820],
  [0.956834, 0.874129, 0.323974],
  [0.954997, 0.881569, 0.337475],
  [0.953215, 0.888942, 0.351369],
  [0.951546, 0.896226, 0.365627],
  [0.950018, 0.903409, 0.380271],
  [0.948683, 0.910473, 0.395289],
  [0.947594, 0.917399, 0.410665],
  [0.946809, 0.924168, 0.426373],
  [0.946392, 0.930761, 0.442367],
  [0.946403, 0.937159, 0.458592],
  [0.946903, 0.943348, 0.474970],
  [0.947937, 0.949318, 0.491426],
  [0.949545, 0.955063, 0.507860],
  [0.951740, 0.960587, 0.524203],
  [0.954529, 0.965896, 0.540361],
  [0.957896, 0.971003, 0.556275],
  [0.961812, 0.975924, 0.571925],
  [0.966249, 0.980678, 0.587206],
  [0.971162, 0.985282, 0.602154],
  [0.976511, 0.989753, 0.616760],
  [0.982257, 0.994109, 0.631017],
  [0.988362, 0.998364, 0.644924]
];
/**
 * "Turbo"-style colormap: 256 [r, g, b] rows, channels in [0, 1],
 * suitable as the `colormap` argument of interpolateColormap().
 */
export const TURBO_COLORMAP = [
  [0.189950, 0.071760, 0.232170],
  [0.194830, 0.083390, 0.261490],
  [0.199560, 0.094980, 0.290240],
  [0.204150, 0.106520, 0.318440],
  [0.208600, 0.118020, 0.346070],
  [0.212910, 0.129470, 0.373140],
  [0.217080, 0.140870, 0.399640],
  [0.221110, 0.152230, 0.425580],
  [0.225000, 0.163540, 0.450960],
  [0.228750, 0.174810, 0.475780],
  [0.232360, 0.186030, 0.500040],
  [0.235820, 0.197200, 0.523730],
  [0.239150, 0.208330, 0.546860],
  [0.242340, 0.219410, 0.569420],
  [0.245390, 0.230440, 0.591420],
  [0.248300, 0.241430, 0.612860],
  [0.251070, 0.252370, 0.633740],
  [0.253690, 0.263270, 0.654060],
  [0.256180, 0.274120, 0.673810],
  [0.258530, 0.284920, 0.693000],
  [0.260740, 0.295680, 0.711620],
  [0.262800, 0.306390, 0.729680],
  [0.264730, 0.317060, 0.747180],
  [0.266520, 0.327680, 0.764120],
  [0.268160, 0.338250, 0.780500],
  [0.269670, 0.348780, 0.796310],
  [0.271030, 0.359260, 0.811560],
  [0.272260, 0.369700, 0.826240],
  [0.273340, 0.380080, 0.840370],
  [0.274290, 0.390430, 0.853930],
  [0.275090, 0.400720, 0.866920],
  [0.275760, 0.410970, 0.879360],
  [0.276280, 0.421180, 0.891230],
  [0.276670, 0.431340, 0.902540],
  [0.276910, 0.441450, 0.913280],
  [0.277010, 0.451520, 0.923470],
  [0.276980, 0.461530, 0.933090],
  [0.276800, 0.471510, 0.942140],
  [0.276480, 0.481440, 0.950640],
  [0.276030, 0.491320, 0.958570],
  [0.275430, 0.501150, 0.965940],
  [0.274690, 0.510940, 0.972750],
  [0.273810, 0.520690, 0.978990],
  [0.272730, 0.530400, 0.984610],
  [0.271060, 0.540150, 0.989300],
  [0.268780, 0.549950, 0.993030],
  [0.265920, 0.559790, 0.995830],
  [0.262520, 0.569670, 0.997730],
  [0.258620, 0.579580, 0.998760],
  [0.254250, 0.589500, 0.998960],
  [0.249460, 0.599430, 0.998350],
  [0.244270, 0.609370, 0.996970],
  [0.238740, 0.619310, 0.994850],
  [0.232880, 0.629230, 0.992020],
  [0.226760, 0.639130, 0.988510],
  [0.220390, 0.649010, 0.984360],
  [0.213820, 0.658860, 0.979590],
  [0.207080, 0.668660, 0.974230],
  [0.200210, 0.678420, 0.968330],
  [0.193260, 0.688120, 0.961900],
  [0.186250, 0.697750, 0.954980],
  [0.179230, 0.707320, 0.947610],
  [0.172230, 0.716800, 0.939810],
  [0.165290, 0.726200, 0.931610],
  [0.158440, 0.735510, 0.923050],
  [0.151730, 0.744720, 0.914160],
  [0.145190, 0.753810, 0.904960],
  [0.138860, 0.762790, 0.895500],
  [0.132780, 0.771650, 0.885800],
  [0.126980, 0.780370, 0.875900],
  [0.121510, 0.788960, 0.865810],
  [0.116390, 0.797400, 0.855590],
  [0.111670, 0.805690, 0.845250],
  [0.107380, 0.813810, 0.834840],
  [0.103570, 0.821770, 0.824370],
  [0.100260, 0.829550, 0.813890],
  [0.097500, 0.837140, 0.803420],
  [0.095320, 0.844550, 0.792990],
  [0.093770, 0.851750, 0.782640],
  [0.092870, 0.858750, 0.772400],
  [0.092670, 0.865540, 0.762300],
  [0.093200, 0.872110, 0.752370],
  [0.094510, 0.878440, 0.742650],
  [0.096620, 0.884540, 0.733160],
  [0.099580, 0.890400, 0.723930],
  [0.103420, 0.896000, 0.715000],
  [0.108150, 0.901420, 0.705990],
  [0.113740, 0.906730, 0.696510],
  [0.120140, 0.911930, 0.686600],
  [0.127330, 0.917010, 0.676270],
  [0.135260, 0.921970, 0.665560],
  [0.143910, 0.926800, 0.654480],
  [0.153230, 0.931510, 0.643080],
  [0.163190, 0.936090, 0.631370],
  [0.173770, 0.940530, 0.619380],
  [0.184910, 0.944840, 0.607130],
  [0.196590, 0.949010, 0.594660],
  [0.208770, 0.953040, 0.581990],
  [0.221420, 0.956920, 0.569140],
  [0.234490, 0.960650, 0.556140],
  [0.247970, 0.964230, 0.543030],
  [0.261800, 0.967650, 0.529810],
  [0.275970, 0.970920, 0.516530],
  [0.290420, 0.974030, 0.503210],
  [0.305130, 0.976970, 0.489870],
  [0.320060, 0.979740, 0.476540],
  [0.335170, 0.982340, 0.463250],
  [0.350430, 0.984770, 0.450020],
  [0.365810, 0.987020, 0.436880],
  [0.381270, 0.989090, 0.423860],
  [0.396780, 0.990980, 0.410980],
  [0.412290, 0.992680, 0.398260],
  [0.427780, 0.994190, 0.385750],
  [0.443210, 0.995510, 0.373450],
  [0.458540, 0.996630, 0.361400],
  [0.473750, 0.997550, 0.349630],
  [0.488790, 0.998280, 0.338160],
  [0.503620, 0.998790, 0.327010],
  [0.518220, 0.999100, 0.316220],
  [0.532550, 0.999190, 0.305810],
  [0.546580, 0.999070, 0.295810],
  [0.560260, 0.998730, 0.286230],
  [0.573570, 0.998170, 0.277120],
  [0.586460, 0.997390, 0.268490],
  [0.598910, 0.996380, 0.260380],
  [0.610880, 0.995140, 0.252800],
  [0.622330, 0.993660, 0.245790],
  [0.633230, 0.991950, 0.239370],
  [0.643620, 0.989990, 0.233560],
  [0.653940, 0.987750, 0.228350],
  [0.664280, 0.985240, 0.223700],
  [0.674620, 0.982460, 0.219600],
  [0.684940, 0.979410, 0.216020],
  [0.695250, 0.976100, 0.212940],
  [0.705530, 0.972550, 0.210320],
  [0.715770, 0.968750, 0.208150],
  [0.725960, 0.964700, 0.206400],
  [0.736100, 0.960430, 0.205040],
  [0.746170, 0.955930, 0.204060],
  [0.756170, 0.951210, 0.203430],
  [0.766080, 0.946270, 0.203110],
  [0.775910, 0.941130, 0.203100],
  [0.785630, 0.935790, 0.203360],
  [0.795240, 0.930250, 0.203860],
  [0.804730, 0.924520, 0.204590],
  [0.814100, 0.918610, 0.205520],
  [0.823330, 0.912530, 0.206630],
  [0.832410, 0.906270, 0.207880],
  [0.841330, 0.899860, 0.209260],
  [0.850100, 0.893280, 0.210740],
  [0.858680, 0.886550, 0.212300],
  [0.867090, 0.879680, 0.213910],
  [0.875300, 0.872670, 0.215550],
  [0.883310, 0.865530, 0.217190],
  [0.891120, 0.858260, 0.218800],
  [0.898700, 0.850870, 0.220380],
  [0.906050, 0.843370, 0.221880],
  [0.913170, 0.835760, 0.223280],
  [0.920040, 0.828060, 0.224560],
  [0.926660, 0.820250, 0.225700],
  [0.933010, 0.812360, 0.226670],
  [0.939090, 0.804390, 0.227440],
  [0.944890, 0.796340, 0.228000],
  [0.950390, 0.788230, 0.228310],
  [0.955600, 0.780050, 0.228360],
  [0.960490, 0.771810, 0.228110],
  [0.965070, 0.763520, 0.227540],
  [0.969310, 0.755190, 0.226630],
  [0.973230, 0.746820, 0.225360],
  [0.976790, 0.738420, 0.223690],
  [0.980000, 0.730000, 0.221610],
  [0.982890, 0.721400, 0.219180],
  [0.985490, 0.712500, 0.216500],
  [0.987810, 0.703300, 0.213580],
  [0.989860, 0.693820, 0.210430],
  [0.991630, 0.684080, 0.207060],
  [0.993140, 0.674080, 0.203480],
  [0.994380, 0.663860, 0.199710],
  [0.995350, 0.653410, 0.195770],
  [0.996070, 0.642770, 0.191650],
  [0.996540, 0.631930, 0.187380],
  [0.996750, 0.620930, 0.182970],
  [0.996720, 0.609770, 0.178420],
  [0.996440, 0.598460, 0.173760],
  [0.995930, 0.587030, 0.168990],
  [0.995170, 0.575490, 0.164120],
  [0.994190, 0.563860, 0.159180],
  [0.992970, 0.552140, 0.154170],
  [0.991530, 0.540360, 0.149100],
  [0.989870, 0.528540, 0.143980],
  [0.987990, 0.516670, 0.138830],
  [0.985900, 0.504790, 0.133670],
  [0.983600, 0.492910, 0.128490],
  [0.981080, 0.481040, 0.123320],
  [0.978370, 0.469200, 0.118170],
  [0.975450, 0.457400, 0.113050],
  [0.972340, 0.445650, 0.107970],
  [0.969040, 0.433990, 0.102940],
  [0.965550, 0.422410, 0.097980],
  [0.961870, 0.410930, 0.093100],
  [0.958010, 0.399580, 0.088310],
  [0.953980, 0.388360, 0.083620],
  [0.949770, 0.377290, 0.079050],
  [0.945380, 0.366380, 0.074610],
  [0.940840, 0.355660, 0.070310],
  [0.936120, 0.345130, 0.066160],
  [0.931250, 0.334820, 0.062180],
  [0.926230, 0.324730, 0.058370],
  [0.921050, 0.314890, 0.054750],
  [0.915720, 0.305300, 0.051340],
  [0.910240, 0.295990, 0.048140],
  [0.904630, 0.286960, 0.045160],
  [0.898880, 0.278240, 0.042430],
  [0.892980, 0.269810, 0.039930],
  [0.886910, 0.261520, 0.037530],
  [0.880660, 0.253340, 0.035210],
  [0.874220, 0.245260, 0.032970],
  [0.867600, 0.237300, 0.030820],
  [0.860790, 0.229450, 0.028750],
  [0.853800, 0.221700, 0.026770],
  [0.846620, 0.214070, 0.024870],
  [0.839260, 0.206540, 0.023050],
  [0.831720, 0.199120, 0.021310],
  [0.823990, 0.191820, 0.019660],
  [0.816080, 0.184620, 0.018090],
  [0.807990, 0.177530, 0.016600],
  [0.799710, 0.170550, 0.015200],
  [0.791250, 0.163680, 0.013870],
  [0.782600, 0.156930, 0.012640],
  [0.773770, 0.150280, 0.011480],
  [0.764760, 0.143740, 0.010410],
  [0.755560, 0.137310, 0.009420],
  [0.746170, 0.130980, 0.008510],
  [0.736610, 0.124770, 0.007690],
  [0.726860, 0.118670, 0.006950],
  [0.716920, 0.112680, 0.006290],
  [0.706800, 0.106800, 0.005710],
  [0.696500, 0.101020, 0.005220],
  [0.686020, 0.095360, 0.004810],
  [0.675350, 0.089800, 0.004490],
  [0.664490, 0.084360, 0.004240],
  [0.653450, 0.079020, 0.004080],
  [0.642230, 0.073800, 0.004010],
  [0.630820, 0.068680, 0.004010],
  [0.619230, 0.063670, 0.004100],
  [0.607460, 0.058780, 0.004270],
  [0.595500, 0.053990, 0.004530],
  [0.583360, 0.049310, 0.004860],
  [0.571030, 0.044740, 0.005290],
  [0.558520, 0.040280, 0.005790],
  [0.545830, 0.035930, 0.006380],
  [0.532950, 0.031690, 0.007050],
  [0.519890, 0.027560, 0.007800],
  [0.506640, 0.023540, 0.008630],
  [0.493210, 0.019630, 0.009550],
  [0.479600, 0.015830, 0.010550]
];
export const GRAY_COLORMAP = [
[0.000000, 0.000000, 0.000000],
[0.003922, 0.003922, 0.003922],
[0.007843, 0.007843, 0.007843],
[0.011765, 0.011765, 0.011765],
[0.015686, 0.015686, 0.015686],
[0.019608, 0.019608, 0.019608],
[0.023529, 0.023529, 0.023529],
[0.027451, 0.027451, 0.027451],
[0.031373, 0.031373, 0.031373],
[0.035294, 0.035294, 0.035294],
[0.039216, 0.039216, 0.039216],
[0.043137, 0.043137, 0.043137],
[0.047059, 0.047059, 0.047059],
[0.050980, 0.050980, 0.050980],
[0.054902, 0.054902, 0.054902],
[0.058824, 0.058824, 0.058824],
[0.062745, 0.062745, 0.062745],
[0.066667, 0.066667, 0.066667],
[0.070588, 0.070588, 0.070588],
[0.074510, 0.074510, 0.074510],
[0.078431, 0.078431, 0.078431],
[0.082353, 0.082353, 0.082353],
[0.086275, 0.086275, 0.086275],
[0.090196, 0.090196, 0.090196],
[0.094118, 0.094118, 0.094118],
[0.098039, 0.098039, 0.098039],
[0.101961, 0.101961, 0.101961],
[0.105882, 0.105882, 0.105882],
[0.109804, 0.109804, 0.109804],
[0.113725, 0.113725, 0.113725],
[0.117647, 0.117647, 0.117647],
[0.121569, 0.121569, 0.121569],
[0.125490, 0.125490, 0.125490],
[0.129412, 0.129412, 0.129412],
[0.133333, 0.133333, 0.133333],
[0.137255, 0.137255, 0.137255],
[0.141176, 0.141176, 0.141176],
[0.145098, 0.145098, 0.145098],
[0.149020, 0.149020, 0.149020],
[0.152941, 0.152941, 0.152941],
[0.156863, 0.156863, 0.156863],
[0.160784, 0.160784, 0.160784],
[0.164706, 0.164706, 0.164706],
[0.168627, 0.168627, 0.168627],
[0.172549, 0.172549, 0.172549],
[0.176471, 0.176471, 0.176471],
[0.180392, 0.180392, 0.180392],
[0.184314, 0.184314, 0.184314],
[0.188235, 0.188235, 0.188235],
[0.192157, 0.192157, 0.192157],
[0.196078, 0.196078, 0.196078],
[0.200000, 0.200000, 0.200000],
[0.203922, 0.203922, 0.203922],
[0.207843, 0.207843, 0.207843],
[0.211765, 0.211765, 0.211765],
[0.215686, 0.215686, 0.215686],
[0.219608, 0.219608, 0.219608],
[0.223529, 0.223529, 0.223529],
[0.227451, 0.227451, 0.227451],
[0.231373, 0.231373, 0.231373],
[0.235294, 0.235294, 0.235294],
[0.239216, 0.239216, 0.239216],
[0.243137, 0.243137, 0.243137],
[0.247059, 0.247059, 0.247059],
[0.250980, 0.250980, 0.250980],
[0.254902, 0.254902, 0.254902],
[0.258824, 0.258824, 0.258824],
[0.262745, 0.262745, 0.262745],
[0.266667, 0.266667, 0.266667],
[0.270588, 0.270588, 0.270588],
[0.274510, 0.274510, 0.274510],
[0.278431, 0.278431, 0.278431],
[0.282353, 0.282353, 0.282353],
[0.286275, 0.286275, 0.286275],
[0.290196, 0.290196, 0.290196],
[0.294118, 0.294118, 0.294118],
[0.298039, 0.298039, 0.298039],
[0.301961, 0.301961, 0.301961],
[0.305882, 0.305882, 0.305882],
[0.309804, 0.309804, 0.309804],
[0.313725, 0.313725, 0.313725],
[0.317647, 0.317647, 0.317647],
[0.321569, 0.321569, 0.321569],
[0.325490, 0.325490, 0.325490],
[0.329412, 0.329412, 0.329412],
[0.333333, 0.333333, 0.333333],
[0.337255, 0.337255, 0.337255],
[0.341176, 0.341176, 0.341176],
[0.345098, 0.345098, 0.345098],
[0.349020, 0.349020, 0.349020],
[0.352941, 0.352941, 0.352941],
[0.356863, 0.356863, 0.356863],
[0.360784, 0.360784, 0.360784],
[0.364706, 0.364706, 0.364706],
[0.368627, 0.368627, 0.368627],
[0.372549, 0.372549, 0.372549],
[0.376471, 0.376471, 0.376471],
[0.380392, 0.380392, 0.380392],
[0.384314, 0.384314, 0.384314],
[0.388235, 0.388235, 0.388235],
[0.392157, 0.392157, 0.392157],
[0.396078, 0.396078, 0.396078],
[0.400000, 0.400000, 0.400000],
[0.403922, 0.403922, 0.403922],
[0.407843, 0.407843, 0.407843],
[0.411765, 0.411765, 0.411765],
[0.415686, 0.415686, 0.415686],
[0.419608, 0.419608, 0.419608],
[0.423529, 0.423529, 0.423529],
[0.427451, 0.427451, 0.427451],
[0.431373, 0.431373, 0.431373],
[0.435294, 0.435294, 0.435294],
[0.439216, 0.439216, 0.439216],
[0.443137, 0.443137, 0.443137],
[0.447059, 0.447059, 0.447059],
[0.450980, 0.450980, 0.450980],
[0.454902, 0.454902, 0.454902],
[0.458824, 0.458824, 0.458824],
[0.462745, 0.462745, 0.462745],
[0.466667, 0.466667, 0.466667],
[0.470588, 0.470588, 0.470588],
[0.474510, 0.474510, 0.474510],
[0.478431, 0.478431, 0.478431],
[0.482353, 0.482353, 0.482353],
[0.486275, 0.486275, 0.486275],
[0.490196, 0.490196, 0.490196],
[0.494118, 0.494118, 0.494118],
[0.498039, 0.498039, 0.498039],
[0.501961, 0.501961, 0.501961],
[0.505882, 0.505882, 0.505882],
[0.509804, 0.509804, 0.509804],
[0.513725, 0.513725, 0.513725],
[0.517647, 0.517647, 0.517647],
[0.521569, 0.521569, 0.521569],
[0.525490, 0.525490, 0.525490],
[0.529412, 0.529412, 0.529412],
[0.533333, 0.533333, 0.533333],
[0.537255, 0.537255, 0.537255],
[0.541176, 0.541176, 0.541176],
[0.545098, 0.545098, 0.545098],
[0.549020, 0.549020, 0.549020],
[0.552941, 0.552941, 0.552941],
[0.556863, 0.556863, 0.556863],
[0.560784, 0.560784, 0.560784],
[0.564706, 0.564706, 0.564706],
[0.568627, 0.568627, 0.568627],
[0.572549, 0.572549, 0.572549],
[0.576471, 0.576471, 0.576471],
[0.580392, 0.580392, 0.580392],
[0.584314, 0.584314, 0.584314],
[0.588235, 0.588235, 0.588235],
[0.592157, 0.592157, 0.592157],
[0.596078, 0.596078, 0.596078],
[0.600000, 0.600000, 0.600000],
[0.603922, 0.603922, 0.603922],
[0.607843, 0.607843, 0.607843],
[0.611765, 0.611765, 0.611765],
[0.615686, 0.615686, 0.615686],
[0.619608, 0.619608, 0.619608],
[0.623529, 0.623529, 0.623529],
[0.627451, 0.627451, 0.627451],
[0.631373, 0.631373, 0.631373],
[0.635294, 0.635294, 0.635294],
[0.639216, 0.639216, 0.639216],
[0.643137, 0.643137, 0.643137],
[0.647059, 0.647059, 0.647059],
[0.650980, 0.650980, 0.650980],
[0.654902, 0.654902, 0.654902],
[0.658824, 0.658824, 0.658824],
[0.662745, 0.662745, 0.662745],
[0.666667, 0.666667, 0.666667],
[0.670588, 0.670588, 0.670588],
[0.674510, 0.674510, 0.674510],
[0.678431, 0.678431, 0.678431],
[0.682353, 0.682353, 0.682353],
[0.686275, 0.686275, 0.686275],
[0.690196, 0.690196, 0.690196],
[0.694118, 0.694118, 0.694118],
[0.698039, 0.698039, 0.698039],
[0.701961, 0.701961, 0.701961],
[0.705882, 0.705882, 0.705882],
[0.709804, 0.709804, 0.709804],
[0.713725, 0.713725, 0.713725],
[0.717647, 0.717647, 0.717647],
[0.721569, 0.721569, 0.721569],
[0.725490, 0.725490, 0.725490],
[0.729412, 0.729412, 0.729412],
[0.733333, 0.733333, 0.733333],
[0.737255, 0.737255, 0.737255],
[0.741176, 0.741176, 0.741176],
[0.745098, 0.745098, 0.745098],
[0.749020, 0.749020, 0.749020],
[0.752941, 0.752941, 0.752941],
[0.756863, 0.756863, 0.756863],
[0.760784, 0.760784, 0.760784],
[0.764706, 0.764706, 0.764706],
[0.768627, 0.768627, 0.768627],
[0.772549, 0.772549, 0.772549],
[0.776471, 0.776471, 0.776471],
[0.780392, 0.780392, 0.780392],
[0.784314, 0.784314, 0.784314],
[0.788235, 0.788235, 0.788235],
[0.792157, 0.792157, 0.792157],
[0.796078, 0.796078, 0.796078],
[0.800000, 0.800000, 0.800000],
[0.803922, 0.803922, 0.803922],
[0.807843, 0.807843, 0.807843],
[0.811765, 0.811765, 0.811765],
[0.815686, 0.815686, 0.815686],
[0.819608, 0.819608, 0.819608],
[0.823529, 0.823529, 0.823529],
[0.827451, 0.827451, 0.827451],
[0.831373, 0.831373, 0.831373],
[0.835294, 0.835294, 0.835294],
[0.839216, 0.839216, 0.839216],
[0.843137, 0.843137, 0.843137],
[0.847059, 0.847059, 0.847059],
[0.850980, 0.850980, 0.850980],
[0.854902, 0.854902, 0.854902],
[0.858824, 0.858824, 0.858824],
[0.862745, 0.862745, 0.862745],
[0.866667, 0.866667, 0.866667],
[0.870588, 0.870588, 0.870588],
[0.874510, 0.874510, 0.874510],
[0.878431, 0.878431, 0.878431],
[0.882353, 0.882353, 0.882353],
[0.886275, 0.886275, 0.886275],
[0.890196, 0.890196, 0.890196],
[0.894118, 0.894118, 0.894118],
[0.898039, 0.898039, 0.898039],
[0.901961, 0.901961, 0.901961],
[0.905882, 0.905882, 0.905882],
[0.909804, 0.909804, 0.909804],
[0.913725, 0.913725, 0.913725],
[0.917647, 0.917647, 0.917647],
[0.921569, 0.921569, 0.921569],
[0.925490, 0.925490, 0.925490],
[0.929412, 0.929412, 0.929412],
[0.933333, 0.933333, 0.933333],
[0.937255, 0.937255, 0.937255],
[0.941176, 0.941176, 0.941176],
[0.945098, 0.945098, 0.945098],
[0.949020, 0.949020, 0.949020],
[0.952941, 0.952941, 0.952941],
[0.956863, 0.956863, 0.956863],
[0.960784, 0.960784, 0.960784],
[0.964706, 0.964706, 0.964706],
[0.968627, 0.968627, 0.968627],
[0.972549, 0.972549, 0.972549],
[0.976471, 0.976471, 0.976471],
[0.980392, 0.980392, 0.980392],
[0.984314, 0.984314, 0.984314],
[0.988235, 0.988235, 0.988235],
[0.992157, 0.992157, 0.992157],
[0.996078, 0.996078, 0.996078],
[1.000000, 1.000000, 1.000000]
]; | the_stack |
import { System } from '../system'
import {
IPositionCallback,
IPositionEntry,
IPositionObserver,
} from '../types/global/IPositionObserver'
import { Unlisten } from '../types/Unlisten'
import callAll from '../util/call/callAll'
import { parseTransformXY } from './parseTransformXY'
import { animateThrottle } from './throttle'
import {
addVector,
angleToRad,
rotateVector,
subtractVector,
} from './util/geometry'
export class PositionObserver implements IPositionObserver {
private _system: System
private _callback: IPositionCallback
private _unlisten: () => void
private _abort: () => void
constructor(system: System, callback: IPositionCallback) {
this._system = system
this._callback = callback
}
public observe(element: HTMLElement): IPositionEntry {
const {
api: {
document: { MutationObserver, ResizeObserver },
},
} = this._system
if (this._abort) {
this._abort()
this._abort = undefined
}
// console.log(element)
const { isConnected } = element
if (!isConnected) {
console.log('PositionObserver', 'observe', '!isConnected')
return { x: 0, y: 0, sx: 1, sy: 1, rx: 0, ry: 0, rz: 0 }
// throw new Error('element is not mounted')
}
let x: number = 0
let y: number = 0
let sx: number = 1
let sy: number = 1
let rx: number = 0
let ry: number = 0
let rz: number = 0
let width: number = 0
let height: number = 0
let offset_x: number = 0
let offset_y: number = 0
let transform_x: number = 0
let transform_y: number = 0
let scale_x: number = 1
let scale_y: number = 1
let rotate_x: number = 0
let rotate_y: number = 0
let rotate_z: number = 0
let parent_x: number = 0
let parent_y: number = 0
let parent_scroll_top = 0
let parent_scroll_left = 0
let parent_scale_x = 1
let parent_scale_y = 1
let parent_rx = 0
let parent_ry = 0
let parent_rz = 0
let _transform: string | undefined
const _update_local = (): void => {
__update_local()
update()
}
const __update_local = (): void => {
const { offsetLeft, offsetTop, offsetWidth, offsetHeight, style } =
element
offset_x = offsetLeft
offset_y = offsetTop
const { transform } = style
if (
transform !== _transform ||
width !== offsetWidth ||
height !== offsetHeight
) {
if (transform) {
const [
_transform_x,
_transform_y,
_scale_x,
_scale_y,
_rotate_x,
_rotate_y,
_rotate_z,
] = parseTransformXY(transform, offsetWidth, offsetHeight)
transform_x = _transform_x
transform_y = _transform_y
scale_x = _scale_x
scale_y = _scale_y
rotate_x = _rotate_x
rotate_y = _rotate_y
rotate_z = _rotate_z
} else {
transform_x = 0
transform_y = 0
scale_x = 1
scale_y = 1
rotate_x = 0
rotate_y = 0
rotate_z = 0
}
_transform = transform
}
width = offsetWidth
height = offsetHeight
}
const { f: update_local, abort } = animateThrottle(_update_local)
this._abort = abort
// const update_local = _update_local
const _update = (): void => {
sx = scale_x * parent_scale_x
sy = scale_y * parent_scale_y
const rxr = angleToRad(rotate_x)
const ryr = angleToRad(rotate_y)
const rzr = angleToRad(rotate_z)
rx = rxr + parent_rx
ry = ryr + parent_ry
rz = rzr + parent_rz
const parent_rz_cos = Math.cos(parent_rz)
const parent_rz_sin = Math.sin(parent_rz)
const local_x = offset_x + transform_x
const local_y = offset_y + transform_y
const scaled_local_x = local_x * parent_scale_x
const scaled_local_y = local_y * parent_scale_y
const scaled_rotated_local_x =
scaled_local_x * parent_rz_cos - scaled_local_y * parent_rz_sin
const scaled_rotated_local_y =
scaled_local_y * parent_rz_cos + scaled_local_x * parent_rz_sin
const px =
parent_x -
parent_scroll_left +
scaled_rotated_local_x * scale_x -
((width * sx) / 2) * (scale_x - 1)
const py =
parent_y -
parent_scroll_top +
scaled_rotated_local_y * scale_y -
((height * sy) / 2) * (scale_y - 1)
const cx =
px +
(width * parent_scale_x * parent_rz_cos -
height * parent_scale_y * parent_rz_sin) /
2
const cy =
py +
(width * parent_scale_y * parent_rz_sin +
height * parent_scale_x * parent_rz_cos) /
2
const c = { x: cx, y: cy }
const p = { x: px, y: py }
const cp = subtractVector(p, c)
const rcp = rotateVector(cp, rzr)
const fp = addVector(c, rcp)
x = fp.x
y = fp.y
}
const update = (): void => {
_update()
this._callback({ x, y, sx, sy, rx, ry, rz })
}
const callback = function (mutationsList) {
// for (const mutation of mutationsList) {
// console.log('element', mutation)
// if (mutation.type === 'childList') {
// } else if (mutation.type === 'attributes') {
// }
// }
update_local()
}
const config = {
childList: false,
subtree: false,
attributes: true,
attributeFilter: ['style'],
}
const mutationObserver = new MutationObserver(callback)
mutationObserver.observe(element, config)
const unlisten_self = () => {
mutationObserver.disconnect()
}
const update_parent = (): (() => void) => {
const { offsetParent, parentElement } = element
const targetParent = offsetParent || parentElement
if (targetParent) {
const scrollParentUnlisten: Unlisten[] = []
const pushScrollParent = (p: Element) => {
const { scrollLeft, scrollTop } = p
parent_scroll_top += scrollTop
parent_scroll_left += scrollLeft
let _scrollLeft = scrollLeft
let _scrollTop = scrollTop
const parentScrollListener = function () {
const { scrollLeft, scrollTop } = p
parent_scroll_left += scrollLeft - _scrollLeft
parent_scroll_top += scrollTop - _scrollTop
_scrollLeft = scrollLeft
_scrollTop = scrollTop
update()
}
const { f: _parentScrollListener } =
animateThrottle(parentScrollListener)
p.addEventListener('scroll', _parentScrollListener, {
passive: true,
})
const unlisten = () => {
p.removeEventListener('scroll', _parentScrollListener)
}
scrollParentUnlisten.push(unlisten)
}
let p = parentElement
while (p !== targetParent) {
pushScrollParent(p)
p = p.parentElement
}
pushScrollParent(targetParent)
const unlitenScroll = callAll(scrollParentUnlisten)
const parentConfig = {
childList: true,
subtree: false,
attributes: true,
attributeFilter: ['style'],
}
const parentMutationCallback: MutationCallback = (mutationsList) => {
// for (const mutation of mutationsList) {
// // console.log('parent', mutation)
// if (mutation.type === 'childList') {
// const { removedNodes } = mutation
// const removedNodesLength = removedNodes.length
// for (let i = 0; i < removedNodesLength; i++) {
// const removedNode = removedNodes.item(i)
// if (removedNode === element) {
// break
// }
// }
// } else if (mutation.type === 'attributes') {
// }
// }
update_local()
}
const parentMutationObserver = new MutationObserver(
parentMutationCallback
)
parentMutationObserver.observe(targetParent, parentConfig)
const parentPositionCallback = ({
x: _parent_x,
y: _parent_y,
sx: _parent_scale_x,
sy: _parent_scale_y,
rx: _parent_rotate_x,
ry: _parent_rotate_y,
rz: _parent_rotate_z,
}) => {
parent_x = _parent_x
parent_y = _parent_y
parent_scale_x = _parent_scale_x
parent_scale_y = _parent_scale_y
parent_rx = _parent_rotate_x
parent_ry = _parent_rotate_y
parent_rz = _parent_rotate_z
update_local()
}
const parentPostionObserver = new PositionObserver(
this._system,
parentPositionCallback
)
const {
x: _parent_x,
y: _parent_y,
sx: _parent_scale_x,
sy: _parent_scale_y,
} = parentPostionObserver.observe(targetParent as HTMLElement)
parent_x = _parent_x
parent_y = _parent_y
parent_scale_x = _parent_scale_x
parent_scale_y = _parent_scale_y
_update()
const parentResizeObserverCallback = () => {
update_local()
}
const parentResizeObserver = new ResizeObserver(
parentResizeObserverCallback
)
parentResizeObserver.observe(targetParent)
return () => {
unlitenScroll()
parentResizeObserver.disconnect()
parentMutationObserver.disconnect()
parentPostionObserver.disconnect()
}
} else {
return () => {}
}
}
const unlisten_parent = update_parent()
const unlisten = () => {
unlisten_self()
unlisten_parent()
}
this._unlisten = unlisten
// update_local()
__update_local()
_update()
return { x, y, sx, sy, rx, ry, rz }
}
disconnect() {
if (this._unlisten) {
this._unlisten()
this._unlisten = undefined
}
}
} | the_stack |
import {concat, map, share, switchMap} from 'rxjs/operators';
import {Injectable} from '@angular/core';
import {Observable, AsyncSubject, of} from 'rxjs';
import {Apollo, QueryRef} from 'apollo-angular';
import * as moment from 'moment';
import {
GetChatsGQL,
GetChatGQL,
AddMessageGQL,
RemoveChatGQL,
RemoveMessagesGQL,
RemoveAllMessagesGQL,
GetUsersGQL,
AddChatGQL,
AddGroupGQL,
ChatAddedGQL,
MessageAddedGQL,
AddMessage,
GetChats,
GetChat,
RemoveMessages,
RemoveAllMessages,
GetUsers,
AddChat,
AddGroup,
MessageAdded,
} from '../../graphql';
import { DataProxy } from 'apollo-cache';
import { FetchResult } from 'apollo-link';
import {LoginService} from '../login/services/login.service';
const currentUserId = '1';
const currentUserName = 'Ethan Gonzalez';
@Injectable()
export class ChatsService {
messagesAmount = 3;
getChatsWq: QueryRef<GetChats.Query, GetChats.Variables>;
chats$: Observable<GetChats.Chats[]>;
chats: GetChats.Chats[];
getChatWqSubject: AsyncSubject<QueryRef<GetChat.Query>>;
addChat$: Observable<FetchResult<AddChat.Mutation | AddGroup.Mutation>>;
constructor(
private getChatsGQL: GetChatsGQL,
private getChatGQL: GetChatGQL,
private addMessageGQL: AddMessageGQL,
private removeChatGQL: RemoveChatGQL,
private removeMessagesGQL: RemoveMessagesGQL,
private removeAllMessagesGQL: RemoveAllMessagesGQL,
private getUsersGQL: GetUsersGQL,
private addChatGQL: AddChatGQL,
private addGroupGQL: AddGroupGQL,
private chatAddedGQL: ChatAddedGQL,
private messageAddedGQL: MessageAddedGQL,
private apollo: Apollo,
private loginService: LoginService
) {
this.getChatsWq = this.getChatsGQL.watch({
amount: this.messagesAmount,
});
this.getChatsWq.subscribeToMore({
document: this.chatAddedGQL.document,
updateQuery: (prev: GetChats.Query, { subscriptionData }) => {
if (!subscriptionData.data) {
return prev;
}
const newChat: GetChats.Chats = (<any>subscriptionData).data.chatAdded;
return Object.assign({}, prev, {
chats: [...prev.chats, newChat]
});
}
});
this.getChatsWq.subscribeToMore({
document: this.messageAddedGQL.document,
updateQuery: (prev: GetChats.Query, { subscriptionData }) => {
if (!subscriptionData.data) {
return prev;
}
const newMessage: MessageAdded.MessageAdded = (<any>subscriptionData).data.messageAdded;
// We need to update the cache for both Chat and Chats. The following updates the cache for Chat.
try {
// Read the data from our cache for this query.
const {chat}: GetChat.Query = this.apollo.getClient().readQuery({
query: this.getChatGQL.document,
variables: {
chatId: newMessage.chat.id,
}
});
// Add our message from the mutation to the end.
chat.messages.push(newMessage);
// Write our data back to the cache.
this.apollo.getClient().writeQuery({
query: this.getChatGQL.document,
data: {
chat
}
});
} catch {
console.error('The chat we received an update for does not exist in the store');
}
return Object.assign({}, prev, {
chats: [...prev.chats.map(_chat =>
_chat.id === newMessage.chat.id ? {..._chat, messages: [..._chat.messages, newMessage]} : _chat)]
});
}
});
this.chats$ = this.getChatsWq.valueChanges.pipe(
map((result) => result.data.chats)
);
this.chats$.subscribe(chats => this.chats = chats);
}
static getRandomId() {
return String(Math.round(Math.random() * 1000000000000));
}
getChats() {
return {query: this.getChatsWq, chats$: this.chats$};
}
getChat(chatId: string, oui?: boolean) {
const _chat = this.chats && this.chats.find(chat => chat.id === chatId) || {
id: chatId,
name: '',
picture: null,
allTimeMembers: [],
unreadMessages: 0,
isGroup: false,
messages: [],
};
const chat$FromCache = of<GetChat.Chat>(_chat);
const getApolloWatchQuery = (id: string) => {
return this.getChatGQL.watch({
chatId: id,
});
};
let chat$: Observable<GetChat.Chat>;
this.getChatWqSubject = new AsyncSubject();
if (oui) {
chat$ = chat$FromCache.pipe(
concat(this.addChat$.pipe(
switchMap(({ data }) => {
const id = (<AddChat.Mutation>data).addChat ? (<AddChat.Mutation>data).addChat.id : (<AddGroup.Mutation>data).addGroup.id;
const query = getApolloWatchQuery(id);
this.getChatWqSubject.next(query);
this.getChatWqSubject.complete();
return query.valueChanges.pipe(
map((result) => result.data.chat)
);
}))
));
} else {
const query = getApolloWatchQuery(chatId);
this.getChatWqSubject.next(query);
this.getChatWqSubject.complete();
chat$ = chat$FromCache.pipe(
concat(
query.valueChanges.pipe(
map((result) => result.data.chat)
)
)
);
}
return {query$: this.getChatWqSubject.asObservable(), chat$};
}
addMessage(chatId: string, content: string) {
return this.addMessageGQL.mutate({
chatId,
content,
}, {
optimisticResponse: {
__typename: 'Mutation',
addMessage: {
__typename: 'Message',
id: ChatsService.getRandomId(),
chat: {
__typename: 'Chat',
id: chatId,
},
sender: {
__typename: 'User',
id: this.loginService.getUser().id,
name: this.loginService.getUser().name,
},
content,
createdAt: moment().unix(),
type: 1,
recipients: [],
ownership: true,
},
},
update: (store, { data: { addMessage } }: {data: AddMessage.Mutation}) => {
// Update the messages cache
{
// Read the data from our cache for this query.
const {chat} = store.readQuery<GetChat.Query, GetChat.Variables>({
query: this.getChatGQL.document,
variables: {
chatId,
}
});
// Add our message from the mutation to the end.
chat.messages.push(addMessage);
// Write our data back to the cache.
store.writeQuery({
query: this.getChatGQL.document,
data: {
chat
}
});
}
// Update last message cache
{
// Read the data from our cache for this query.
const {chats} = store.readQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
});
// Add our comment from the mutation to the end.
chats.find(chat => chat.id === chatId).messages.push(addMessage);
// Write our data back to the cache.
store.writeQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
data: {
chats,
},
});
}
}
});
}
removeChat(chatId: string) {
return this.removeChatGQL.mutate(
{
chatId,
}, {
optimisticResponse: {
__typename: 'Mutation',
removeChat: chatId,
},
update: (store, { data: { removeChat } }) => {
// Read the data from our cache for this query.
const {chats} = store.readQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
});
// Remove the chat (mutable)
for (const index of chats.keys()) {
if (chats[index].id === removeChat) {
chats.splice(index, 1);
}
}
// Write our data back to the cache.
store.writeQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
data: {
chats,
},
});
},
}
);
}
removeMessages(chatId: string, messages: GetChat.Messages[], messageIdsOrAll: string[] | boolean) {
let ids: string[] = [];
const options = {
optimisticResponse: () => ({
__typename: 'Mutation',
removeMessages: ids,
}),
update: (store: DataProxy, { data: { removeMessages } }: {data: RemoveMessages.Mutation | RemoveAllMessages.Mutation}) => {
// Update the messages cache
{
// Read the data from our cache for this query.
const {chat} = store.readQuery<GetChat.Query, GetChat.Variables>({
query: this.getChatGQL.document,
variables: {
chatId,
}
});
// Remove the messages (mutable)
removeMessages.forEach(messageId => {
for (const index of chat.messages.keys()) {
if (chat.messages[index].id === messageId) {
chat.messages.splice(index, 1);
}
}
});
// Write our data back to the cache.
store.writeQuery<GetChat.Query, GetChat.Variables>({
query: this.getChatGQL.document,
data: {
chat
}
});
}
// Update last message cache
{
// Read the data from our cache for this query.
const {chats} = store.readQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
});
// Fix last message
chats.find(chat => chat.id === chatId).messages = messages
.filter(message => !ids.includes(message.id));
// Write our data back to the cache.
store.writeQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
data: {
chats,
},
});
}
}
};
if (typeof messageIdsOrAll === 'boolean') {
ids = messages.map(message => message.id);
return this.removeAllMessagesGQL.mutate({
chatId,
all: messageIdsOrAll
}, options);
} else {
ids = messageIdsOrAll;
return this.removeMessagesGQL.mutate({
chatId,
messageIds: messageIdsOrAll,
}, options);
}
}
getUsers() {
const query = this.getUsersGQL.watch();
const users$ = query.valueChanges.pipe(
map((result) => result.data.users)
);
return {query, users$};
}
// Checks if the chat is listed for the current user and returns the id
getChatId(userId: string) {
const _chat = this.chats.find(chat => {
return !chat.isGroup && !!chat.allTimeMembers.find(user => user.id === this.loginService.getUser().id) &&
!!chat.allTimeMembers.find(user => user.id === userId);
});
return _chat ? _chat.id : false;
}
addChat(userId: string, users: GetUsers.Users[], ouiId: string) {
this.addChat$ = this.addChatGQL.mutate(
{
userId,
}, {
optimisticResponse: {
__typename: 'Mutation',
addChat: {
__typename: 'Chat',
id: ouiId,
name: users.find(user => user.id === userId).name,
picture: users.find(user => user.id === userId).picture,
allTimeMembers: [
{
id: this.loginService.getUser().id,
__typename: 'User',
},
{
id: userId,
__typename: 'User',
}
],
unreadMessages: 0,
messages: [],
isGroup: false,
},
},
update: (store, { data: { addChat } }) => {
// Read the data from our cache for this query.
const {chats} = store.readQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
});
// Add our comment from the mutation to the end.
chats.push(addChat);
// Write our data back to the cache.
store.writeQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
data: {
chats,
},
});
},
}
).pipe(share());
return this.addChat$;
}
addGroup(userIds: string[], groupName: string, ouiId: string) {
this.addChat$ = this.addGroupGQL.mutate(
{
userIds,
groupName,
}, {
optimisticResponse: {
__typename: 'Mutation',
addGroup: {
__typename: 'Chat',
id: ouiId,
name: groupName,
picture: 'https://randomuser.me/api/portraits/thumb/lego/1.jpg',
userIds: [this.loginService.getUser().id, userIds],
allTimeMembers: [
{
id: this.loginService.getUser().id,
__typename: 'User',
},
...userIds.map(id => ({id, __typename: 'User'})),
],
unreadMessages: 0,
messages: [],
isGroup: true,
},
},
update: (store, { data: { addGroup } }) => {
// Read the data from our cache for this query.
const {chats} = store.readQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
});
// Add our comment from the mutation to the end.
chats.push(addGroup);
// Write our data back to the cache.
store.writeQuery<GetChats.Query, GetChats.Variables>({
query: this.getChatsGQL.document,
variables: {
amount: this.messagesAmount,
},
data: {
chats,
},
});
},
}
).pipe(share());
return this.addChat$;
}
} | the_stack |
import UndoPlugin from '../../lib/corePlugins/UndoPlugin';
import { IEditor, Keys, PluginEventType, UndoPluginState } from 'roosterjs-editor-types';
import { Position } from 'roosterjs-editor-dom';
describe('UndoPlugin', () => {
let plugin: UndoPlugin;
let state: UndoPluginState;
let editor: IEditor;
let isInIME: jasmine.Spy;
let addUndoSnapshot: jasmine.Spy;
beforeEach(() => {
plugin = new UndoPlugin({});
state = plugin.getState();
isInIME = jasmine.createSpy('isInIME');
addUndoSnapshot = jasmine.createSpy('addUndoSnapshot');
editor = <IEditor>(<any>{
isInIME,
addUndoSnapshot,
});
plugin.initialize(editor);
});
afterEach(() => {
plugin.dispose();
plugin = null;
state = null;
editor = null;
isInIME = null;
});
it('init', () => {
expect(state.hasNewContent).toBeFalse();
expect(state.isRestoring).toBeFalse();
expect(state.isNested).toBeFalsy();
expect(state.snapshotsService).toBeDefined();
});
it('editor ready event', () => {
let getUndoState = jasmine.createSpy('getUndoState').and.returnValue({
canUndo: false,
canRedo: false,
});
editor.getUndoState = getUndoState;
plugin.onPluginEvent({
eventType: PluginEventType.EditorReady,
});
expect(isInIME).toHaveBeenCalled();
expect(getUndoState).toHaveBeenCalled();
expect(addUndoSnapshot).toHaveBeenCalled();
});
it('editor ready event where can undo', () => {
let getUndoState = jasmine.createSpy('getUndoState').and.returnValue({
canUndo: true,
canRedo: false,
});
editor.getUndoState = getUndoState;
plugin.onPluginEvent({
eventType: PluginEventType.EditorReady,
});
expect(isInIME).toHaveBeenCalled();
expect(getUndoState).toHaveBeenCalled();
expect(addUndoSnapshot).not.toHaveBeenCalledWith(state);
});
it('editor ready event where can redo', () => {
let getUndoState = jasmine.createSpy('getUndoState').and.returnValue({
canUndo: false,
canRedo: true,
});
editor.getUndoState = getUndoState;
plugin.onPluginEvent({
eventType: PluginEventType.EditorReady,
});
expect(isInIME).toHaveBeenCalled();
expect(getUndoState).toHaveBeenCalled();
expect(addUndoSnapshot).not.toHaveBeenCalledWith(state);
});
it('key down event with BACKSPACE, add undo snapshot once', () => {
editor.getSelectionRange = () => {
return <any>{
collapsed: true,
};
};
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.BACKSPACE,
},
});
expect(addUndoSnapshot).toHaveBeenCalledTimes(1);
// Backspace again, no need to add undo snapshot now
(<jasmine.Spy>addUndoSnapshot).calls.reset();
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.BACKSPACE,
},
});
expect(addUndoSnapshot).not.toHaveBeenCalled();
// Backspace again, with ctrl key pressed, addUndoSnapshot
(<jasmine.Spy>addUndoSnapshot).calls.reset();
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.BACKSPACE,
ctrlKey: true,
},
});
expect(addUndoSnapshot).toHaveBeenCalled();
// Backspace again, with expanded range, addUndoSnapshot
(<jasmine.Spy>addUndoSnapshot).calls.reset();
editor.getSelectionRange = () => {
return <any>{
collapsed: false,
};
};
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.BACKSPACE,
},
});
expect(addUndoSnapshot).toHaveBeenCalled();
});
it('key down event with DELETE, add undo snapshot once', () => {
editor.getSelectionRange = () => {
return <any>{
collapsed: true,
};
};
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.DELETE,
},
});
expect(addUndoSnapshot).toHaveBeenCalledTimes(1);
// DELETE again, no need to add undo snapshot now
(<jasmine.Spy>addUndoSnapshot).calls.reset();
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.DELETE,
},
});
expect(addUndoSnapshot).not.toHaveBeenCalled();
// DELETE again, with ctrl key pressed, addUndoSnapshot
(<jasmine.Spy>addUndoSnapshot).calls.reset();
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.DELETE,
ctrlKey: true,
},
});
expect(addUndoSnapshot).toHaveBeenCalled();
// DELETE again, with expanded range, addUndoSnapshot
(<jasmine.Spy>addUndoSnapshot).calls.reset();
editor.getSelectionRange = () => {
return <any>{
collapsed: false,
};
};
plugin.onPluginEvent({
eventType: PluginEventType.KeyDown,
rawEvent: <any>{
which: Keys.DELETE,
},
});
expect(addUndoSnapshot).toHaveBeenCalled();
});
it('key down event with DELETE then BACKSPACE, add undo snapshot twice', () => {
    // Switching between the two deletion keys must snapshot on each switch.
    editor.getSelectionRange = () => {
        return <any>{
            collapsed: true,
        };
    };
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyDown,
        rawEvent: <any>{
            which: Keys.DELETE,
        },
    });
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyDown,
        rawEvent: <any>{
            which: Keys.BACKSPACE,
        },
    });
    expect(addUndoSnapshot).toHaveBeenCalledTimes(2);
});
it('key down event with cursor moving and has new content, add undo snapshot each time', () => {
    editor.getSelectionRange = () => {
        return <any>{
            collapsed: true,
        };
    };
    // Key codes 33..40 cover PageUp/PageDown/End/Home and the arrow keys.
    const KEY_PAGEUP = 33;
    const KEY_DOWN = 40;
    for (let which = KEY_PAGEUP; which <= KEY_DOWN; which++) {
        // New content is re-flagged before every key, so each move snapshots.
        state.hasNewContent = true;
        plugin.onPluginEvent({
            eventType: PluginEventType.KeyDown,
            rawEvent: <any>{
                which,
            },
        });
    }
    expect(addUndoSnapshot).toHaveBeenCalledTimes(KEY_DOWN - KEY_PAGEUP + 1);
});
it('key down event with cursor moving and but no new content, no undo snapshot each time', () => {
    editor.getSelectionRange = () => {
        return <any>{
            collapsed: true,
        };
    };
    const KEY_PAGEUP = 33;
    const KEY_DOWN = 40;
    for (let which = KEY_PAGEUP; which <= KEY_DOWN; which++) {
        // Without pending content changes, a cursor move has nothing to snapshot.
        state.hasNewContent = false;
        plugin.onPluginEvent({
            eventType: PluginEventType.KeyDown,
            rawEvent: <any>{
                which,
            },
        });
    }
    expect(addUndoSnapshot).not.toHaveBeenCalled();
});
it('delete, page up, delete, no new content, add undo snapshot twice', () => {
    editor.getSelectionRange = () => {
        return <any>{
            collapsed: true,
        };
    };
    const KEY_PAGEUP = 33;
    // Each DELETE that follows a non-deletion key snapshots; the PAGEUP in
    // between does not (hasNewContent is false) — hence two calls total.
    [Keys.DELETE, KEY_PAGEUP, Keys.DELETE].forEach(which => {
        state.hasNewContent = false;
        plugin.onPluginEvent({
            eventType: PluginEventType.KeyDown,
            rawEvent: <any>{
                which,
            },
        });
    });
    expect(addUndoSnapshot).toHaveBeenCalledTimes(2);
});
it('key press event with expanded range', () => {
    // Typing over an expanded selection must snapshot the content first.
    editor.getSelectionRange = () => ({ collapsed: false } as any);
    state.hasNewContent = false;
    const rawEvent = { which: 65 } as any;
    plugin.onPluginEvent({ eventType: PluginEventType.KeyPress, rawEvent });
    expect(addUndoSnapshot).toHaveBeenCalled();
});
it('key press event with collapsed range', () => {
    // Plain typing at a collapsed cursor takes no snapshot; it only marks
    // new content and clears the redo stack.
    editor.getSelectionRange = () => ({ collapsed: true } as any);
    const clearRedo = jasmine.createSpy('clearRedo');
    state.hasNewContent = false;
    state.snapshotsService.clearRedo = clearRedo;
    const rawEvent = { which: 65 } as any;
    plugin.onPluginEvent({ eventType: PluginEventType.KeyPress, rawEvent });
    expect(addUndoSnapshot).not.toHaveBeenCalled();
    expect(state.hasNewContent).toBeTrue();
    expect(clearRedo).toHaveBeenCalled();
});
it('key press event with SPACE key in collapsed range', () => {
    editor.getSelectionRange = () => {
        return <any>{
            collapsed: true,
        };
    };
    state.hasNewContent = false;
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyPress,
        rawEvent: <any>{
            which: Keys.SPACE,
        },
    });
    // First SPACE takes a snapshot and does not flag new content.
    expect(addUndoSnapshot).toHaveBeenCalled();
    expect(state.hasNewContent).toBeFalse();
    // Press SPACE again, no undo snapshot added
    (<jasmine.Spy>addUndoSnapshot).calls.reset();
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyPress,
        rawEvent: <any>{
            which: Keys.SPACE,
        },
    });
    expect(addUndoSnapshot).not.toHaveBeenCalled();
});
it('key press event with ENTER key in collapsed range', () => {
    editor.getSelectionRange = () => {
        return <any>{
            collapsed: true,
        };
    };
    state.hasNewContent = false;
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyPress,
        rawEvent: <any>{
            which: Keys.ENTER,
        },
    });
    // ENTER takes a snapshot and marks the content as changed.
    expect(addUndoSnapshot).toHaveBeenCalled();
    expect(state.hasNewContent).toBeTrue();
    // Press SPACE after ENTER: one more snapshot is expected.
    // NOTE(review): the original comment said "Press ENTER again" but the
    // event dispatched below uses Keys.SPACE — confirm which key this step
    // is meant to exercise.
    (<jasmine.Spy>addUndoSnapshot).calls.reset();
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyPress,
        rawEvent: <any>{
            which: Keys.SPACE,
        },
    });
    expect(addUndoSnapshot).toHaveBeenCalled();
});
it('CompositionEnd event', () => {
    // Finishing an IME composition commits an undo snapshot.
    plugin.onPluginEvent({
        eventType: PluginEventType.CompositionEnd,
        rawEvent: <any>{},
    });
    expect(addUndoSnapshot).toHaveBeenCalled();
});
it('ContentChanged event', () => {
    // A programmatic content change only flags new content and clears the
    // redo stack; it does not take a snapshot by itself.
    state.hasNewContent = false;
    const clearRedo = jasmine.createSpy('clearRedo');
    state.snapshotsService.clearRedo = clearRedo;
    plugin.onPluginEvent({
        eventType: PluginEventType.ContentChanged,
        source: '',
    });
    expect(addUndoSnapshot).not.toHaveBeenCalled();
    expect(state.hasNewContent).toBeTrue();
    expect(clearRedo).toHaveBeenCalled();
});
it('customized UndoSnapshotService', () => {
    // A caller-supplied snapshot service replaces the default one; the
    // plugin must route its calls (clearRedo on key press, here) to it.
    const canMove = jasmine.createSpy('canMove');
    const move = jasmine.createSpy('move');
    const addSnapshot = jasmine.createSpy('addSnapshot');
    const clearRedo = jasmine.createSpy('clearRedo');
    const canUndoAutoComplete = jasmine.createSpy('canUndoAutoComplete');
    plugin = new UndoPlugin({
        undoSnapshotService: {
            canMove,
            move,
            addSnapshot,
            clearRedo,
            canUndoAutoComplete,
        },
    });
    plugin.initialize(<IEditor>(<any>{
        getSelectionRange: () => ({
            collapsed: true,
        }),
        isInIME,
    }));
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyPress,
        rawEvent: <any>{
            which: 65,
        },
    });
    expect(clearRedo).toHaveBeenCalled();
});
it('can undo autoComplete', () => {
    // The auto-complete snapshot ('snapshot 2') is followed by exactly one
    // regular snapshot, so undo-autocomplete is still available.
    state.snapshotsService.addSnapshot('snapshot 1', false);
    state.snapshotsService.addSnapshot('snapshot 2', true);
    state.snapshotsService.addSnapshot('snapshot 3', false);
    expect(state.snapshotsService.canUndoAutoComplete()).toBeTrue();
});
it('cannot undo autoComplete', () => {
    // A second regular snapshot after the auto-complete one moves it out of
    // undo-autocomplete reach.
    state.snapshotsService.addSnapshot('snapshot 1', false);
    state.snapshotsService.addSnapshot('snapshot 2', true);
    state.snapshotsService.addSnapshot('snapshot 3', false);
    state.snapshotsService.addSnapshot('snapshot 4', false);
    expect(state.snapshotsService.canUndoAutoComplete()).toBeFalse();
});
it('Backspace trigger undo when can undo autoComplete', () => {
    state.snapshotsService.addSnapshot('snapshot 1', false);
    state.snapshotsService.addSnapshot('snapshot 2', true);
    state.snapshotsService.addSnapshot('snapshot 3', false);
    const undo = jasmine.createSpy('undo');
    const preventDefault = jasmine.createSpy('preventDefault');
    const range = document.createRange();
    const pos = Position.getStart(range);
    editor.undo = undo;
    editor.getSelectionRange = () => range;
    editor.getFocusedPosition = () => pos;
    // Cursor still sits at the auto-complete position, so BACKSPACE performs
    // an undo instead of a normal deletion and swallows the key event.
    state.autoCompletePosition = pos;
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyDown,
        rawEvent: <KeyboardEvent>(<any>{
            which: Keys.BACKSPACE,
            preventDefault,
        }),
    });
    expect(undo).toHaveBeenCalled();
    expect(preventDefault).toHaveBeenCalled();
    expect(state.autoCompletePosition).toBeNull();
});
it('Other key does not trigger undo auto complete', () => {
    state.snapshotsService.addSnapshot('snapshot 1', false);
    state.snapshotsService.addSnapshot('snapshot 2', true);
    state.snapshotsService.addSnapshot('snapshot 3', false);
    const undo = jasmine.createSpy('undo');
    const preventDefault = jasmine.createSpy('preventDefault');
    const range = document.createRange();
    const pos = Position.getStart(range);
    editor.undo = undo;
    editor.getSelectionRange = () => range;
    editor.getFocusedPosition = () => pos;
    state.autoCompletePosition = pos;
    // ENTER is not BACKSPACE: no undo, no preventDefault, and the
    // auto-complete state stays available.
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyDown,
        rawEvent: <KeyboardEvent>(<any>{
            which: Keys.ENTER,
            preventDefault,
        }),
    });
    expect(undo).not.toHaveBeenCalled();
    expect(preventDefault).not.toHaveBeenCalled();
    expect(state.autoCompletePosition).not.toBeNull();
    expect(state.snapshotsService.canUndoAutoComplete()).toBeTrue();
});
it('Another undo snapshot is added, cannot undo autocomplete any more', () => {
    state.snapshotsService.addSnapshot('snapshot 1', false);
    state.snapshotsService.addSnapshot('snapshot 2', true);
    state.snapshotsService.addSnapshot('snapshot 3', false);
    const undo = jasmine.createSpy('undo');
    const preventDefault = jasmine.createSpy('preventDefault');
    const range = document.createRange();
    const pos = Position.getStart(range);
    editor.undo = undo;
    editor.getSelectionRange = () => range;
    editor.getFocusedPosition = () => pos;
    // Ctrl+DELETE takes a fresh snapshot ('snapshot 4'), which pushes the
    // auto-complete snapshot out of undo-autocomplete reach.
    editor.addUndoSnapshot = () => state.snapshotsService.addSnapshot('snapshot 4', false);
    state.autoCompletePosition = pos;
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyDown,
        rawEvent: <KeyboardEvent>(<any>{
            which: Keys.DELETE,
            ctrlKey: true,
            preventDefault,
        }),
    });
    expect(undo).not.toHaveBeenCalled();
    expect(preventDefault).not.toHaveBeenCalled();
    expect(state.autoCompletePosition).toBeNull();
    expect(state.snapshotsService.canUndoAutoComplete()).toBeFalse();
});
it('Position changed, cannot undo autocomplete for Backspace', () => {
    state.snapshotsService.addSnapshot('snapshot 1', false);
    const undo = jasmine.createSpy('undo');
    const preventDefault = jasmine.createSpy('preventDefault');
    const range = document.createRange();
    const pos = Position.getStart(range);
    editor.undo = undo;
    editor.getSelectionRange = () => range;
    const pos2 = new Position(pos);
    (<any>pos2).offset++; // hack, just want to make pos2 different from pos
    editor.getFocusedPosition = () => pos2;
    editor.addUndoSnapshot = () => state.snapshotsService.addSnapshot('snapshot 4', false);
    // Press backspace first time, to let plugin remember last pressed key
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyDown,
        rawEvent: <KeyboardEvent>(<any>{
            which: Keys.BACKSPACE,
            preventDefault,
        }),
    });
    state.snapshotsService.addSnapshot('snapshot 2', true);
    state.snapshotsService.addSnapshot('snapshot 3', false);
    state.autoCompletePosition = pos;
    // Second BACKSPACE: auto-complete undo is skipped because the focused
    // position (pos2) no longer matches the stored auto-complete position.
    plugin.onPluginEvent({
        eventType: PluginEventType.KeyDown,
        rawEvent: <KeyboardEvent>(<any>{
            which: Keys.BACKSPACE,
            preventDefault,
        }),
    });
    expect(undo).not.toHaveBeenCalled();
    expect(preventDefault).not.toHaveBeenCalled();
    expect(state.autoCompletePosition).not.toBeNull();
    expect(state.snapshotsService.canUndoAutoComplete()).toBeTrue();
});
});
// clang-format off
import {flush} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
// <if expr="not chromeos_ash">
import {CrActionMenuElement} from 'chrome://settings/settings.js';
// </if>
import {MAX_SIGNIN_PROMO_IMPRESSION, Router, SettingsSyncAccountControlElement, StatusAction, SyncBrowserProxyImpl} from 'chrome://settings/settings.js';
import {assertEquals, assertFalse, assertTrue} from 'chrome://webui-test/chai_assert.js';
import {isChildVisible, isVisible} from 'chrome://webui-test/test_util.js';
import {setupRouterWithSyncRoutes, simulateStoredAccounts, SyncRoutes} from './sync_test_util.js';
import {TestSyncBrowserProxy} from './test_sync_browser_proxy.js';
// clang-format on
suite('SyncAccountControl', function() {
let browserProxy: TestSyncBrowserProxy;
let testElement: SettingsSyncAccountControlElement;
// Sets the stored promo impression count, then toggles syncStatus.signedIn
// off and back to the requested value; flipping signedIn forces the promo
// state to be re-evaluated.
function forcePromoResetWithCount(count: number, signedIn: boolean) {
  browserProxy.setImpressionCount(count);
  for (const value of [!signedIn, signedIn]) {
    testElement.syncStatus = {
      signedIn: value,
      statusAction: StatusAction.NO_ACTION,
    };
  }
}
setup(async function() {
  // Fresh router, test proxy, and element for every test. Two stored
  // accounts are simulated, the first matching the signed-in user.
  setupRouterWithSyncRoutes();
  browserProxy = new TestSyncBrowserProxy();
  SyncBrowserProxyImpl.setInstance(browserProxy);
  document.body.innerHTML = '';
  testElement = document.createElement('settings-sync-account-control');
  testElement.syncStatus = {
    signedIn: true,
    signedInUsername: 'foo@foo.com',
    statusAction: StatusAction.NO_ACTION
  };
  testElement.prefs = {
    signin: {
      allowed_on_next_startup:
          {type: chrome.settingsPrivate.PrefType.BOOLEAN, value: true},
    },
  };
  document.body.appendChild(testElement);
  // The element fetches stored accounts on attach; wait for that call.
  await browserProxy.whenCalled('getStoredAccounts');
  flush();
  simulateStoredAccounts([
    {
      fullName: 'fooName',
      givenName: 'foo',
      email: 'foo@foo.com',
    },
    {
      fullName: 'barName',
      givenName: 'bar',
      email: 'bar@bar.com',
    },
  ]);
});
teardown(function() {
  testElement.remove();
});
test('promo shows/hides in the right states', async function() {
  // Not signed in, no accounts, will show banner.
  simulateStoredAccounts([]);
  forcePromoResetWithCount(0, false);
  const banner = testElement.shadowRoot!.querySelector('#banner');
  assertTrue(isVisible(banner));
  // Flipping signedIn in forcePromoResetWithCount should increment count.
  await browserProxy.whenCalled('incrementPromoImpressionCount');
  // Past the impression cap the banner must stay hidden.
  forcePromoResetWithCount(MAX_SIGNIN_PROMO_IMPRESSION + 1, false);
  assertFalse(isVisible(banner));
  // Not signed in, has accounts, will show banner.
  simulateStoredAccounts([{email: 'foo@foo.com'}]);
  forcePromoResetWithCount(0, false);
  assertTrue(isVisible(banner));
  forcePromoResetWithCount(MAX_SIGNIN_PROMO_IMPRESSION + 1, false);
  assertFalse(isVisible(banner));
  // signed in, banners never show.
  simulateStoredAccounts([{email: 'foo@foo.com'}]);
  forcePromoResetWithCount(0, true);
  assertFalse(isVisible(banner));
  forcePromoResetWithCount(MAX_SIGNIN_PROMO_IMPRESSION + 1, true);
  assertFalse(isVisible(banner));
});
test('promo header is visible', function() {
  // Signed out with a promo label set: the promo header renders even with
  // no stored accounts.
  testElement.syncStatus = {
    signedIn: false,
    signedInUsername: '',
    statusAction: StatusAction.NO_ACTION
  };
  testElement.promoLabelWithNoAccount = testElement.promoLabelWithAccount =
      'title';
  simulateStoredAccounts([]);
  assertTrue(isChildVisible(testElement, '#promo-header'));
});
test('not signed in and no stored accounts', async function() {
  testElement.syncStatus = {
    signedIn: false,
    signedInUsername: '',
    statusAction: StatusAction.NO_ACTION
  };
  simulateStoredAccounts([]);
  // Only the promo header and the sign-in button should be shown.
  assertTrue(isChildVisible(testElement, '#promo-header'));
  assertFalse(isChildVisible(testElement, '#avatar-row'));
  // <if expr="not chromeos_ash">
  // Chrome OS does not use the account switch menu.
  assertFalse(isChildVisible(testElement, '#menu'));
  // </if>
  assertTrue(isChildVisible(testElement, '#signIn'));
  testElement.$.signIn.click();
  // Clicking sign-in routes to a platform-specific proxy call; the <if>
  // blocks below are grit build-time directives, not plain comments.
  // <if expr="chromeos_ash">
  await browserProxy.whenCalled('turnOnSync');
  // </if>
  // <if expr="not chromeos_ash">
  await browserProxy.whenCalled('startSignIn');
  // </if>
});
// <if expr="not chromeos_ash">
// Chrome OS users are always signed in.
test('not signed in but has stored accounts', async function() {
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: false,
    signedInUsername: '',
    statusAction: StatusAction.NO_ACTION,
    hasError: false,
    disabled: false,
  };
  simulateStoredAccounts([
    {
      fullName: 'fooName',
      givenName: 'foo',
      email: 'foo@foo.com',
    },
    {
      fullName: 'barName',
      givenName: 'bar',
      email: 'bar@bar.com',
    },
  ]);
  const userInfo =
      testElement.shadowRoot!.querySelector<HTMLElement>('#user-info')!;
  const syncButton =
      testElement.shadowRoot!.querySelector<HTMLElement>('#sync-button')!;
  // Avatar row shows the right account.
  assertTrue(isChildVisible(testElement, '#promo-header'));
  assertTrue(isChildVisible(testElement, '#avatar-row'));
  assertTrue(userInfo.textContent!.includes('fooName'));
  assertTrue(userInfo.textContent!.includes('foo@foo.com'));
  assertFalse(userInfo.textContent!.includes('barName'));
  assertFalse(userInfo.textContent!.includes('bar@bar.com'));
  // Menu contains the right items.
  assertTrue(!!testElement.shadowRoot!.querySelector('#menu'));
  assertFalse(
      testElement.shadowRoot!.querySelector<CrActionMenuElement>(
          '#menu')!.open);
  const items =
      testElement.shadowRoot!.querySelectorAll<HTMLElement>('.dropdown-item');
  // Two account rows plus the sign-in and sign-out items.
  assertEquals(4, items.length);
  assertTrue(items[0]!.textContent!.includes('foo@foo.com'));
  assertTrue(items[1]!.textContent!.includes('bar@bar.com'));
  assertEquals(items[2]!.id, 'sign-in-item');
  assertEquals(items[3]!.id, 'sign-out-item');
  // "sync to" button is showing the correct name and syncs with the
  // correct account when clicked.
  assertTrue(isVisible(syncButton));
  assertFalse(isChildVisible(testElement, '#turn-off'));
  syncButton.click();
  flush();
  let [email, isDefaultPromoAccount] =
      await browserProxy.whenCalled('startSyncingWithEmail');
  assertEquals(email, 'foo@foo.com');
  assertEquals(isDefaultPromoAccount, true);
  assertTrue(isChildVisible(testElement, 'cr-icon-button'));
  assertTrue(testElement.shadowRoot!
                 .querySelector<HTMLElement>('#sync-icon-container')!.hidden);
  testElement.shadowRoot!.querySelector<HTMLElement>(
                             '#dropdown-arrow')!.click();
  flush();
  assertTrue(
      testElement.shadowRoot!.querySelector<CrActionMenuElement>(
          '#menu')!.open);
  // Switching selected account will update UI with the right name and
  // email.
  items[1]!.click();
  flush();
  assertFalse(userInfo.textContent!.includes('fooName'));
  assertFalse(userInfo.textContent!.includes('foo@foo.com'));
  assertTrue(userInfo.textContent!.includes('barName'));
  assertTrue(userInfo.textContent!.includes('bar@bar.com'));
  assertTrue(isVisible(syncButton));
  browserProxy.resetResolver('startSyncingWithEmail');
  syncButton.click();
  flush();
  [email, isDefaultPromoAccount] =
      await browserProxy.whenCalled('startSyncingWithEmail');
  assertEquals(email, 'bar@bar.com');
  assertEquals(isDefaultPromoAccount, false);
  // Tapping the last menu item will initiate sign-in.
  items[2]!.click();
  await browserProxy.whenCalled('startSignIn');
});
// </if>
test('signed in, no error', function() {
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    statusAction: StatusAction.NO_ACTION,
    hasError: false,
    hasUnrecoverableError: false,
    disabled: false,
  };
  flush();
  // Signed-in, healthy state: avatar row visible, promo and error UI hidden.
  assertTrue(isChildVisible(testElement, '#avatar-row'));
  assertFalse(isChildVisible(testElement, '#promo-header'));
  assertFalse(
      testElement.shadowRoot!
          .querySelector<HTMLElement>('#sync-icon-container')!.hidden);
  // <if expr="not chromeos_ash">
  // Chrome OS does not use the account switch menu.
  assertFalse(isChildVisible(testElement, 'cr-icon-button'));
  assertFalse(!!testElement.shadowRoot!.querySelector('#menu'));
  // </if>
  const userInfo =
      testElement.shadowRoot!.querySelector<HTMLElement>('#user-info')!;
  assertTrue(userInfo.textContent!.includes('barName'));
  assertTrue(userInfo.textContent!.includes('bar@bar.com'));
  assertFalse(userInfo.textContent!.includes('fooName'));
  assertFalse(userInfo.textContent!.includes('foo@foo.com'));
  assertFalse(isChildVisible(testElement, '#sync-button'));
  assertTrue(isChildVisible(testElement, '#turn-off'));
  assertFalse(isChildVisible(testElement, '#sync-error-button'));
  // Turning sync off navigates to the sign-out route.
  testElement.shadowRoot!.querySelector<HTMLElement>(
                             '#avatar-row #turn-off')!.click();
  flush();
  assertEquals(
      Router.getInstance().getCurrentRoute(),
      (Router.getInstance().getRoutes() as SyncRoutes).SIGN_OUT);
});
test('signed in, has error', function() {
  // Case 1: generic sync error (CONFIRM_SYNC_SETTINGS) -> "sync-problem".
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    hasError: true,
    hasUnrecoverableError: false,
    statusAction: StatusAction.CONFIRM_SYNC_SETTINGS,
    disabled: false,
  };
  flush();
  const userInfo = testElement.shadowRoot!.querySelector('#user-info')!;
  assertTrue(
      testElement.shadowRoot!
          .querySelector<HTMLElement>(
              '#sync-icon-container')!.classList.contains('sync-problem'));
  assertTrue(!!testElement.shadowRoot!.querySelector(
      '[icon="settings:sync-problem"]'));
  let displayedText =
      userInfo.querySelector<HTMLElement>('div:not([hidden])')!.textContent!;
  assertFalse(displayedText.includes('barName'));
  assertFalse(displayedText.includes('fooName'));
  assertTrue(displayedText.includes('Sync isn\'t working'));
  // The sync error button is shown to resolve the error.
  assertTrue(isChildVisible(testElement, '#sync-error-button'));
  // Case 2: auth error (REAUTHENTICATE) -> "sync-paused".
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    hasError: true,
    hasUnrecoverableError: false,
    statusAction: StatusAction.REAUTHENTICATE,
    disabled: false,
  };
  assertTrue(
      testElement.shadowRoot!
          .querySelector<HTMLElement>(
              '#sync-icon-container')!.classList.contains('sync-paused'));
  assertTrue(!!testElement.shadowRoot!.querySelector(
      '[icon=\'settings:sync-disabled\']'));
  displayedText =
      userInfo.querySelector<HTMLElement>('div:not([hidden])')!.textContent!;
  assertFalse(displayedText.includes('barName'));
  assertFalse(displayedText.includes('fooName'));
  assertTrue(displayedText.includes('Sync is paused'));
  // The sync error button is shown to resolve the error.
  assertTrue(isChildVisible(testElement, '#sync-error-button'));
  // Case 3: sync disabled by policy -> "sync-disabled", no error button.
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    statusAction: StatusAction.NO_ACTION,
    hasError: false,
    hasUnrecoverableError: false,
    disabled: true,
  };
  assertTrue(
      testElement.shadowRoot!
          .querySelector<HTMLElement>(
              '#sync-icon-container')!.classList.contains('sync-disabled'));
  assertTrue(!!testElement.shadowRoot!.querySelector('[icon=\'cr:sync\']'));
  displayedText =
      userInfo.querySelector<HTMLElement>('div:not([hidden])')!.textContent!;
  assertFalse(displayedText.includes('barName'));
  assertFalse(displayedText.includes('fooName'));
  assertTrue(displayedText.includes('Sync disabled'));
  assertFalse(isChildVisible(testElement, '#sync-error-button'));
  // Case 4: unrecoverable error -> "sync-problem".
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    statusAction: StatusAction.REAUTHENTICATE,
    hasError: true,
    hasUnrecoverableError: true,
    disabled: false,
  };
  assertTrue(
      testElement.shadowRoot!
          .querySelector<HTMLElement>(
              '#sync-icon-container')!.classList.contains('sync-problem'));
  assertTrue(!!testElement.shadowRoot!.querySelector(
      '[icon="settings:sync-problem"]'));
  displayedText =
      userInfo.querySelector<HTMLElement>('div:not([hidden])')!.textContent!;
  assertFalse(displayedText.includes('barName'));
  assertFalse(displayedText.includes('fooName'));
  assertTrue(displayedText.includes('Sync isn\'t working'));
  // Case 5: passwords-only trusted-vault error -> specific message.
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    statusAction: StatusAction.RETRIEVE_TRUSTED_VAULT_KEYS,
    hasError: true,
    hasPasswordsOnlyError: true,
    hasUnrecoverableError: false,
    disabled: false,
  };
  assertTrue(
      testElement.shadowRoot!
          .querySelector<HTMLElement>(
              '#sync-icon-container')!.classList.contains('sync-problem'));
  assertTrue(!!testElement.shadowRoot!.querySelector(
      '[icon="settings:sync-problem"]'));
  displayedText =
      userInfo.querySelector<HTMLElement>('div:not([hidden])')!.textContent!;
  assertFalse(displayedText.includes('barName'));
  assertFalse(displayedText.includes('fooName'));
  assertFalse(displayedText.includes('Sync isn\'t working'));
  assertTrue(displayedText.includes('Password sync isn\'t working'));
  // The sync error button is shown to resolve the error.
  assertTrue(isChildVisible(testElement, '#sync-error-button'));
  assertTrue(isChildVisible(testElement, '#turn-off'));
});
test('signed in, setup in progress', () => {
  // While first-run setup is in progress, the row shows the status text and
  // the setup buttons instead of the regular turn-off/error controls.
  testElement.syncStatus = {
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    statusAction: StatusAction.NO_ACTION,
    statusText: 'Setup in progress...',
    firstSetupInProgress: true,
    hasError: false,
    hasUnrecoverableError: false,
    disabled: false,
  };
  flush();
  const infoText =
      testElement.shadowRoot!.querySelector('#user-info')!.textContent!;
  assertTrue(infoText.includes('barName'));
  assertTrue(infoText.includes('Setup in progress...'));
  assertTrue(
      isVisible(testElement.shadowRoot!.querySelector('#setup-buttons')));
});
test('embedded in another page', function() {
  // When embedded in a subpage the promo banner ignores the impression cap.
  testElement.embeddedInSubpage = true;
  forcePromoResetWithCount(100, false);
  const banner = testElement.shadowRoot!.querySelector('#banner');
  assertTrue(isVisible(banner));
  // Healthy signed-in state: turn-off shown, no error button.
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    statusAction: StatusAction.NO_ACTION,
    hasError: false,
    hasUnrecoverableError: false,
    disabled: false,
  };
  assertTrue(isChildVisible(testElement, '#turn-off'));
  assertFalse(isChildVisible(testElement, '#sync-error-button'));
  // Recoverable auth error: error button appears alongside turn-off.
  testElement.embeddedInSubpage = true;
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    hasError: true,
    hasUnrecoverableError: false,
    statusAction: StatusAction.REAUTHENTICATE,
    disabled: false,
  };
  assertTrue(isChildVisible(testElement, '#turn-off'));
  assertTrue(isChildVisible(testElement, '#sync-error-button'));
  // Unrecoverable error with an actionable status still shows the button.
  testElement.embeddedInSubpage = true;
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    hasError: true,
    hasUnrecoverableError: true,
    statusAction: StatusAction.REAUTHENTICATE,
    disabled: false,
  };
  assertTrue(isChildVisible(testElement, '#turn-off'));
  assertTrue(isChildVisible(testElement, '#sync-error-button'));
  testElement.embeddedInSubpage = true;
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    hasError: true,
    hasUnrecoverableError: false,
    statusAction: StatusAction.ENTER_PASSPHRASE,
    disabled: false,
  };
  assertTrue(isChildVisible(testElement, '#turn-off'));
  // Don't show passphrase error button on embedded page.
  assertFalse(isChildVisible(testElement, '#sync-error-button'));
  // Unrecoverable error with no action: nothing for the button to do.
  testElement.embeddedInSubpage = true;
  testElement.syncStatus = {
    firstSetupInProgress: false,
    signedIn: true,
    signedInUsername: 'bar@bar.com',
    hasError: true,
    hasUnrecoverableError: true,
    statusAction: StatusAction.NO_ACTION,
    disabled: false,
  };
  assertTrue(isChildVisible(testElement, '#turn-off'));
  assertFalse(isChildVisible(testElement, '#sync-error-button'));
});
test('hide buttons', () => {
  // With hideButtons set, neither the turn-off nor the sync-error button is
  // rendered, regardless of the current error state. The three states below
  // are exercised in the same order as before: no error, recoverable auth
  // error, and passphrase error.
  testElement.hideButtons = true;
  const errorStates = [
    {hasError: false, statusAction: StatusAction.NO_ACTION},
    {hasError: true, statusAction: StatusAction.REAUTHENTICATE},
    {hasError: true, statusAction: StatusAction.ENTER_PASSPHRASE},
  ];
  for (const {hasError, statusAction} of errorStates) {
    testElement.syncStatus = {
      firstSetupInProgress: false,
      signedIn: true,
      signedInUsername: 'bar@bar.com',
      hasError,
      hasUnrecoverableError: false,
      statusAction,
      disabled: false,
    };
    assertFalse(isChildVisible(testElement, '#turn-off'));
    assertFalse(isChildVisible(testElement, '#sync-error-button'));
  }
});
test('signinButtonDisabled', () => {
  // The sign-in button must become disabled once signin is disallowed via
  // the 'signin.allowed_on_next_startup' pref.
  const signInButton = testElement.$.signIn;
  assertFalse(signInButton.disabled);
  testElement.setPrefValue('signin.allowed_on_next_startup', false);
  flush();
  assertTrue(signInButton.disabled);
});
});
import { PdfBrush } from './../brushes/pdf-brush';
import { PdfPen } from './../pdf-pen';
import { PdfLayoutResult, PdfLayoutFormat } from './../figures/base/element-layouter';
import { PdfGraphics } from './../pdf-graphics';
import { RectangleF, PointF } from './../../drawing/pdf-drawing';
import { PdfPage } from './../../pages/pdf-page';
import { PathPointType } from './enum';
import { PdfFillElement} from './../figures/base/fill-element';
import { PdfFillMode} from './../enum';
/**
* `PdfPath` class Implements graphics path, which is a sequence of primitive graphics elements.
* @private
*/
export class PdfPath extends PdfFillElement {
    // Fields
    /**
     * Backing store for the path points; lazily created by the private
     * `points` getter, so it stays null until the first point is added.
     * @private
     */
    private mpoints : PointF[] = null;
    /**
     * Backing store for the path point types (PathPointType values, one per
     * point); lazily created by the private `types` getter.
     * @private
     */
    private mpathTypes : number[] = null;
    /**
     * When true, the next added segment starts a new figure.
     * @private
     */
    private mStartFigure : boolean = true;
    /**
     * Fill rule used when the path is filled; defaults to Alternate.
     * @private
     */
    private mfillMode: PdfFillMode = PdfFillMode.Alternate;
    /**
     * Local variable to store the Beziers.
     * @private
     */
    private isBeziers3: boolean = false;
    /**
     * Local variable to store the xps.
     * @private
     */
    private isXps: boolean = false;
    // Constructor
    /**
     * Initializes a new instance of the `PdfPath` class.
     * @public
     */
    public constructor()
    /**
     * Initializes a new instance of the `PdfPath` class with an outline pen.
     * @public
     */
    public constructor(pen: PdfPen)
    /**
     * Initializes a new instance of the `PdfPath` class with a fill brush.
     * @public
     */
    public constructor(brush: PdfBrush)
    /**
     * Initializes a new instance of the `PdfPath` class from existing points
     * and their PathPointType values.
     * @public
     */
    public constructor(points: PointF[], pathTypes: number[])
    /**
     * Initializes a new instance of the `PdfPath` class.
     * @public
     */
    public constructor(brush: PdfBrush, fillMode: PdfFillMode)
    /**
     * Initializes a new instance of the `PdfPath` class.
     * @public
     */
    public constructor(pen: PdfPen, points: PointF[], pathTypes: number[])
    /**
     * Initializes a new instance of the `PdfPath` class.
     * @public
     */
    public constructor(pen: PdfPen, brush: PdfBrush, fillMode: PdfFillMode)
    /**
     * Initializes a new instance of the `PdfPath` class.
     * @public
     */
    public constructor(brush: PdfBrush, fillMode: PdfFillMode, points: PointF[], pathTypes: number[])
    /**
     * Implementation signature: dispatches on the runtime types of the
     * arguments to emulate the overloads above.
     * NOTE(review): in the pen/brush branches `super()` has already run and
     * `super(arg1, ...)` is invoked a second time. On an ES5 transpilation
     * target the base initializer simply runs again, but on an ES2015+ class
     * target a second `super()` call throws — confirm the build target.
     * @public
     */
    /* tslint:disable-next-line:max-line-length */
    public constructor(arg1?: PdfPen|PdfBrush|PointF[], arg2?: PdfFillMode|number[]|PointF[]|PdfBrush, arg3?: PointF[]|number[]|PdfFillMode, arg4?: number[]) {
        super();
        if (typeof arg1 === 'undefined') {
            // Parameterless overload: nothing further to initialize.
        } else if (arg1 instanceof PdfPen) {
            super(<PdfPen>arg1);
            if (arg2 instanceof PdfBrush) {
                // (pen, brush, fillMode)
                super(<PdfPen> arg1, <PdfBrush>arg2);
                this.fillMode = <PdfFillMode>arg3;
            } else if (arg2 !== null && typeof arg2 !== 'undefined' && arg3 !== null && typeof arg3 !== 'undefined') {
                // (pen, points, pathTypes)
                this.addPath(<PointF[]>arg2, <number[]>arg3);
            }
        } else if (arg1 instanceof PdfBrush) {
            // (brush[, fillMode[, points, pathTypes]])
            super(<PdfBrush>arg1);
            if (arg2 !== null && typeof arg2 !== 'undefined') {
                this.fillMode = <PdfFillMode>arg2;
            }
            if (arg3 !== null && typeof arg3 !== 'undefined' && arg4 !== null && typeof arg4 !== 'undefined' ) {
                this.addPath(<PointF[]>arg3, <number[]>arg4);
            }
        } else {
            // (points, pathTypes)
            this.addPath(arg1, <number[]>arg2);
        }
    }
// Properties
/**
* Gets or sets the fill mode.
* @public
*/
public get fillMode(): PdfFillMode {
return this.mfillMode;
}
public set fillMode(value: PdfFillMode) {
this.mfillMode = value;
}
/**
* Gets the path points.
* @public
*/
public get pathPoints(): PointF[] {
return this.points;
}
/**
* Gets the path point types.
* @public
*/
public get pathTypes(): number[] {
return this.types;
}
/**
* Gets the point count.
* @public
*/
public get pointCount(): number {
let count: number = 0;
if ((this.mpoints != null)) {
count = this.mpoints.length;
}
return count;
}
/**
* Gets the last points.
* @public
*/
public get lastPoint(): PointF {
return this.getLastPoint();
}
/**
* Gets the points list.
* @private
*/
private get points(): PointF[] {
if ((this.mpoints == null)) {
this.mpoints = [];
}
return this.mpoints;
}
/**
* Gets the types.
* @private
*/
private get types(): number[] {
if ((this.mpathTypes == null)) {
this.mpathTypes = [];
}
return this.mpathTypes;
}
    // Public methods
    /**
     * `draw` the element on the page with the specified page and 'PointF' class
     * @param page Current page where the element should be drawn.
     * @param location Start location on the page.
     */
    public draw(page : PdfPage, location : PointF) : PdfLayoutResult
    /**
     * `draw` the element on the page with the specified page and pair of coordinates
     * @private
     */
    public draw(page : PdfPage, x : number, y : number) : PdfLayoutResult
    /**
     * `draw` the element on the page with the specified page and 'RectangleF' class
     * @private
     */
    public draw(page : PdfPage, layoutRectangle : RectangleF) : PdfLayoutResult
    /**
     * `draw` the element on the page with the specified page, 'PointF' class and layout format
     * @private
     */
    public draw(page : PdfPage, location : PointF, format : PdfLayoutFormat) : PdfLayoutResult
    /**
     * `draw` the element on the page with the specified page, pair of coordinates and layout format
     * @private
     */
    public draw(page : PdfPage, x : number, y : number, format : PdfLayoutFormat) : PdfLayoutResult
    /**
     * `draw` the element on the page.
     * @private
     */
    public draw(page : PdfPage, layoutRect : RectangleF, format : PdfLayoutFormat) : PdfLayoutResult
    // Implementation: dispatch order matters here — the `width` checks
    // distinguish a PointF from a RectangleF at runtime, since the static
    // types overlap. Do not reorder these branches.
    public draw(arg1 : PdfPage, arg2 : RectangleF|PointF|number, arg3 ?: PdfLayoutFormat|number,
                arg4 ?: PdfLayoutFormat) : PdfLayoutResult {
        if (arg2 instanceof PointF && typeof (arg2 as RectangleF).width === 'undefined' && typeof arg3 === 'undefined') {
            // (page, location)
            return this.drawHelper(arg1, arg2.x, arg2.y);
        } else if (arg2 instanceof RectangleF && typeof (arg2 as RectangleF).width !== 'undefined' && typeof arg3 === 'undefined') {
            // (page, layoutRectangle)
            return this.drawHelper(arg1, arg2, null);
        } else if (typeof arg2 === 'number' && typeof arg3 === 'number' && typeof arg4 === 'undefined') {
            // (page, x, y)
            return this.drawHelper(arg1, arg2, arg3, null);
        } else if (arg2 instanceof PointF && arg3 instanceof PdfLayoutFormat) {
            // (page, location, format)
            return this.drawHelper(arg1, arg2.x, arg2.y, arg3);
        } else if (typeof arg2 === 'number' && (arg4 instanceof PdfLayoutFormat || arg4 == null) && typeof arg3 === 'number') {
            // (page, x, y, format): build a layout rect spanning the rest of
            // the client width at the given origin.
            let widthValue : number = (arg1.graphics.clientSize.width - arg2);
            let layoutRect : RectangleF = new RectangleF(arg2, arg3, widthValue, 0);
            return this.drawHelper(arg1, layoutRect, arg4);
        } else if (arg2 instanceof RectangleF && arg3 instanceof PdfLayoutFormat) {
            // (page, layoutRect, format)
            return this.drawHelper(arg1, arg2, arg3);
        } else {
            // Fallback: treat as (page, layoutRect, format).
            return this.drawHelper(arg1, (arg2 as RectangleF), arg3 as PdfLayoutFormat);
        }
    }
/**
 * `add a arc` specified by a rectangle, a coordinate start angle and sweepangle.
 * @param rectangle The boundaries of the arc.
 * @param startAngle The start angle of the arc.
 * @param sweepAngle The angle between startAngle and the end of the arc.
 */
public addArc(rectangle: RectangleF, startAngle: number, sweepAngle: number) : void
/**
 * `add a arc` specified by a x , y coordinate points, a width, a height and coordinate start angle and sweepangle.
 * @param x The x-coordinate of the upper-left corner of the rectangular region.
 * @param y The y-coordinate of the upper-left corner of the rectangular region
 * @param width The width of the rectangular region.
 * @param height The height of the rectangular region.
 * @param startAngle The start angle of the arc.
 * @param sweepAngle The angle between startAngle and the end of the arc.
 */
public addArc(x: number, y: number, width: number, height: number, startAngle: number, sweepAngle: number) : void
public addArc(arg1: number|RectangleF, arg2?: number, arg3?: number, arg4?: number, arg5?: number, arg6?: number) : void {
    if (arg1 instanceof RectangleF) {
        // Rectangle overload: unpack into the coordinate overload.
        this.addArc(arg1.x, arg1.y, arg1.width, arg1.height, arg2, arg3);
    } else {
        // Approximate the arc with cubic Bezier segments. getBezierArcPoints
        // takes opposite corners of the bounding box, so the far corner is
        // (x + width, y + height).
        // BUGFIX: x2 was previously computed as (arg2 + arg3) — y + width —
        // which produced a wrong bounding box for any rect where x !== y.
        let points: number[] = this.getBezierArcPoints(<number>arg1, arg2, (<number>arg1 + arg3), (arg2 + arg4), arg5, arg6);
        for (let i : number = 0 ; i < points.length ; i = i + 8 ) {
            // Each group of 8 values is one cubic segment:
            // start, control1, control2, end (x/y interleaved).
            /* tslint:disable-next-line:max-line-length */
            let point : number[] = [ points[i], points[i + 1], points[i + 2], points[i + 3], points[i + 4], points[i + 5], points[i + 6], points[i + 7]];
            this.addPoints(point, PathPointType.Bezier3);
        }
    }
}
/**
 * `add a bezier curve` specified by region points.
 * @param startPoint The start point - represents the starting point of the curve.
 * @param firstControlPoint The first control point - represents the second control point of the curve.
 * @param secondControlPoint The second control point - represents the second control point of the curve.
 * @param endPoint The end point - represents the end point of the curve.
 */
public addBezier(startPoint: PointF, firstControlPoint: PointF, secondControlPoint: PointF, endPoint: PointF) : void
/**
 * `add a bezier curve` specified by region points.
 * @param startPointX The start point X.
 * @param startPointY The start point Y.
 * @param firstControlPointX The first control point X.
 * @param firstControlPointY The first control point Y.
 * @param secondControlPointX The second control point X.
 * @param secondControlPointY The second control point Y.
 * @param endPointX The end point X.
 * @param endPointY The end point Y.
 */
/* tslint:disable-next-line:max-line-length */
public addBezier(startPointX: number, startPointY: number, firstControlPointX: number, firstControlPointY: number, secondControlPointX: number, secondControlPointY: number, endPointX: number, endPointY: number) : void
/* tslint:disable-next-line:max-line-length */
public addBezier(arg1: number|PointF, arg2: number|PointF, arg3: number|PointF, arg4: number|PointF, arg5?: number, arg6?: number, arg7?: number, arg8?: number) : void {
    if (arg1 instanceof PointF && arg2 instanceof PointF && arg3 instanceof PointF && arg4 instanceof PointF ) {
        // PointF overload: flatten the four points into the coordinate overload.
        this.addBezier(arg1.x, arg1.y, arg2.x, arg2.y, arg3.x, arg3.y, arg4.x, arg4.y);
    } else {
        // Coordinate overload: collect the 8 values (start, two controls, end)
        // into the flat x,y,... layout that addPoints expects.
        let coordinates : number[] = [
            <number>arg1, <number>arg2,
            <number>arg3, <number>arg4,
            arg5, arg6,
            arg7, arg8
        ];
        this.addPoints(coordinates, PathPointType.Bezier3);
    }
}
/**
 * `add a ellipse` specified by a rectangle.
 * @param rectangle The boundaries of the ellipse.
 */
public addEllipse(rectangle : RectangleF) : void
/**
 * `add a ellipse` specified by a rectangle bounds .
 * @param x The x-coordinate of the upper-left corner of the rectangular region.
 * @param y The y-coordinate of the upper-left corner of the rectangular region.
 * @param width The width of the rectangular region.
 * @param height The height of the rectangular region.
 */
public addEllipse(x : number, y : number, width : number, height : number) : void
public addEllipse(arg1 : number|RectangleF, arg2 ?: number, arg3 ?: number, arg4 ?: number) : void {
    if (arg1 instanceof RectangleF) {
        // Rectangle overload: unpack into the coordinate overload.
        this.addEllipse(arg1.x, arg1.y, arg1.width, arg1.height);
        return;
    }
    // An ellipse is a closed figure consisting of a full 360-degree arc.
    this.startFigure();
    this.addArc(arg1, arg2, arg3, arg4, 0, 360);
    this.closeFigure();
}
/**
 * `add a line` specified by points .
 * @param point1 The start point of the line.
 * @param point2 The end point of the line.
 */
public addLine(point1: PointF, point2: PointF) : void
/**
 * `add a line` specified by a rectangle bounds.
 * @param x1 The x-coordinate of the starting point of the line.
 * @param y1 The y-coordinate of the starting point of the line.
 * @param x2 The x-coordinate of the end point of the line.
 * @param y2 The y-coordinate of the end point of the line.
 */
public addLine(x1: number, y1: number, x2: number, y2: number) : void
public addLine(arg1: number|PointF, arg2: number|PointF, arg3?: number, arg4?: number): void {
    if (arg1 instanceof PointF && arg2 instanceof PointF) {
        // PointF overload: flatten into the coordinate overload.
        this.addLine(arg1.x, arg1.y, arg2.x, arg2.y);
    } else {
        // Coordinate overload: one segment as the x1,y1,x2,y2 layout that
        // addPoints expects.
        let segment: number[] = [<number>arg1, <number>arg2, arg3, arg4];
        this.addPoints(segment, PathPointType.Line);
    }
}
/**
 * `add a path` specified by a path, appends the path specified to this one.
 * @param path The path, which should be appended.
 */
public addPath(path : PdfPath) : void
/**
 * `add a path` specified by a path points and path types.
 * @param pathPoints The array of points that represents the points to define the path
 * @param pathTypes The path types specifies the types of the corresponding points in the path.
 */
public addPath(pathPoints: PointF[], pathTypes: number[]) : void
public addPath(arg1: PointF[]|PdfPath, arg2?: number[]) : void {
    if (arg1 instanceof PdfPath) {
        // Path overload: append the other path's raw points/types.
        this.addPath((<PdfPath>arg1).pathPoints, arg1.pathTypes);
    } else {
        if ((arg1 == null)) {
            throw new Error('ArgumentNullException:pathPoints');
        }
        if ((arg2 == null)) {
            throw new Error('ArgumentNullException:pathTypes');
        }
        let count: number = arg1.length;
        if ((count !== arg2.length)) {
            throw new Error('The argument arrays should be of equal length.');
        }
        // BUGFIX: previously the arguments were validated but never appended,
        // making addPath (and the PdfPath overload that delegates here) a
        // no-op. Append each point with its corresponding type, mirroring
        // what addPoint does.
        for (let i: number = 0; i < count; i++) {
            this.points.push(arg1[i]);
            this.types.push(arg2[i]);
        }
    }
}
/**
 * `add a pie` specified by a rectangle, a coordinate start angle and sweepangle.
 * @param rectangle The bounding rectangle of the pie.
 * @param startAngle The start angle of the pie.
 * @param sweepAngle The sweep angle of the pie.
 */
public addPie(rectangle: RectangleF, startAngle: number, sweepAngle: number) : void
/**
 * `add a pie` specified by x , y coordinate points, a width, a height and start angle and sweepangle.
 * @param x The x-coordinate of the upper-left corner of the bounding rectangle.
 * @param y The y-coordinate of the upper-left corner of the bounding rectangle.
 * @param width The width of the bounding rectangle.
 * @param height The height of the bounding rectangle
 * @param startAngle The start angle of the pie.
 * @param sweepAngle The sweep angle of the pie.
 */
public addPie(x: number, y: number, width: number, height: number, startAngle: number, sweepAngle: number) : void
public addPie(arg1: number|RectangleF, arg2?: number, arg3?: number, arg4?: number, arg5?: number, arg6?: number) : void {
    if (arg1 instanceof RectangleF) {
        // Rectangle overload: unpack into the coordinate overload.
        this.addPie(arg1.x, arg1.y, arg1.width, arg1.height, arg2, arg3);
        return;
    }
    // A pie is a closed figure: the arc plus a line back to the centre of the
    // bounding box.
    this.startFigure();
    this.addArc(<number>arg1, arg2, arg3, arg4, arg5, arg6);
    let centerX: number = (<number>arg1 + (arg3 / 2));
    let centerY: number = (arg2 + (arg4 / 2));
    this.addPoint(new PointF(centerX, centerY), PathPointType.Line);
    this.closeFigure();
}
/**
 * `add a polygon` specified by points.
 * The polygon is added as a single closed figure of line segments.
 * @param points The points of the polygon
 */
public addPolygon(points: PointF[]) : void {
    this.startFigure();
    // Flatten PointF[] into the x0,y0,x1,y1,... layout addPoints expects.
    // (Removed an unused `count` local that was computed but never read.)
    let flattened: number[] = [];
    for (let point of points) {
        flattened.push(point.x, point.y);
    }
    this.addPoints(flattened, PathPointType.Line);
    this.closeFigure();
}
/**
 * `add a rectangle` specified by a rectangle.
 * @param rectangle The rectangle.
 */
public addRectangle(rectangle: RectangleF) : void
/**
 * `add a rectangle` specified by a rectangle.
 * @param x The x-coordinate of the upper-left corner of the rectangular region.
 * @param y The y-coordinate of the upper-left corner of the rectangular region
 * @param width The width of the rectangular region.
 * @param height The height of the rectangular region.
 */
public addRectangle(x: number, y: number, width: number, height: number) : void
public addRectangle(arg1: number|RectangleF, y?: number, width?: number, height?: number) : void {
    if (arg1 instanceof RectangleF) {
        // Rectangle overload: unpack into the coordinate overload.
        this.addRectangle(arg1.x, arg1.y, arg1.width, arg1.height);
        return;
    }
    // Closed figure of four corners in order: TL, TR, BR, BL.
    let left: number = <number>arg1;
    let right: number = (left + width);
    let bottom: number = (y + height);
    this.startFigure();
    this.addPoints([left, y, right, y, right, bottom, left, bottom], PathPointType.Line);
    this.closeFigure();
}
/**
 * Starts a new figure: the next point appended to the path will open a new
 * sub-path (addPoints emits it with `PathPointType.Start`).
 * @public
 */
public startFigure() : void {
    this.mStartFigure = true;
}
/**
 * Closed all non-closed figures.
 * Scans the path types and closes every sub-path that is followed by a new
 * `Start` point; for XPS output the final figure may also be closed.
 * @public
 */
public closeAllFigures() : void {
    let startPath: PointF = this.pathPoints[0];
    for (let i: number = 0 ; i < this.mpathTypes.length; i++) {
        let pt: PathPointType = (<PathPointType>((<number>(this.types[i]))));
        // NOTE(review): `flag` is re-initialised on every iteration and the
        // two branches are mutually exclusive, so `!flag` below is always
        // true — confirm whether it was meant to persist across iterations.
        let flag: boolean = false;
        if (((i !== 0) && (pt === PathPointType.Start))) {
            // A new sub-path starts at i: close the previous one.
            this.closeFigure((i - 1));
            flag = true;
        } else if (((i === (this.mpathTypes.length - 1)) && (!flag && this.isXps))) {
            // NOTE(review): this compares the first point's x against the
            // last point's y — it looks like it should compare matching
            // coordinates (or full point equality); verify intent before
            // changing.
            if ((startPath.x === this.pathPoints[i].y)) {
                this.closeFigure(i);
            }
        }
    }
}
/**
 * Gets the last point.
 * Returns a copy of the most recently added path point, or (0, 0) when the
 * path is empty.
 * @public
 */
public getLastPoint(): PointF {
    let result: PointF = new PointF(0, 0);
    let total: number = this.pointCount;
    if (total > 0 && this.mpoints != null) {
        // Copy the coordinates rather than returning the stored instance.
        let tail: PointF = this.mpoints[(total - 1)];
        result.x = tail.x;
        result.y = tail.y;
    }
    return result;
}
/**
 * Gets the bezier points for arc constructing.
 * Approximates the elliptical arc inscribed in the box (x1, y1)-(x2, y2),
 * starting at angle `s1` with sweep `e1` (degrees), as a flat array of cubic
 * Bezier segments — 8 numbers (start, ctrl1, ctrl2, end) per segment.
 * @public
 */
public getBezierArcPoints(x1: number, y1: number, x2: number, y2: number, s1: number, e1: number): number[] {
    // Normalise the corners so that x1 <= x2 and y1 >= y2.
    if (x1 > x2) {
        [x1, x2] = [x2, x1];
    }
    if (y2 > y1) {
        [y1, y2] = [y2, y1];
    }
    // Split the sweep into fragments of at most 90 degrees each; the cubic
    // approximation is only accurate for small sweeps.
    let fragAngle: number;
    let fragments: number;
    if (Math.abs(e1) <= 90) {
        fragAngle = e1;
        fragments = 1;
    } else {
        fragments = Math.ceil(Math.abs(e1) / 90);
        fragAngle = (e1 / fragments);
    }
    let centerX: number = ((x1 + x2) / 2);
    let centerY: number = ((y1 + y2) / 2);
    let radiusX: number = ((x2 - x1) / 2);
    let radiusY: number = ((y2 - y1) / 2);
    let halfAngle: number = (fragAngle * (Math.PI / 360));
    // Control-point distance factor for a cubic approximating this fragment.
    let kappa: number = Math.abs(4.0 / 3.0 * (1.0 - Math.cos(halfAngle)) / Math.sin(halfAngle));
    let result: number[] = [];
    for (let i: number = 0; i < fragments; i++) {
        let theta0: number = ((s1 + (i * fragAngle)) * (Math.PI / 180));
        let theta1: number = ((s1 + ((i + 1) * fragAngle)) * (Math.PI / 180));
        let cos0: number = Math.cos(theta0);
        let cos1: number = Math.cos(theta1);
        let sin0: number = Math.sin(theta0);
        let sin1: number = Math.sin(theta1);
        if (fragAngle > 0) {
            result.push(
                (centerX + (radiusX * cos0)), (centerY - (radiusY * sin0)),
                (centerX + (radiusX * (cos0 - (kappa * sin0)))), (centerY - (radiusY * (sin0 + (kappa * cos0)))),
                (centerX + (radiusX * (cos1 + (kappa * sin1)))), (centerY - (radiusY * (sin1 - (kappa * cos1)))),
                (centerX + (radiusX * cos1)), (centerY - (radiusY * sin1)));
        } else {
            // Negative sweep: mirror the control-point offsets.
            result.push(
                (centerX + (radiusX * cos0)), (centerY - (radiusY * sin0)),
                (centerX + (radiusX * (cos0 + (kappa * sin0)))), (centerY - (radiusY * (sin0 - (kappa * cos0)))),
                (centerX + (radiusX * (cos1 - (kappa * sin1)))), (centerY - (radiusY * (sin1 + (kappa * cos1)))),
                (centerX + (radiusX * cos1)), (centerY - (radiusY * sin1)));
        }
    }
    return result;
}
/**
 * `getBoundsInternal` Returns a rectangle that bounds this element.
 * Computed as the axis-aligned bounding box of all path points; an empty
 * path yields a zero rectangle at the origin.
 * @public
 */
public getBoundsInternal(): RectangleF {
    let allPoints: PointF[] = this.pathPoints;
    if (allPoints.length === 0) {
        return new RectangleF(0, 0, 0, 0);
    }
    let minX: number = allPoints[0].x;
    let maxX: number = allPoints[0].x;
    let minY: number = allPoints[0].y;
    let maxY: number = allPoints[0].y;
    for (let current of allPoints) {
        minX = Math.min(current.x, minX);
        maxX = Math.max(current.x, maxX);
        minY = Math.min(current.y, minY);
        maxY = Math.max(current.y, maxY);
    }
    return new RectangleF(minX, minY, (maxX - minX), (maxY - minY));
}
/**
 * `drawInternal` Draws an element on the Graphics.
 * @param graphics Graphics context where the element should be printed.
 * @throws Error when `graphics` is null or undefined.
 * @public
 */
public drawInternal(graphics: PdfGraphics) : void {
    if (graphics == null) {
        throw new Error('ArgumentNullException :graphics');
    }
    // Stroke with the element's pen and fill with its brush.
    graphics.drawPath(this.obtainPen(), this.brush, this);
}
/**
 * `add a points` Adds the points along with their type to the path.
 * @param points The points.
 * @param pointType Type of the points.
 * @private
 */
private addPoints(points: number[], pointType: PathPointType) : void
/**
 * `add a points` Adds the points along with their type to the path.
 * @param points The points.
 * @param pointType Type of the points.
 * @param startIndex The start index.
 * @param endIndex The end index.
 * @private
 */
private addPoints(points: number[], pointType: PathPointType, startIndex: number, endIndex: number) : void
private addPoints(points: number[], pointType: PathPointType, startIndex?: number, endIndex?: number) : void {
    if (typeof startIndex === 'undefined' && typeof endIndex === 'undefined') {
        // No range supplied: consume the whole flat x,y,... array.
        this.addPoints(points, pointType, 0, points.length);
    } else {
        // `points` is a flat array of coordinate pairs; each iteration
        // consumes two entries (the loop's `i++` plus the trailing `i++`).
        for (let i: number = startIndex; i < endIndex; i++) {
            let point: PointF = new PointF(points[i], points[(i + 1)]);
            if ((i === startIndex)) {
                // First pair of the batch: decide how it connects to the
                // existing path.
                if (((this.pointCount <= 0) || this.mStartFigure)) {
                    // Empty path, or startFigure() was called: open a sub-path.
                    this.addPoint(point, PathPointType.Start);
                    this.mStartFigure = false;
                } else if (((point.x !== this.lastPoint.x) && (point.y !== this.lastPoint.y) && !this.isBeziers3)) {
                    // NOTE(review): both connecting branches require x AND y to
                    // differ from lastPoint, so a point sharing exactly one
                    // coordinate is silently dropped — `||` may have been
                    // intended; confirm before changing.
                    this.addPoint(point, PathPointType.Line);
                } else if ((point.x !== this.lastPoint.x) && (point.y !== this.lastPoint.y)) {
                    this.addPoint(point, PathPointType.Bezier3);
                }
            } else {
                // Subsequent pairs carry the caller-supplied type.
                this.addPoint(point, pointType);
            }
            i++;
        }
    }
}
/**
 * `add a point` Adds the point and its type.
 * Appends one point and its numeric path-point type to the parallel
 * `points`/`types` arrays that define the path.
 * @param point The point to append.
 * @param pointType Type of the point.
 * @private
 */
private addPoint(point: PointF, pointType: PathPointType) : void {
    this.points.push(point);
    this.types.push((<number>(pointType)));
}
/**
 * Closes the figure.
 * @public
 */
public closeFigure() : void
/**
 * Closes the figure.
 * @param index The index of the last figure point.
 * @public
 */
public closeFigure(index: number) : void
public closeFigure(index?: number) : void {
    if (index === undefined) {
        // No index: close the current figure (if any) and begin a fresh one.
        if (this.pointCount > 0) {
            this.closeFigure(this.pointCount - 1);
        }
        this.startFigure();
        return;
    }
    if (index < 0) {
        throw new Error('IndexOutOfRangeException()');
    }
    // Mark the point at `index` as terminating its sub-path.
    let pointType: PathPointType = (<PathPointType>(<number>(this.types[index])));
    this.types[index] = (<number>(pointType | PathPointType.CloseSubpath));
}
} | the_stack |
import * as Ro from '@nakedobjects/restful-objects';
import { Dictionary } from 'lodash';
import forEach from 'lodash-es/forEach';
import fromPairs from 'lodash-es/fromPairs';
import keys from 'lodash-es/keys';
import last from 'lodash-es/last';
import map from 'lodash-es/map';
import mapKeys from 'lodash-es/mapKeys';
import mapValues from 'lodash-es/mapValues';
import reduce from 'lodash-es/reduce';
import { Command } from './Command';
import { CommandResult } from './command-result';
import * as Commandresult from './command-result';
import * as Usermessages from '../user-messages';
import { supportedDateFormats, validateDate, validateMandatory, validateMandatoryAgainstType } from '../validate';
/**
 * Cicero `enter` command: writes a value into a named field of the current
 * dialog, or of the object being edited or created.
 *
 * Takes exactly two arguments: the field name and the entry for it. An entry
 * of `?` renders the field's details instead of setting a value.
 */
export class Enter extends Command {
    shortCommand = 'en';
    fullCommand = Usermessages.enterCommand;
    helpText = Usermessages.enterHelp;
    protected minArguments = 2;
    protected maxArguments = 2;
    // Available whenever something with writable fields is on screen.
    isAvailableInCurrentContext(): boolean {
        return this.isDialog() || this.isEdit() || this.isTransient() || this.isForm();
    }
    doExecute(args: string | null, chained: boolean): Promise<CommandResult> {
        const fieldName = this.argumentAsString(args, 0);
        const fieldEntry = this.argumentAsString(args, 1, false, false);
        if (fieldName === undefined) {
            // NOTE(review): fieldName is undefined here, so the "does not
            // match" message is built from undefined — confirm it reads
            // sensibly to the user.
            return this.returnResult('', Usermessages.doesNotMatchDialog(fieldName));
        }
        if (fieldEntry === undefined) {
            return this.returnResult('', Usermessages.tooFewArguments);
        }
        // Dialog parameters and object properties are resolved differently.
        if (this.isDialog()) {
            return this.fieldEntryForDialog(fieldName, fieldEntry);
        } else {
            return this.fieldEntryForEdit(fieldName, fieldEntry);
        }
    }
    // Resolves `fieldName` against the edited object's properties and either
    // shows the field's details (entry '?') or sets the value.
    private fieldEntryForEdit(fieldName: string | undefined, fieldEntry: string) {
        return this.getObject().then(obj => {
            const fields = this.matchingProperties(obj, fieldName);
            switch (fields.length) {
                case 0:
                    const s = Usermessages.doesNotMatchProperties(fieldName);
                    return this.returnResult('', s);
                case 1:
                    const field = fields[0];
                    if (fieldEntry === '?') {
                        // TODO: does this work in edit mode i.e. show entered value
                        const details = this.renderFieldDetails(field, field.value());
                        return this.returnResult('', details);
                    } else {
                        // Changing this field invalidates any fields whose
                        // prompts depend on it.
                        this.findAndClearAnyDependentFields(field.id(), obj.propertyMembers());
                        return this.setField(field, fieldEntry);
                    }
                default:
                    // Ambiguous name: list all matching properties.
                    const ss = reduce(fields, (str, prop) => str + prop.extensions().friendlyName() + '\n', `${fieldName} ${Usermessages.matchesMultiple}`);
                    return this.returnResult('', ss);
            }
        });
    }
    // True when `possibleDependent`'s prompt takes `fieldName` as an argument,
    // i.e. its choices are conditional on that field's value.
    private isDependentField(fieldName: string, possibleDependent: Ro.IField): boolean {
        const promptLink = possibleDependent.promptLink();
        if (promptLink) {
            const pArgs = promptLink.arguments();
            const argNames = keys(pArgs);
            return (argNames.indexOf(fieldName.toLowerCase()) >= 0);
        }
        return false;
    }
    // Clears every non-multi-choice field that depends on `changingField`.
    // Multi-choice dependents are refreshed later (see
    // setFieldAndCheckDependencies) rather than cleared here.
    private findAndClearAnyDependentFields(changingField: string, allFields: Dictionary<Ro.IField>) {
        forEach(allFields, field => {
            if (this.isDependentField(changingField, field)) {
                if (!this.isMultiChoiceField(field)) {
                    this.clearField(field);
                }
            }
        });
    }
    // Resolves `fieldName` against the current dialog's parameters and either
    // shows the parameter's details (entry '?') or sets the value.
    private fieldEntryForDialog(fieldName: string, fieldEntry: string) {
        return this.getActionForCurrentDialog().then(action => {
            // Match against friendly name and/or menu path.
            let params = map(action.parameters(), param => param);
            params = this.matchFriendlyNameAndOrMenuPath(params, fieldName);
            switch (params.length) {
                case 0:
                    return this.returnResult('', Usermessages.doesNotMatchDialog(fieldName));
                case 1:
                    if (fieldEntry === '?') {
                        const p = params[0];
                        const value = Commandresult.getParametersAndCurrentValue(p.parent, this.context)[p.id()];
                        const s = this.renderFieldDetails(p, value);
                        return this.returnResult('', s);
                    } else {
                        this.findAndClearAnyDependentFields(fieldName, action.parameters());
                        return this.setField(params[0], fieldEntry);
                    }
                default:
                    return this.returnResult('', `${Usermessages.multipleFieldMatches} ${fieldName}`); // TODO: list them
            }
        });
    }
    // Caches a null value for the field, in the dialog or on the owning object.
    private clearField(field: Ro.IField): void {
        // NOTE(review): this unconditional call duplicates the Parameter
        // branch below and caches under the dialog even for PropertyMembers —
        // confirm whether it is intentional or a copy/paste remnant.
        this.context.cacheFieldValue(this.routeData().dialogId, field.id(), new Ro.Value(null));
        if (field instanceof Ro.Parameter) {
            this.context.cacheFieldValue(this.routeData().dialogId, field.id(), new Ro.Value(null));
        } else if (field instanceof Ro.PropertyMember) {
            const parent = field.parent as Ro.DomainObjectRepresentation;
            this.context.cachePropertyValue(parent, field, new Ro.Value(null));
        }
    }
    // Dispatches on the field's entry type to the appropriate handler.
    private setField(field: Ro.IField, fieldEntry: string) {
        if (field instanceof Ro.PropertyMember && field.disabledReason()) {
            return this.returnResult('', `${field.extensions().friendlyName()} ${Usermessages.isNotModifiable}`);
        }
        const entryType = field.entryType();
        switch (entryType) {
            case Ro.EntryType.FreeForm:
                return this.handleFreeForm(field, fieldEntry);
            case Ro.EntryType.AutoComplete:
                return this.handleAutoComplete(field, fieldEntry);
            case Ro.EntryType.Choices:
                return this.handleChoices(field, fieldEntry);
            case Ro.EntryType.MultipleChoices:
                return this.handleChoices(field, fieldEntry);
            case Ro.EntryType.ConditionalChoices:
                return this.handleConditionalChoices(field, false, fieldEntry);
            case Ro.EntryType.MultipleConditionalChoices:
                return this.handleConditionalChoices(field, false, fieldEntry);
            default:
                return this.returnResult('', Usermessages.invalidCase);
        }
    }
    // Scalar fields: validate (mandatory, date format, type/mask) then set.
    // Non-scalar free-form fields are treated as reference fields.
    private handleFreeForm(field: Ro.IField, fieldEntry: string) {
        if (field.isScalar()) {
            const mandatoryError = validateMandatory(field, fieldEntry);
            if (mandatoryError) {
                return this.returnResult('', this.validationMessage(mandatoryError, new Ro.Value(''), field.extensions().friendlyName()));
            }
            let value = new Ro.Value(fieldEntry);
            if (Ro.isDateOrDateTime(field)) {
                // Normalise recognised date entries to the wire format.
                const dt = validateDate(fieldEntry, supportedDateFormats);
                if (dt) {
                    value = new Ro.Value(Ro.toDateString(dt.toDate()));
                }
            }
            // if optional but empty always valid
            if (fieldEntry != null && fieldEntry !== '') {
                const remoteMask = field.extensions().mask();
                const localFilter = this.mask.toLocalFilter(remoteMask, field.extensions().format()!);
                const validateError = validateMandatoryAgainstType(field, fieldEntry, localFilter);
                if (validateError) {
                    return this.returnResult('', this.validationMessage(validateError, value, field.extensions().friendlyName()));
                }
            }
            this.setFieldValue(field, value);
            return this.returnResult('', '', () => this.urlManager.triggerPageReloadByFlippingReloadFlagInUrl());
        } else {
            return this.handleReferenceField(field, fieldEntry);
        }
    }
    // Persists the value into the appropriate cache (dialog parameter vs
    // object property), after converting it to its URL representation.
    private setFieldValue(field: Ro.IField, value: Ro.Value): void {
        const urlVal = this.valueForUrl(value, field);
        if (urlVal != null) {
            if (field instanceof Ro.Parameter) {
                this.setFieldValueInContext(field, urlVal);
            } else if (field instanceof Ro.PropertyMember) {
                const parent = field.parent;
                if (parent instanceof Ro.DomainObjectRepresentation) {
                    this.setPropertyValueinContext(parent, field, urlVal);
                }
            }
        }
    }
    // Reference fields only accept pasting from the Cicero clipboard.
    private handleReferenceField(field: Ro.IField, fieldEntry: string) {
        if (this.isPaste(fieldEntry)) {
            return this.handleClipboard(field);
        } else {
            return this.returnResult('', Usermessages.invalidRefEntry);
        }
    }
    // Accepts any leading abbreviation of the word 'paste' ('p', 'pa', ...).
    private isPaste(fieldEntry: string) {
        return 'paste'.indexOf(fieldEntry) === 0;
    }
    // Sets the field from the clipboard reference, provided the clipboard
    // object's type is assignable to the field's type.
    private handleClipboard(field: Ro.IField) {
        const ref = this.ciceroContext.ciceroClipboard;
        if (!ref) {
            return this.returnResult('', Usermessages.emptyClipboard);
        }
        const paramType = field.extensions().returnType()!;
        const refType = ref.domainType();
        return this.context.isSubTypeOf(refType, paramType).then(isSubType => {
            if (isSubType) {
                const obj = this.ciceroContext.ciceroClipboard as any;
                const selfLink = obj.selfLink();
                // Need to add a title to the SelfLink as not there by default
                selfLink.setTitle(obj.title());
                const value = new Ro.Value(selfLink);
                this.setFieldValue(field, value);
                return this.returnResult('', '', () => this.urlManager.triggerPageReloadByFlippingReloadFlagInUrl());
            } else {
                return this.returnResult('', Usermessages.incompatibleClipboard);
            }
        });
    }
    // Auto-complete fields: query the server for choices matching the entry,
    // then resolve as for any match set. 'paste' is honoured for references.
    private handleAutoComplete(field: Ro.IField, fieldEntry: string) {
        // TODO: Need to check that the minimum number of characters has been entered or fail validation
        if (!field.isScalar() && this.isPaste(fieldEntry)) {
            return this.handleClipboard(field);
        } else {
            return this.context.autoComplete(field, field.id(), () => ({}), fieldEntry).then((choices: Dictionary<Ro.Value>) => {
                const matches = this.findMatchingChoicesForRef(choices, fieldEntry);
                const allFields = Commandresult.getFields(field);
                return this.switchOnMatches(field, allFields, fieldEntry, matches);
            });
        }
    }
    // Fixed-choice fields: match the entry against the declared choices.
    private handleChoices(field: Ro.IField, fieldEntry: string) {
        let matches: Ro.Value[];
        if (field.isScalar()) {
            matches = this.findMatchingChoicesForScalar(field.choices(), fieldEntry);
        } else {
            matches = this.findMatchingChoicesForRef(field.choices(), fieldEntry);
        }
        const allFields = Commandresult.getFields(field);
        return this.switchOnMatches(field, allFields, fieldEntry, matches);
    }
    // Re-evaluates a conditional-choices field after one of its prompt
    // arguments changed (updating=true keeps the current entry if still valid).
    private updateDependentField(field: Ro.IField): Promise<CommandResult> {
        return this.handleConditionalChoices(field, true);
    }
    // Sets the field, then refreshes any multi-choice fields that depend on
    // it (non-multi-choice dependents were already cleared up front).
    private setFieldAndCheckDependencies(field: Ro.IField, allFields: Ro.IField[], match: Ro.Value): Promise<CommandResult[]> {
        this.setFieldValue(field, match);
        const promises: Promise<CommandResult>[] = [];
        // find any dependent multi choice fields and update
        // non multi choice we will have just cleared
        forEach(allFields, depField => {
            if (this.isMultiChoiceField(depField)) {
                if (this.isDependentField(field.id().toLowerCase(), depField)) {
                    promises.push(this.updateDependentField(depField));
                }
            }
        });
        promises.push(this.returnResult('', '', () => this.urlManager.triggerPageReloadByFlippingReloadFlagInUrl()));
        return Promise.all(promises);
    }
    // Resolves a set of candidate matches: none → error, one → set it,
    // several → list them back to the user.
    private switchOnMatches(field: Ro.IField, allFields: Ro.IField[], fieldEntry: string, matches: Ro.Value[]) {
        switch (matches.length) {
            case 0:
                return this.returnResult('', Usermessages.noMatch(fieldEntry));
            case 1:
                // TODO fix "!""
                return this.setFieldAndCheckDependencies(field, allFields, matches[0]).then((crs: CommandResult[]) => last(crs)!);
            default:
                let msg = Usermessages.multipleMatches;
                forEach(matches, m => msg += m.toString() + '\n');
                return this.returnResult('', msg);
        }
    }
    // Current property values for the object, with any cached (unsaved)
    // modifications overlaid; keys are lower-cased for prompt arguments.
    private getPropertiesAndCurrentValue(obj: Ro.DomainObjectRepresentation): Dictionary<Ro.Value> {
        const props = obj.propertyMembers();
        const values = mapValues(props, p => p.value());
        const modifiedProps = this.context.getObjectCachedValues(obj.id());
        forEach(values, (v, k) => {
            const newValue = modifiedProps[k];
            if (newValue) {
                values[k] = newValue;
            }
        });
        return mapKeys(values, (v, k) => k.toLowerCase());
    }
    // Like switchOnMatches but for a dependent-field refresh: no match means
    // the current value is no longer valid, so the field is set to null.
    private updateOnMatches(field: Ro.IField, allFields: Ro.IField[], fieldEntry: string, matches: Ro.Value[]) {
        switch (matches.length) {
            case 0:
            case 1:
                const match = matches.length === 0 ? new Ro.Value(null) : matches[0];
                // TODO fix "!""
                return this.setFieldAndCheckDependencies(field, allFields, match).then((crs: CommandResult[]) => last(crs)!);
            default:
                // shouldn't happen - ignore
                return this.returnResult('', '');
        }
    }
    // Conditional-choices fields: gather the current values of the prompt's
    // argument fields, fetch the valid choices, then match the entry (or the
    // existing value when refreshing a dependent field).
    private handleConditionalChoices(field: Ro.IField, updating: boolean, fieldEntry?: string, ) {
        let enteredFields: Dictionary<Ro.Value>;
        const allFields = Commandresult.getFields(field);
        if (field instanceof Ro.Parameter) {
            enteredFields = Commandresult.getParametersAndCurrentValue(field.parent, this.context);
        }
        if (field instanceof Ro.PropertyMember) {
            enteredFields = this.getPropertiesAndCurrentValue(field.parent as Ro.DomainObjectRepresentation);
        }
        // TODO fix this any cast
        const args = fromPairs(map(field.promptLink()!.arguments()! as any, (v: any, key: string) => [key, new Ro.Value(v.value)])) as Dictionary<Ro.Value>;
        // Overlay the user's current entries onto the prompt arguments.
        forEach(keys(args), key => args[key] = enteredFields[key]);
        let fieldEntryOrExistingValue: string;
        if (fieldEntry === undefined) {
            // Refresh mode: re-match the field's existing value.
            const def = args[field.id()];
            fieldEntryOrExistingValue = def ? def.toValueString() : '';
        } else {
            fieldEntryOrExistingValue = fieldEntry;
        }
        return this.context.conditionalChoices(field, field.id(), () => ({}), args).then((choices: Dictionary<Ro.Value>) => {
            const matches = this.findMatchingChoicesForRef(choices, fieldEntryOrExistingValue);
            if (updating) {
                return this.updateOnMatches(field, allFields, fieldEntryOrExistingValue, matches);
            }
            return this.switchOnMatches(field, allFields, fieldEntryOrExistingValue, matches);
        });
    }
    // Builds the multi-line description shown for a '?' entry: name,
    // description, type, and either the disabled reason or optionality plus
    // any fixed choices.
    private renderFieldDetails(field: Ro.IField, value: Ro.Value): string {
        const fieldName = Usermessages.fieldName(field.extensions().friendlyName());
        const desc = field.extensions().description();
        const descAndPrefix = desc ? `\n${Usermessages.descriptionFieldPrefix} ${desc}` : '';
        const types = `\n${Usermessages.typePrefix} ${Ro.friendlyTypeName(field.extensions().returnType()!)}`;
        let postFix = '';
        if (field instanceof Ro.PropertyMember && field.disabledReason()) {
            postFix = `\n${Usermessages.unModifiablePrefix(field.disabledReason())}`;
        } else {
            postFix = field.extensions().optional() ? `\n${Usermessages.optional}` : `\n${Usermessages.mandatory}`;
            const choices = field.choices();
            if (choices) {
                const label = `\n${Usermessages.choices}: `;
                const labelAndChoices = reduce(choices, (ss, cho) => ss + cho + ' ', label);
                postFix = `${postFix}${labelAndChoices}`;
            }
        }
        return `${fieldName}${descAndPrefix}${types}${postFix}`;
    }
}
import { cancelButton } from './widgets'
import { label } from './utils'
import { NamedNode } from 'rdflib'
import { store } from './logic'
/**
 * Internal element type for the tab body container.
 * @ignore
 */
class ContainerElement extends HTMLElement {
  // When true, the container currently shows a tab's settings view
  // (rendered via options.renderTabSettings on ALT+click).
  asSettings?: boolean
}
// Options accepted by tabWidget(); see the TSDoc on tabWidget for full details.
type TabWidgetOptions = {
  backgroundColor?: string // color for tabs and the container border; '#ddddcc' by default
  dom?: HTMLDocument // document used to create elements; defaults to the global document
  items?: Array<NamedNode> // explicit tab subjects; overrides subject/predicate lookup
  onClose?: (event: Event) => void // when set, a close button is rendered next to the tabs
  ordered?: boolean // whether subject/predicate triples form an RDF collection
  orientation?: '0' | '1' | '2' | '3' // tab side: 0 = top, 1 = left, 2 = bottom, 3 = right
  predicate?: NamedNode // predicate linking subject to the tab items
  renderMain?: (bodyMain: HTMLElement, subject: NamedNode) => void // renders the body for a tab
  renderTab?: (tabDiv: HTMLDivElement, subject: NamedNode) => void // renders a tab's label
  renderTabSettings?: (bodyMain: ContainerElement, subject: NamedNode) => void // renders the ALT+click settings view
  selectedTab?: string // matches a tab's dataset.name (set in renderTab) to pre-select it
  startEmpty?: boolean // when true, nothing is rendered in the body initially
  subject?: NamedNode // subject whose triples provide the tab items
}
// Root element returned by tabWidget(), exposing its internal containers.
export class TabWidgetElement extends HTMLElement {
  bodyContainer?: HTMLElement // the <main> element holding tab bodies
  refresh?: () => void // re-render hook (assigned during widget construction)
  tabContainer?: HTMLElement // the <ul> element holding the tab items
}
/**
 * Internal element type for a single tab, linking it to its subject and body.
 * @ignore
 */
class TabElement extends HTMLElement {
  bodyTR?: HTMLElement // body element associated with this tab
  subject?: NamedNode // the RDF subject this tab represents
}
/**
* Use this widget to generate tabs from triples set in the global store.
*
* [Here you can see examples of the tabs](https://solid.github.io/solid-ui/examples/tabs/).
*
* It assumes that items to use for tabs will be in a collection by default,
* e.g.:
*
* ```turtle
* :subject :predicate ( :item1 :item2 ) .
* ```
*
* You can override this by setting `ordered: false`, in which case it expects
* unordered triples:
*
* ```turtle
 * :subject :predicate :item1, :item2 .
* ```
*
* Triples that are not ordered in collection are in principle not sorted,
* which means that tabs could change order every time you render the widget.
* But in this case the widget will try to sort it in order to keep it
* consistent.
*
* In both of these cases you need to define options `subject` and `predicate`
* to tell the widget which triples it should be looking for.
*
* Finally you can set items manually, using the `items` option, e.g.:
*
* ```javascript
* {
* items: [
* namedNode('https://domain.tld/#item1'),
* namedNode('https://domain.tld/#item2')
* ]
* }
* ```
*
* When you set items manually you do not need to set `subject` and
* `predicate`.
*
* In any case you probably want to set the renderMain option to specify
* what should be rendered for the various items, e.g.:
*
* ```javascript
* {
* renderMain: (bodyMain, subject) => {
* bodyMain.innerHTML = renderItem(subject)
* }
* }
* ```
*
* **Note:** `renderItem` is a custom function that you need to define yourself.
*
* The option `renderTabSettings` allows you to render a custom view in the
* body container that is shown when you hold the ALT key and click on a
* tab. It works very much like the `renderMain` option:
*
* ```javascript
* {
* renderTabSettings: (bodyMain, subject) => {
* bodyMain.innerHTML = renderTabSettings(subject)
* }
* }
* ```
*
* **Note:** `renderTabSettings` is a custom function that you need to define
* yourself.
*
* By default the widget will try to guess the label by using the
* [[utils.label]] function. If you want to customize this yourself, you can
* use the `renderTab` option:
*
* ```javascript
* {
* renderTab: (tabDiv, subject) => {
* tabDiv.innerText = renderTabText(subject)
* }
* }
* ```
*
* **Note:** `renderTabText` is a custom function you need to define yourself.
*
* The option renderTab is also important if you want to set which tab should
* be selected once the widget is rendered. By default it will simply select
* the first tab, but you can override by setting `dataset.name` on the tab
* and referring to the same string in `selectedTab`:
*
* ```javascript
* {
* renderTab: (tabDiv, subject) => {
* tabDiv.dataset.name = subject.uri
* },
* selectedTab: item2.uri
* }
* ```
*
 * You can apply a color to use for tabs and border of the container by using
 * the `backgroundColor` option. This is `#ddddcc` by default.
*
* You can override the document object that the widget uses to generate DOM
* elements by setting the option `dom`. This is encouraged to set if you
* intend your functionality to be used in environments that don't provide
* a global `document` object.
*
* If you want to render a close button next to the tabs you can set option
* `onClose` which takes a callback function that is triggered when the
* button is clicked:
*
* ```javascript
* {
* onClose: (event) => {
* // do something that hides the widget altogether
* }
* }
* ```
*
* The option `orientation` allows you to set which side the tabs should be
* located: `'0'` = Top, `'1'` = Left, `'2'` = Bottom, `'3'` = Right
*
* If you don't want to render anything in the body container by default,
* you can set the option `startEmpty` to `true`.
*
* @param options
*/
export function tabWidget (options: TabWidgetOptions) {
  const subject = options.subject
  const dom = options.dom || document
  // Orientation encodes the tab-strip position: '0' top, '1' left, '2' bottom, '3' right.
  const orientation = parseInt(options.orientation || '0')
  const backgroundColor = options.backgroundColor || '#ddddcc'
  // Bit 1 (values 2, 3) reverses the flex direction; bit 0 (values 1, 3) lays tabs out vertically.
  const flipped = orientation & 2
  const vertical = orientation & 1
  const onClose = options.onClose
  // [selected tab colour, text colour] derived from the background colour.
  const [selectedColor, color] = getColors(backgroundColor)
  const bodyMainStyle = `flex: 2; width: auto; height: 100%; border: 0.1em; border-style: solid; border-color: ${selectedColor}; padding: 1em;`
  const rootElement: TabWidgetElement = dom.createElement('div') // 20200117a
  rootElement.setAttribute('style', 'display: flex; height: 100%; width: 100%; flex-direction: ' +
    (vertical ? 'row' : 'column') + (flipped ? '-reverse;' : ';'))
  const navElement = rootElement.appendChild(dom.createElement('nav'))
  navElement.setAttribute('style', 'margin: 0;')
  const mainElement = rootElement.appendChild(dom.createElement('main'))
  mainElement.setAttribute('style', 'margin: 0; width:100%; height: 100%;') // override tabbedtab.css
  const tabContainer = navElement.appendChild(dom.createElement('ul'))
  tabContainer.setAttribute('style', `
    list-style-type: none;
    display: flex;
    height: 100%;
    width: 100%;
    margin: 0;
    padding: 0;
    flex-direction: ${(vertical ? 'column' : 'row')}
  `)
  const tabElement = 'li'
  const bodyContainer = mainElement
  // Expose the two containers so callers (and `refresh`) can reach them later.
  rootElement.tabContainer = tabContainer
  rootElement.bodyContainer = bodyContainer
  // Rotate the corner radii and margins by `orientation` steps so the rounded
  // edge and the spacing always face away from the body container.
  const corners = ['0.2em', '0.2em', '0', '0'] // top left, TR, BR, BL
  const cornersPrepped = corners.concat(corners).slice(orientation, orientation + 4)
  const cornersStyle = `border-radius: ${cornersPrepped.join(' ')};`
  const margins = ['0.3em', '0.3em', '0', '0.3em'] // top, right, bottom, left
  const marginsPrepped = margins.concat(margins).slice(orientation, orientation + 4)
  const marginsStyle = `margin: ${marginsPrepped.join(' ')};`
  const paddingStyle = `padding: ${marginsPrepped.join(' ')};`
  const tabStyle = cornersStyle + `padding: 0.7em; max-width: 20em; color: ${color};`
  const unselectedStyle = `${tabStyle + marginsStyle}opacity: 50%; background-color: ${backgroundColor};`
  const selectedStyle = `${tabStyle + marginsStyle}background-color: ${selectedColor};`
  const shownStyle = 'height: 100%; width: 100%;'
  const hiddenStyle = shownStyle + 'display: none;'
  rootElement.refresh = orderedSync
  orderedSync()
  // Select an initial tab unless the caller asked for an empty body.
  if (!options.startEmpty && tabContainer.children.length && options.selectedTab) {
    // Prefer the tab whose dataset.name matches options.selectedTab; fall back to the first.
    const selectedTab = Array.from(tabContainer.children)
      .map(tab => tab.firstChild as HTMLElement)
      .find(tab => tab.dataset.name === options.selectedTab)
    const tab = selectedTab || tabContainer.children[0].firstChild as HTMLButtonElement
    tab.click()
  } else if (!options.startEmpty) {
    // NOTE(review): this branch assumes at least one tab exists; with zero
    // items it would throw on children[0] — confirm callers never hit that.
    (tabContainer.children[0].firstChild as HTMLButtonElement).click() // Open first tab
  }
  return rootElement
  // Appends (or re-appends, keeping it last) the close-button pseudo-tab when
  // an `onClose` callback was supplied.
  function addCancelButton (tabContainer) {
    if (tabContainer.dataset.onCloseSet) {
      // @@ TODO: this is only here to make the browser tests work
      // Discussion at https://github.com/solid/solid-ui/pull/110#issuecomment-527080663
      const existingCancelButton = tabContainer.querySelector('.unstyled')
      tabContainer.removeChild(existingCancelButton)
    }
    const extraTab = dom.createElement(tabElement)
    extraTab.classList.add('unstyled')
    const tabCancelButton = cancelButton(dom, onClose)
    tabCancelButton.setAttribute('style', tabCancelButton.getAttribute('style') + paddingStyle)
    extraTab.appendChild(tabCancelButton)
    tabContainer.appendChild(extraTab)
    tabContainer.dataset.onCloseSet = 'true'
  }
  // Resolves the list of subjects to render tabs for: either given directly as
  // `options.items`, read as an ordered collection, or gathered unordered.
  function getItems (): Array<NamedNode> {
    if (options.items) return options.items
    if (options.ordered !== false) {
      // options.ordered defaults to true
      return (store.the(subject, options.predicate) as any).elements
    } else {
      return store.each(subject, options.predicate) as any
    }
  }
  // Builds one tab element for `item` and wires the click handler that shows
  // the matching body pane (or the settings renderer when alt-clicked).
  function makeNewSlot (item: NamedNode) {
    const ele = dom.createElement(tabElement) as TabElement
    ele.subject = item
    const div = ele.appendChild(dom.createElement('div'))
    div.setAttribute('style', unselectedStyle)
    div.addEventListener('click', function (e) {
      // Meta-click skips the reset, leaving other panes/tabs as they are.
      if (!e.metaKey) {
        resetTabStyle()
        resetBodyStyle()
      }
      div.setAttribute('style', selectedStyle)
      if (!ele.bodyTR) return
      ele.bodyTR.setAttribute('style', shownStyle)
      const bodyMain = getOrCreateContainerElement(ele)
      // Only re-render when switching between the main view and the settings
      // view (asSettings tracks which renderer last filled this pane).
      if (options.renderTabSettings && e.altKey && ele.subject && bodyMain.asSettings !== true) {
        bodyMain.innerHTML = 'loading settings ...' + item
        options.renderTabSettings(bodyMain, ele.subject)
        bodyMain.asSettings = true
      } else if (options.renderMain && ele.subject && bodyMain.asSettings !== false) {
        bodyMain.innerHTML = 'loading item ...' + item
        options.renderMain(bodyMain, ele.subject)
        bodyMain.asSettings = false
      }
    })
    if (options.renderTab) {
      options.renderTab(div, item)
    } else {
      div.textContent = label(item)
    }
    return ele
    // Returns the pane's container element, creating and styling it on first use.
    function getOrCreateContainerElement (ele: TabElement): ContainerElement {
      const bodyMain = ele.bodyTR?.children[0] as ContainerElement
      if (bodyMain) return bodyMain
      const newBodyMain = ele.bodyTR!.appendChild(dom.createElement('main'))
      newBodyMain.setAttribute('style', bodyMainStyle)
      return newBodyMain
    }
  }
  // @@ Use common one from utils?
  // Incrementally reconciles existing tabs/panes with the current item list:
  // keeps the matching prefix and suffix, replaces only the differing middle.
  function orderedSync () {
    const items = getItems()
    let slot: TabElement, i, j, left, right
    let differ = false
    // Find how many match at each end
    for (left = 0; left < tabContainer.children.length; left++) {
      slot = tabContainer.children[left] as TabElement
      if (
        left >= items.length ||
        (slot.subject && !slot.subject.sameTerm(items[left]))
      ) {
        differ = true
        break
      }
    }
    if (!differ && items.length === tabContainer.children.length) {
      return // The two just match in order: a case to optimize for
    }
    // Scan from the end; `j` maps a child index to its counterpart in `items`.
    for (right = tabContainer.children.length - 1; right >= 0; right--) {
      slot = tabContainer.children[right] as TabElement
      j = right - tabContainer.children.length + items.length
      if (slot.subject && !slot.subject.sameTerm(items[j])) {
        break
      }
    }
    // The elements left ... right in tabContainer.children do not match
    const insertables = items.slice(left, right - tabContainer.children.length + items.length + 1)
    while (right >= left) {
      // remove extra
      tabContainer.removeChild(tabContainer.children[left])
      bodyContainer.removeChild(bodyContainer.children[left])
      right -= 1
    }
    for (i = 0; i < insertables.length; i++) {
      const newSlot = makeNewSlot(insertables[i])
      const newBodyDiv = dom.createElement('div')
      newSlot.bodyTR = newBodyDiv
      if (left === tabContainer.children.length) {
        // None left of original on right
        tabContainer.appendChild(newSlot)
        bodyContainer.appendChild(newBodyDiv)
      } else {
        tabContainer.insertBefore(newSlot, tabContainer.children[left + i])
        bodyContainer.insertBefore(newBodyDiv, bodyContainer.children[left + i])
      }
    }
    // Keep the close button as the last tab after any re-sync.
    if (onClose) {
      addCancelButton(tabContainer)
    }
  }
  // Marks every regular tab unselected (skips the 'unstyled' close-button tab).
  function resetTabStyle () {
    for (let i = 0; i < tabContainer.children.length; i++) {
      const tab = tabContainer.children[i]
      if (tab.classList.contains('unstyled')) {
        continue
      }
      if (tab.children[0]) {
        tab.children[0].setAttribute('style', unselectedStyle)
      }
    }
  }
  // Hides every body pane.
  function resetBodyStyle () {
    for (let i = 0; i < bodyContainer.children.length; i++) {
      bodyContainer.children[i].setAttribute('style', hiddenStyle)
    }
  }
}
/**
 * @internal
 *
 * Derives the pair [selected-tab colour, text colour] for a background
 * colour: light backgrounds get a whiter blend with black text, dark
 * backgrounds a blacker blend with white text.
 */
function getColors (backgroundColor: string): [string, string] {
  if (isLight(backgroundColor)) {
    return [colorBlend(backgroundColor, '#ffffff', 0.3), '#000000']
  }
  return [colorBlend(backgroundColor, '#000000', 0.3), '#ffffff']
}
/**
 * @internal
 *
 * Blends two colours channel by channel.
 *
 * @param a first colour in `#rrggbb` form
 * @param b second colour in `#rrggbb` form
 * @param mix blend factor in [0, 1]; 0 yields `a`, 1 yields `b`
 * @returns the blended colour as a lowercase `#rrggbb` string
 */
function colorBlend (a: string, b: string, mix: number): string {
  let blended = '#'
  for (let i = 0; i < 3; i++) {
    const ca = parseInt(a.slice(i * 2 + 1, i * 2 + 3), 16)
    const cb = parseInt(b.slice(i * 2 + 1, i * 2 + 3), 16)
    // Truncate toward zero (channel values are non-negative), matching the
    // original string-chopping behaviour, then emit two hex digits.
    const channel = Math.floor(ca * (1.0 - mix) + cb * mix)
    blended += channel.toString(16).padStart(2, '0')
  }
  return blended
}
/**
* @internal
*/
function isLight (x: string): boolean {
let total = 0
for (let i = 0; i < 3; i++) {
total += parseInt(x.slice(i * 2 + 1, i * 2 + 3), 16)
}
return total > 128 * 3
} | the_stack |
import { OAuth2PopupFlow } from './';
// Shape of the decoded JWT payload used by the tests below.
interface ExampleTokenPayload {
  exp: number; // expiry time in unix seconds
  foo: string; // arbitrary test fields
  bar: number;
}
/**
 * Creates an in-memory stand-in for `window.localStorage`; tests can seed and
 * inspect values directly through the exposed `_storage` backing object.
 *
 * Fix: `length` was previously computed once at creation time and therefore
 * stayed 0 forever; it is now a getter returning the live key count, matching
 * the DOM `Storage.length` contract.
 */
function createTestStorage() {
  const _storage: { [key: string]: string | undefined | null } = {};
  return {
    clear: () => {
      for (const key of Object.keys(_storage)) {
        delete _storage[key];
      }
    },
    getItem: (key: string) => _storage[key] || null,
    key: (index: number) => Object.keys(_storage)[index] || null,
    // Live count of stored keys (was a stale snapshot taken at creation).
    get length() {
      return Object.keys(_storage).length;
    },
    removeItem: (key: string) => {
      delete _storage[key];
    },
    setItem: (key: string, value: string) => {
      _storage[key] = value;
    },
    _storage,
  };
}
describe('OAuth2PopupFlow', () => {
  // Static helper coverage: JSON parsing, timer promise, URI encode/decode.
  describe('jsonParseOrUndefined', () => {
    it('returns parsed JSON when valid', () => {
      const validJson = '{"a": "some value", "b": 5}';
      const parsed = OAuth2PopupFlow.jsonParseOrUndefined<{
        a: string;
        b: number;
      }>(validJson)!;
      expect(parsed).toBeDefined();
      expect(parsed.a).toBe('some value');
      expect(parsed.b).toBe(5);
    });
    it('returns undefined when the JSON is invalid', () => {
      const invalidJson = 'this aint json';
      const parsed = OAuth2PopupFlow.jsonParseOrUndefined(invalidJson);
      expect(parsed).toBeUndefined();
    });
  });
  describe('time', () => {
    it('calls `setTimeout` and returns `TIMER`', async () => {
      // Race a 10ms timer against a 5ms promise (5 wins), then a 0ms timer
      // against it ('TIMER' wins) to show `time` resolves after the delay.
      function fiveMilliseconds() {
        return new Promise<5>((resolve) => setTimeout(() => resolve(5), 5));
      }
      const race = await Promise.race([
        OAuth2PopupFlow.time(10),
        fiveMilliseconds(),
      ]);
      expect(race).toBe(5);
      const otherRace = await Promise.race([
        OAuth2PopupFlow.time(0),
        fiveMilliseconds(),
      ]);
      expect(otherRace).toBe('TIMER');
    });
  });
  describe('decodeUri', () => {
    it('calls `decodeURIComponent` and returns its result', () => {
      const result = OAuth2PopupFlow.decodeUri('hello%20world');
      expect(result).toBe('hello world');
    });
    it('catches `decodeURIComponent` and returns the original string', () => {
      // A lone '%' is malformed, so decodeURIComponent throws internally.
      const notParsable = '%';
      const result = OAuth2PopupFlow.decodeUri(notParsable);
      expect(result).toBe(notParsable);
    });
  });
  describe('encodeObjectToUri', () => {
    it('encodes plain ole javascript objects of strings to a URI component', () => {
      const javascriptObject = { foo: 'some value', bar: 'some other value' };
      const encoded = OAuth2PopupFlow.encodeObjectToUri(javascriptObject);
      expect(encoded).toBe('foo=some%20value&bar=some%20other%20value');
    });
  });
  describe('decodeUriToObject', () => {
    it('decodes a URI into an object of strings', () => {
      const uri = 'foo=some%20value&bar=some%20other%20value';
      const decoded = OAuth2PopupFlow.decodeUriToObject(uri);
      expect(decoded).toEqual({ foo: 'some value', bar: 'some other value' });
    });
  });
  // Constructor: every option is passed through verbatim, and sensible
  // defaults apply when an option is omitted.
  describe('constructor', () => {
    it('creates instances from the `OAuth2PopupFlowOptions` object', () => {
      function beforePopup() {}
      function afterResponse() {}
      function tokenValidator() {
        return true;
      }
      const additionalAuthorizationParameters = { foo: 'bar' };
      const storage = createTestStorage();
      const options = {
        accessTokenResponseKey: 'test_response_key',
        accessTokenStorageKey: 'test_storage_key',
        additionalAuthorizationParameters,
        authorizationUri: 'http://example.com/oauth/authorize',
        beforePopup,
        clientId: 'test_client_id',
        pollingTime: Math.random(),
        redirectUri: 'http://localhost:8080/redirect',
        responseType: 'test_token',
        scope: 'test scope',
        storage,
        tokenValidator,
        afterResponse,
      };
      const auth = new OAuth2PopupFlow(options);
      // Callbacks and objects must be the same references, not copies.
      expect(auth.accessTokenResponseKey).toBe(options.accessTokenResponseKey);
      expect(auth.accessTokenStorageKey).toBe(options.accessTokenStorageKey);
      expect(auth.additionalAuthorizationParameters).toBe(
        additionalAuthorizationParameters,
      );
      expect(auth.authorizationUri).toBe(options.authorizationUri);
      expect(auth.beforePopup).toBe(beforePopup);
      expect(auth.clientId).toBe(options.clientId);
      expect(auth.pollingTime).toBe(options.pollingTime);
      expect(auth.redirectUri).toBe(options.redirectUri);
      expect(auth.responseType).toBe(options.responseType);
      expect(auth.scope).toBe(options.scope);
      expect(auth.storage).toBe(storage);
      expect(auth.tokenValidator).toBe(tokenValidator);
      expect(auth.afterResponse).toBe(afterResponse);
    });
    it('uses the default `responseType` of `token` when none is present', () => {
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'test_client_id',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'test scope',
      };
      const auth = new OAuth2PopupFlow(options);
      expect(auth.responseType).toBe('token');
    });
    it('uses the default `accessTokenStorageKey` of `token` when none is present', () => {
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'test_client_id',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'test scope',
      };
      const auth = new OAuth2PopupFlow(options);
      expect(auth.accessTokenStorageKey).toBe('token');
    });
    it('uses the default `accessTokenResponseKey` of `access_token` when none is present', () => {
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'test_client_id',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'test scope',
      };
      const auth = new OAuth2PopupFlow(options);
      expect(auth.accessTokenResponseKey).toBe('access_token');
    });
    it('uses the default `storage` of `window.localStorage` when none is present', () => {
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'test_client_id',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'test scope',
      };
      const auth = new OAuth2PopupFlow(options);
      expect(auth.storage).toBe(window.localStorage);
    });
    it('uses the default `pollingTime` of `200` when none is present', () => {
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'test_client_id',
        redirectUri: 'http://localhost:8080/redirect',
        responseType: 'test_token',
        scope: 'test scope',
      };
      const auth = new OAuth2PopupFlow(options);
      expect(auth.pollingTime).toBe(200);
    });
  });
  // `_rawToken` accessor: reads the token from storage, normalizes falsy
  // values to undefined, and ignores nullish assignments.
  describe('_rawToken', () => {
    it('gets the raw token from storage', () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      storage._storage.token = 'test_token';
      expect(auth['_rawToken']).toBe('test_token');
    });
    it('returns `undefined` if the value in storage is falsy', () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      storage._storage.token = '';
      expect(auth['_rawToken']).toBeUndefined();
      storage._storage.token = null;
      expect(auth['_rawToken']).toBeUndefined();
    });
    it("doesn't allow `null` or `undefined` to be assigned to storage but allows strings", () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      storage._storage.token = 'initial value';
      // Nullish assignments are ignored; the stored value is untouched.
      auth['_rawToken'] = undefined;
      expect(storage._storage.token).toBe('initial value');
      (auth as any)['_rawToken'] = null;
      expect(storage._storage.token).toBe('initial value');
      // Any string — even the empty string — is written through.
      auth['_rawToken'] = '';
      expect(storage._storage.token).toBe('');
      auth['_rawToken'] = 'something';
      expect(storage._storage.token).toBe('something');
    });
  });
  // `_rawTokenPayload` accessor: decodes the middle (payload) section of the
  // stored JWT, returning undefined on any malformed input.
  describe('_rawTokenPayload', () => {
    it('returns `undefined` if the `_rawToken` is falsy', () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      storage._storage.token = undefined;
      expect(auth['_rawTokenPayload']).toBeUndefined();
      storage._storage.token = null;
      expect(auth['_rawTokenPayload']).toBeUndefined();
      storage._storage.token = '';
      expect(auth['_rawTokenPayload']).toBeUndefined();
    });
    it("returns `undefined` if it couldn't find the encoded payload in the token", () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      // No dots, so there is no payload section at all.
      storage._storage.token = 'non-proper JWT';
      expect(auth['_rawTokenPayload']).toBeUndefined();
    });
    it("returns `undefined` if it couldn't parse the JSON in the encoded payload", () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      // Three sections present, but the middle one is not base64-encoded JSON.
      storage._storage.token = [
        'non proper JWT',
        'this is the payload section',
        'this is the signature section',
      ].join('.');
      expect(auth['_rawTokenPayload']).toBeUndefined();
    });
    it('returns a proper decoded payload', () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000),
      };
      storage._storage.token = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      expect(auth['_rawTokenPayload']).toEqual(examplePayload);
    });
  });
  // `loggedIn()`: requires a decodable payload, a truthy non-past `exp`, and
  // (when provided) an approving `tokenValidator`.
  describe('loggedIn', () => {
    it('returns `false` if the `_rawTokenPayload` is undefined', () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      storage._storage.token = undefined;
      expect(auth.loggedIn()).toBe(false);
    });
    it('passes through the `tokenValidator` with `true`', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000) + 1000, // not yet expired
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      let tokenValidatorCalled = false;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
        tokenValidator: ({ token, payload }) => {
          // The validator receives both the raw token and the decoded payload.
          expect(token).toBe(exampleToken);
          expect(payload).toEqual(examplePayload);
          tokenValidatorCalled = true;
          return true;
        },
      });
      expect(auth.loggedIn()).toBe(true);
      expect(tokenValidatorCalled).toBe(true);
    });
    it('returns `false` if there is a `tokenValidator` and that returns false', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000),
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      let tokenValidatorCalled = false;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
        tokenValidator: ({ token, payload }) => {
          expect(token).toBe(exampleToken);
          expect(payload).toEqual(examplePayload);
          tokenValidatorCalled = true;
          return false; // veto the otherwise-valid token
        },
      });
      expect(auth.loggedIn()).toBe(false);
      expect(tokenValidatorCalled).toBe(true);
    });
    it('returns `false` if the `exp` in the payload is falsy', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: 0, // falsy expiry — treated as not logged in
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      expect(auth.loggedIn()).toBe(false);
    });
    it('returns `false` if the token is expired', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000) - 1000, // expired in the past
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      expect(auth.loggedIn()).toBe(false);
    });
    it('returns `true` if the token is good', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000) + 1000,
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      expect(auth.loggedIn()).toBe(true);
    });
  });
  // `tokenExpired()`: true only for a decodable token with a truthy `exp`
  // strictly in the past; undecodable or `exp`-less tokens report false.
  describe('tokenExpired', () => {
    it('returns `false` if the `_rawTokenPayload` is undefined', () => {
      const storage = createTestStorage();
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      storage._storage.token = undefined;
      expect(auth.tokenExpired()).toBe(false);
    });
    it('returns `false` if the `exp` in the payload is falsy', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: 0, // falsy expiry — "expired" cannot be determined
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      expect(auth.tokenExpired()).toBe(false);
    });
    it('returns `false` if the token is not expired', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000) + 1000,
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      expect(auth.tokenExpired()).toBe(false);
    });
    it('returns `true` if the token is expired', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000) - 1000,
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      expect(auth.tokenExpired()).toBe(true);
    });
  });
  // `logout()`: clears the stored token and notifies 'logout' listeners.
  describe('logout', () => {
    it('should remove the token from storage', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000) + 1000,
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      expect(auth.loggedIn()).toBe(true);
      auth.logout();
      expect(auth.loggedIn()).toBe(false);
    });
    it('should dispatch a logout event', () => {
      const storage = createTestStorage();
      const examplePayload = {
        foo: 'something',
        bar: 5,
        exp: Math.floor(new Date().getTime() / 1000) + 1000,
      };
      const exampleToken = [
        'blah blah header',
        window.btoa(JSON.stringify(examplePayload)),
        'this is the signature section',
      ].join('.');
      storage._storage.token = exampleToken;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      });
      const handler = jest.fn();
      auth.addEventListener('logout', handler);
      expect(auth.loggedIn()).toBe(true);
      auth.logout();
      expect(auth.loggedIn()).toBe(false);
      // Exactly one event per logout call.
      expect(handler).toBeCalledTimes(1);
    });
  });
  // `handleRedirect()`: parses the access token out of `window.location.hash`
  // on the redirect page; each early-exit code is exercised in turn.
  describe('handleRedirect', () => {
    it("returns early with `REDIRECT_URI_MISMATCH` if location doesn't match the redirect", () => {
      const storage = createTestStorage();
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: 'http://localhost:8080/redirect',
        scope: 'openid profile',
        storage,
      };
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
      window.location.hash = 'something%20else';
      const result = auth.handleRedirect();
      expect(result).toBe('REDIRECT_URI_MISMATCH');
    });
    it('returns early with `FALSY_HASH` if the hash is falsy', () => {
      const storage = createTestStorage();
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: '', // empty redirectUri so the location always "matches"
        scope: 'openid profile',
        storage,
      };
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
      window.location.hash = '';
      const result = auth.handleRedirect();
      expect(result).toBe('FALSY_HASH');
    });
    // // this test won't pass because the js-dom environment will always add the `#` to the string
    // it('returns early with `NO_HASH_MATCH` if hash doesn\'t match /#(.*)/', () => {
    //   const storage = createTestStorage();
    //   const options = {
    //     authorizationUri: 'http://example.com/oauth/authorize',
    //     clientId: 'some_test_client',
    //     redirectUri: '',
    //     scope: 'openid profile',
    //     storage,
    //   };
    //   const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
    //   window.location.hash = 'shouldn\t match';
    //   const result = auth.handleRedirect();
    //   expect(result).toBe('NO_HASH_MATCH');
    // });
    it('calls `afterResponse` with the `decodeUriToObject`', () => {
      const storage = createTestStorage();
      let afterResponseCalled = false;
      const objectToEncode = {
        access_token: 'fake access token',
        one: 'something',
        two: 'something else',
      };
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: '',
        scope: 'openid profile',
        storage,
        afterResponse: (obj: { [key: string]: string | undefined }) => {
          // The full decoded hash object is handed to the hook.
          expect(obj).toEqual(objectToEncode);
          afterResponseCalled = true;
        },
      };
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
      window.location.hash = `#${OAuth2PopupFlow.encodeObjectToUri(
        objectToEncode,
      )}`;
      const result = auth.handleRedirect();
      expect(result).toBe('SUCCESS');
      expect(afterResponseCalled).toBe(true);
    });
    it('returns early with `false` if `rawToken` is falsy', () => {
      const storage = createTestStorage();
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: '',
        scope: 'openid profile',
        storage,
      };
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
      window.location.hash = `#${OAuth2PopupFlow.encodeObjectToUri({
        access_token: '',
      })}`;
      const result = auth.handleRedirect();
      expect(result).toBe('FALSY_TOKEN');
    });
    it('returns `SUCCESS` setting the `_rawToken` and clearing the hash if token is valid', () => {
      const storage = createTestStorage();
      const options = {
        authorizationUri: 'http://example.com/oauth/authorize',
        clientId: 'some_test_client',
        redirectUri: '',
        scope: 'openid profile',
        storage,
      };
      window.location.hash = `#${OAuth2PopupFlow.encodeObjectToUri({
        access_token: 'some token thing',
      })}`;
      const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
      const result = auth.handleRedirect();
      expect(result).toBe('SUCCESS');
      // Token is persisted and the hash is wiped so it can't be re-processed.
      expect(storage.getItem('token')).toBe('some token thing');
      expect(window.location.hash).toBe('');
    });
  });
describe('tryLoginPopup', () => {
it('returns `ALREADY_LOGGED_IN` if already `loggedIn()`', async () => {
const storage = createTestStorage();
const examplePayload = {
foo: 'something',
bar: 5,
exp: Math.floor(new Date().getTime() / 1000) + 1000,
};
const exampleToken = [
'blah blah header',
window.btoa(JSON.stringify(examplePayload)),
'this is the signature section',
].join('.');
storage._storage.token = exampleToken;
const options = {
authorizationUri: 'http://example.com/oauth/authorize',
clientId: 'some_test_client',
redirectUri: '',
scope: 'openid profile',
storage,
};
const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
expect(auth.loggedIn()).toBe(true);
expect(await auth.tryLoginPopup()).toBe('ALREADY_LOGGED_IN');
});
it("doesn't call `beforePopup` if it doesn't exist", async () => {
const storage = createTestStorage();
(window as any).open = () => undefined;
const options = {
authorizationUri: 'http://example.com/oauth/authorize',
clientId: 'some_test_client',
redirectUri: '',
scope: 'openid profile',
storage,
};
const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
expect(auth.loggedIn()).toBe(false);
expect(await auth.tryLoginPopup()).toBe('POPUP_FAILED');
});
it('calls `beforePopup` synchronously', async () => {
const storage = createTestStorage();
(window as any).open = () => undefined;
let beforePopupCalled = false;
const options = {
authorizationUri: 'http://example.com/oauth/authorize',
clientId: 'some_test_client',
redirectUri: '',
scope: 'openid profile',
storage,
beforePopup: () => {
beforePopupCalled = true;
},
};
const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
expect(auth.loggedIn()).toBe(false);
expect(await auth.tryLoginPopup()).toBe('POPUP_FAILED');
expect(beforePopupCalled).toBe(true);
});
it('calls `beforePopup` asynchronously', async () => {
const storage = createTestStorage();
(window as any).open = () => undefined;
let beforePopupCalled = false;
const options = {
authorizationUri: 'http://example.com/oauth/authorize',
clientId: 'some_test_client',
redirectUri: '',
scope: 'openid profile',
storage,
beforePopup: async () => {
expect(await OAuth2PopupFlow.time(0)).toBe('TIMER');
beforePopupCalled = true;
},
};
const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
expect(auth.loggedIn()).toBe(false);
expect(await auth.tryLoginPopup()).toBe('POPUP_FAILED');
expect(beforePopupCalled).toBe(true);
});
it('calls `additionalAuthorizationParameters` if it is a function', async () => {
  const storage = createTestStorage();

  // Intercept `window.open` to inspect the authorization URL; returning
  // nothing makes the popup "fail" so the flow ends immediately after.
  let openCalled = false;
  (window as any).open = (url: string) => {
    expect(url.includes('foo=bar')).toBe(true);
    openCalled = true;
  };

  const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
    authorizationUri: 'http://example.com/oauth/authorize',
    clientId: 'some_test_client',
    redirectUri: '',
    scope: 'openid profile',
    storage,
    // Function form: should be invoked and its result merged into the URL.
    additionalAuthorizationParameters: () => ({ foo: 'bar' }),
  });

  expect(await auth.tryLoginPopup()).toBe('POPUP_FAILED');
  expect(openCalled).toBe(true);
});
it('uses `additionalAuthorizationParameters` if it is an object', async () => {
  const storage = createTestStorage();

  // As above, but the extra parameters are given as a plain object rather
  // than a factory function; both forms must end up in the URL.
  let openCalled = false;
  (window as any).open = (url: string) => {
    expect(url.includes('foo=bar')).toBe(true);
    openCalled = true;
  };

  const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
    authorizationUri: 'http://example.com/oauth/authorize',
    clientId: 'some_test_client',
    redirectUri: '',
    scope: 'openid profile',
    storage,
    additionalAuthorizationParameters: { foo: 'bar' },
  });

  expect(await auth.tryLoginPopup()).toBe('POPUP_FAILED');
  expect(openCalled).toBe(true);
});
// Fix: test title read "fires and event" — corrected to "fires an event".
it('returns `SUCCESS` and calls `close` on the popup and fires an event', async () => {
  const storage = createTestStorage();
  let closedCalled = false;
  let resolve!: () => void;
  // Resolved by the 'login' event listener below; awaited at the end to
  // prove the event actually fired.
  const eventCalled = new Promise((thisResolve) => (resolve = thisResolve));
  // The fake popup only needs a `close` method so we can assert the flow
  // closes it after a successful login.
  (window as any).open = () => ({
    close: () => {
      closedCalled = true;
    },
  });
  const options = {
    authorizationUri: 'http://example.com/oauth/authorize',
    clientId: 'some_test_client',
    redirectUri: '',
    scope: 'openid profile',
    storage,
  };
  // Token with `exp` ~1000s in the future so it is considered valid.
  const examplePayload = {
    foo: 'something',
    bar: 5,
    exp: Math.floor(new Date().getTime() / 1000) + 1000,
  };
  const exampleToken = [
    'blah blah header',
    window.btoa(JSON.stringify(examplePayload)),
    'this is the signature section',
  ].join('.');
  const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
  auth.addEventListener('login', resolve);
  // Simulate the popup completing: after a tick, drop a valid token into
  // storage so `tryLoginPopup` observes the login succeeding.
  OAuth2PopupFlow.time(0).then(() => {
    storage._storage.token = exampleToken;
  });
  expect(auth.loggedIn()).toBe(false);
  expect(await auth.tryLoginPopup()).toBe('SUCCESS');
  expect(closedCalled).toBe(true);
  await eventCalled;
});
});
describe('authenticated', () => {
  it('only resolves after a `loggedIn()` is truthy', async () => {
    const storage = createTestStorage();
    const auth = new OAuth2PopupFlow<ExampleTokenPayload>({
      authorizationUri: 'http://example.com/oauth/authorize',
      clientId: 'some_test_client',
      redirectUri: '',
      scope: 'openid profile',
      storage,
    });

    // Build a token whose `exp` lies ~1000s in the future so it counts as
    // a live login once it lands in storage.
    const payload = {
      foo: 'something',
      bar: 5,
      exp: Math.floor(new Date().getTime() / 1000) + 1000,
    };
    const token = [
      'blah blah header',
      window.btoa(JSON.stringify(payload)),
      'this is the signature section',
    ].join('.');

    // Install the token only after a short delay…
    OAuth2PopupFlow.time(10).then(() => {
      storage._storage.token = token;
    });

    expect(auth.loggedIn()).toBe(false);
    // …so this only resolves once the flow observes the new token. The
    // test times out (and fails) if `authenticated()` never resolves.
    await auth.authenticated();
  });
});
describe('token', () => {
  it('returns the `_rawToken` if `loggedIn()`', async () => {
    const storage = createTestStorage();
    const options = {
      authorizationUri: 'http://example.com/oauth/authorize',
      clientId: 'some_test_client',
      redirectUri: '',
      scope: 'openid profile',
      storage,
    };
    const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
    // A token with a future `exp` makes `loggedIn()` true, so `token()`
    // should resolve with the raw token string as stored.
    const examplePayload = {
      foo: 'something',
      bar: 5,
      exp: Math.floor(new Date().getTime() / 1000) + 1000,
    };
    const exampleToken = [
      'blah blah header',
      window.btoa(JSON.stringify(examplePayload)),
      'this is the signature section',
    ].join('.');
    storage._storage.token = exampleToken;
    const token = await auth.token();
    expect(token).toEqual(exampleToken);
  });
  it('throws if `_rawToken` was falsy after being authenticated', async () => {
    const storage = createTestStorage();
    const options = {
      authorizationUri: 'http://example.com/oauth/authorize',
      clientId: 'some_test_client',
      redirectUri: '',
      scope: 'openid profile',
      storage,
    };
    const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
    expect(auth.loggedIn()).toBe(false);
    // Stub out `authenticated` so `token()` proceeds without a real login
    // and hits the falsy-token guard.
    spyOn(auth, 'authenticated');
    let catchCalled = false;
    try {
      await auth.token();
    } catch (e) {
      // Fix: `catch` bindings are `unknown` under strict TS
      // (useUnknownInCatchVariables) — narrow before reading `.message`.
      expect(e).toBeInstanceOf(Error);
      expect((e as Error).message).toBe('Token was falsy after being authenticated.');
      catchCalled = true;
    } finally {
      expect(catchCalled).toBe(true);
    }
  });
});
describe('tokenPayload', () => {
  it('returns the `_rawToken` if `loggedIn()`', async () => {
    const storage = createTestStorage();
    const options = {
      authorizationUri: 'http://example.com/oauth/authorize',
      clientId: 'some_test_client',
      redirectUri: '',
      scope: 'openid profile',
      storage,
    };
    const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
    // A token with a future `exp` makes `loggedIn()` true;
    // `tokenPayload()` should decode the middle (payload) segment.
    const examplePayload = {
      foo: 'something',
      bar: 5,
      exp: Math.floor(new Date().getTime() / 1000) + 1000,
    };
    const exampleToken = [
      'blah blah header',
      window.btoa(JSON.stringify(examplePayload)),
      'this is the signature section',
    ].join('.');
    storage._storage.token = exampleToken;
    const payload = await auth.tokenPayload();
    expect(payload).toEqual(examplePayload);
  });
  it('throws if `_rawToken` was falsy after being authenticated', async () => {
    const storage = createTestStorage();
    const options = {
      authorizationUri: 'http://example.com/oauth/authorize',
      clientId: 'some_test_client',
      redirectUri: '',
      scope: 'openid profile',
      storage,
    };
    const auth = new OAuth2PopupFlow<ExampleTokenPayload>(options);
    expect(auth.loggedIn()).toBe(false);
    // Stub out `authenticated` so `tokenPayload()` proceeds without a real
    // login and hits the falsy-payload guard.
    spyOn(auth, 'authenticated');
    let catchCalled = false;
    try {
      await auth.tokenPayload();
    } catch (e) {
      // Fix: `catch` bindings are `unknown` under strict TS
      // (useUnknownInCatchVariables) — narrow before reading `.message`.
      expect(e).toBeInstanceOf(Error);
      expect((e as Error).message).toBe(
        'Token payload was falsy after being authenticated.',
      );
      catchCalled = true;
    } finally {
      expect(catchCalled).toBe(true);
    }
  });
});
describe('EventTarget', () => {
  // Every test needs a fresh flow instance; none of them care about the
  // storage contents, only about the event plumbing.
  const makeAuth = () =>
    new OAuth2PopupFlow<ExampleTokenPayload>({
      authorizationUri: 'http://example.com/oauth/authorize',
      clientId: 'some_test_client',
      redirectUri: '',
      scope: 'openid profile',
      storage: createTestStorage(),
    });

  // Dispatches the 'login' event `times` times in a row.
  const fireLogin = (auth: OAuth2PopupFlow<ExampleTokenPayload>, times: number) => {
    for (let i = 0; i < times; i += 1) {
      auth.dispatchEvent(new Event('login'));
    }
  };

  it('allows events to be listened to and dispatched', () => {
    const handler = jest.fn();
    const auth = makeAuth();
    auth.addEventListener('login', handler);
    fireLogin(auth, 3);
    expect(handler).toBeCalledTimes(3);
  });

  it('allows event listeners to be removed', () => {
    const handler = jest.fn();
    const auth = makeAuth();
    auth.addEventListener('login', handler);
    fireLogin(auth, 3);
    auth.removeEventListener('login', handler);
    // The dispatch after removal must not reach the handler.
    fireLogin(auth, 1);
    expect(handler).toBeCalledTimes(3);
  });

  it("doesn't throw when the type of event doesn't exist", () => {
    const auth = makeAuth();
    // Removing a listener that was never added is a silent no-op.
    auth.removeEventListener('login', () => {});
  });

  it('allows for an event handler object', () => {
    const auth = makeAuth();
    const handler = jest.fn();
    // EventListenerObject form: dispatch calls `handleEvent` instead of
    // invoking the listener directly.
    auth.addEventListener('login', { handleEvent: handler });
    fireLogin(auth, 3);
    expect(handler).toBeCalledTimes(3);
  });

  it('defaults to a no-op when there is nothing callable', () => {
    const auth = makeAuth();
    const handler = jest.fn();
    // An object with neither a callable form nor `handleEvent` should be
    // silently ignored on dispatch.
    auth.addEventListener('login', {} as any);
    fireLogin(auth, 3);
    expect(handler).toBeCalledTimes(0);
  });
});
}); | the_stack |
import { BaseResource, CloudError, AzureServiceClientOptions } from "@azure/ms-rest-azure-js";
import * as msRest from "@azure/ms-rest-js";
export { BaseResource, CloudError };
/**
* Display name of operation
*/
export interface OperationDisplay {
/**
* The resource provider name: Microsoft.MachineLearningExperimentation
*/
provider?: string;
/**
* The resource on which the operation is performed.
*/
resource?: string;
/**
* The operation that users can perform.
*/
operation?: string;
/**
* The description for the operation.
*/
description?: string;
}
/**
* Azure Machine Learning workspace REST API operation
*/
export interface Operation {
/**
* Operation name: {provider}/{resource}/{operation}
*/
name?: string;
/**
* Display name of operation
*/
display?: OperationDisplay;
}
/**
 * An interface representing NotebookListCredentialsResult.
 */
export interface NotebookListCredentialsResult {
  // Primary access key for the notebook resource.
  primaryAccessKey?: string;
  // Secondary access key for the notebook resource.
  secondaryAccessKey?: string;
}
/**
 * An interface representing NotebookPreparationError.
 */
export interface NotebookPreparationError {
  // Human-readable message describing the preparation failure.
  errorMessage?: string;
  // HTTP-style status code associated with the failure.
  statusCode?: number;
}
/**
* An interface representing NotebookResourceInfo.
*/
export interface NotebookResourceInfo {
fqdn?: string;
/**
 * The data plane resourceId that is used to initialize the notebook component.
*/
resourceId?: string;
/**
* The error that occurs when preparing notebook.
*/
notebookPreparationError?: NotebookPreparationError;
}
/**
* An interface representing KeyVaultProperties.
*/
export interface KeyVaultProperties {
/**
* The ArmId of the keyVault where the customer owned encryption key is present.
*/
keyVaultArmId: string;
/**
* Key vault uri to access the encryption key.
*/
keyIdentifier: string;
/**
* For future use - The client id of the identity which will be used to access key vault.
*/
identityClientId?: string;
}
/**
* An interface representing EncryptionProperty.
*/
export interface EncryptionProperty {
/**
* Indicates whether or not the encryption is enabled for the workspace. Possible values include:
* 'Enabled', 'Disabled'
*/
status: EncryptionStatus;
/**
* Customer Key vault properties.
*/
keyVaultProperties: KeyVaultProperties;
}
/**
* The Private Endpoint resource.
*/
export interface PrivateEndpoint {
/**
* The ARM identifier for Private Endpoint
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
}
/**
* A collection of information about the state of the connection between service consumer and
* provider.
*/
export interface PrivateLinkServiceConnectionState {
/**
* Indicates whether the connection has been Approved/Rejected/Removed by the owner of the
* service. Possible values include: 'Pending', 'Approved', 'Rejected', 'Disconnected', 'Timeout'
*/
status?: PrivateEndpointServiceConnectionStatus;
/**
* The reason for approval/rejection of the connection.
*/
description?: string;
/**
* A message indicating if changes on the service provider require any updates on the consumer.
*/
actionsRequired?: string;
}
/**
* The Private Endpoint Connection resource.
*/
export interface PrivateEndpointConnection extends BaseResource {
/**
* ResourceId of the private endpoint connection.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* Friendly name of the private endpoint connection.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* Resource type of private endpoint connection.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* The resource of private end point.
*/
privateEndpoint?: PrivateEndpoint;
/**
* A collection of information about the state of the connection between service consumer and
* provider.
*/
privateLinkServiceConnectionState: PrivateLinkServiceConnectionState;
/**
* The provisioning state of the private endpoint connection resource. Possible values include:
* 'Succeeded', 'Creating', 'Deleting', 'Failed'
*/
provisioningState?: PrivateEndpointConnectionProvisioningState;
}
/**
* An interface representing SharedPrivateLinkResource.
*/
export interface SharedPrivateLinkResource {
/**
* Unique name of the private link.
*/
name?: string;
/**
* The resource id that private link links to.
*/
privateLinkResourceId?: string;
/**
* The private link resource group id.
*/
groupId?: string;
/**
* Request message.
*/
requestMessage?: string;
/**
* Indicates whether the connection has been Approved/Rejected/Removed by the owner of the
* service. Possible values include: 'Pending', 'Approved', 'Rejected', 'Disconnected', 'Timeout'
*/
status?: PrivateEndpointServiceConnectionStatus;
}
/**
* Azure Resource Manager resource envelope.
*/
export interface Resource extends BaseResource {
/**
* Specifies the resource ID.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* Specifies the name of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The identity of the resource.
*/
identity?: Identity;
/**
* Specifies the location of the resource.
*/
location?: string;
/**
* Specifies the type of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* Contains resource tags defined as key/value pairs.
*/
tags?: { [propertyName: string]: string };
/**
* The sku of the workspace.
*/
sku?: Sku;
}
/**
* An object that represents a machine learning workspace.
*/
export interface Workspace extends Resource {
/**
* The immutable id associated with this workspace.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly workspaceId?: string;
/**
* The description of this workspace.
*/
description?: string;
/**
 * The friendly name for this workspace. This name is mutable
*/
friendlyName?: string;
/**
* The creation time of the machine learning workspace in ISO8601 format.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly creationTime?: Date;
/**
* ARM id of the key vault associated with this workspace. This cannot be changed once the
* workspace has been created
*/
keyVault?: string;
/**
* ARM id of the application insights associated with this workspace. This cannot be changed once
* the workspace has been created
*/
applicationInsights?: string;
/**
* ARM id of the container registry associated with this workspace. This cannot be changed once
* the workspace has been created
*/
containerRegistry?: string;
/**
* ARM id of the storage account associated with this workspace. This cannot be changed once the
* workspace has been created
*/
storageAccount?: string;
/**
* Url for the discovery service to identify regional endpoints for machine learning
* experimentation services
*/
discoveryUrl?: string;
/**
* The current deployment state of workspace resource. The provisioningState is to indicate
* states for resource provisioning. Possible values include: 'Unknown', 'Updating', 'Creating',
* 'Deleting', 'Succeeded', 'Failed', 'Canceled'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly provisioningState?: ProvisioningState;
/**
* The encryption settings of Azure ML workspace.
*/
encryption?: EncryptionProperty;
/**
* The flag to signal HBI data in the workspace and reduce diagnostic data collected by the
* service. Default value: false.
*/
hbiWorkspace?: boolean;
/**
* The name of the managed resource group created by workspace RP in customer subscription if the
* workspace is CMK workspace
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly serviceProvisionedResourceGroup?: string;
/**
* Count of private connections in the workspace
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly privateLinkCount?: number;
/**
* The compute name for image build
*/
imageBuildCompute?: string;
/**
* The flag to indicate whether to allow public access when behind VNet. Default value: false.
*/
allowPublicAccessWhenBehindVnet?: boolean;
/**
* The list of private endpoint connections in the workspace.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly privateEndpointConnections?: PrivateEndpointConnection[];
/**
* The list of shared private link resources in this workspace.
*/
sharedPrivateLinkResources?: SharedPrivateLinkResource[];
/**
* The notebook info of Azure ML workspace.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly notebookInfo?: NotebookResourceInfo;
}
/**
* Sku of the resource
*/
export interface Sku {
/**
* Name of the sku
*/
name?: string;
/**
* Tier of the sku like Basic or Enterprise
*/
tier?: string;
}
/**
* The parameters for updating a machine learning workspace.
*/
export interface WorkspaceUpdateParameters {
/**
* The resource tags for the machine learning workspace.
*/
tags?: { [propertyName: string]: string };
/**
* The sku of the workspace.
*/
sku?: Sku;
/**
* The description of this workspace.
*/
description?: string;
/**
* The friendly name for this workspace.
*/
friendlyName?: string;
}
/**
* Features enabled for a workspace
*/
export interface AmlUserFeature {
/**
* Specifies the feature ID
*/
id?: string;
/**
* Specifies the feature name
*/
displayName?: string;
/**
* Describes the feature for user experience
*/
description?: string;
}
/**
* The Usage Names.
*/
export interface UsageName {
/**
* The name of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly value?: string;
/**
* The localized name of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly localizedValue?: string;
}
/**
* Describes AML Resource Usage.
*/
export interface Usage {
/**
* Specifies the resource ID.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* Specifies the resource type.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* An enum describing the unit of usage measurement. Possible values include: 'Count'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly unit?: UsageUnit;
/**
* The current usage of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly currentValue?: number;
/**
* The maximum permitted usage of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly limit?: number;
/**
* The name of the type of usage.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: UsageName;
}
/**
* The estimated price info for using a VM of a particular OS type, tier, etc.
*/
export interface EstimatedVMPrice {
/**
* Retail price. The price charged for using the VM.
*/
retailPrice: number;
/**
* OS type. Operating system type used by the VM. Possible values include: 'Linux', 'Windows'
*/
osType: VMPriceOSType;
/**
* VM tier. The type of the VM. Possible values include: 'Standard', 'LowPriority', 'Spot'
*/
vmTier: VMTier;
}
/**
* The estimated price info for using a VM.
*/
export interface EstimatedVMPrices {
/**
* List of estimated VM prices. The list of estimated prices for using a VM of a particular OS
* type, tier, etc.
*/
values: EstimatedVMPrice[];
}
/**
* Describes the properties of a VM size.
*/
export interface VirtualMachineSize {
/**
* Virtual Machine size name. The name of the virtual machine size.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* Virtual Machine family name. The family name of the virtual machine size.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly family?: string;
/**
 * Number of vCPUs. The number of vCPUs supported by the virtual machine size.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly vCPUs?: number;
/**
* Number of gPUs. The number of gPUs supported by the virtual machine size.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly gpus?: number;
/**
* OS VHD Disk size. The OS VHD disk size, in MB, allowed by the virtual machine size.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly osVhdSizeMB?: number;
/**
* Resource volume size. The resource volume size, in MB, allowed by the virtual machine size.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly maxResourceVolumeMB?: number;
/**
* Memory size. The amount of memory, in GB, supported by the virtual machine size.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly memoryGB?: number;
/**
* Low priority capable. Specifies if the virtual machine size supports low priority VMs.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly lowPriorityCapable?: boolean;
/**
* Premium IO supported. Specifies if the virtual machine size supports premium IO.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly premiumIO?: boolean;
/**
* Estimated VM prices. The estimated price information for using a VM.
*/
estimatedVMPrices?: EstimatedVMPrices;
/**
* Supported Compute Types. Specifies the compute types supported by the virtual machine size.
*/
supportedComputeTypes?: string[];
}
/**
* The List Virtual Machine size operation response.
*/
export interface VirtualMachineSizeListResult {
/**
* The list of virtual machine sizes supported by AmlCompute.
*/
amlCompute?: VirtualMachineSize[];
}
/**
* The properties for Quota update or retrieval.
*/
export interface QuotaBaseProperties {
/**
* Specifies the resource ID.
*/
id?: string;
/**
* Specifies the resource type.
*/
type?: string;
/**
* Limit. The maximum permitted quota of the resource.
*/
limit?: number;
/**
* An enum describing the unit of quota measurement. Possible values include: 'Count'
*/
unit?: QuotaUnit;
}
/**
* Quota update parameters.
*/
export interface QuotaUpdateParameters {
/**
* The list for update quota.
*/
value?: QuotaBaseProperties[];
}
/**
* The properties for update Quota response.
*/
export interface UpdateWorkspaceQuotas {
/**
* Specifies the resource ID.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* Specifies the resource type.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* Limit. The maximum permitted quota of the resource.
*/
limit?: number;
/**
* An enum describing the unit of quota measurement. Possible values include: 'Count'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly unit?: QuotaUnit;
/**
* Update Workspace Quota Status. Status of update workspace quota. Possible values include:
* 'Undefined', 'Success', 'Failure', 'InvalidQuotaBelowClusterMinimum',
* 'InvalidQuotaExceedsSubscriptionLimit', 'InvalidVMFamilyName', 'OperationNotSupportedForSku',
* 'OperationNotEnabledForRegion'
*/
status?: Status;
}
/**
* The result of update workspace quota.
*/
export interface UpdateWorkspaceQuotasResult {
/**
* The list of workspace quota update result.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly value?: UpdateWorkspaceQuotas[];
/**
* The URI to fetch the next page of workspace quota update result. Call ListNext() with this to
* fetch the next page of Workspace Quota update result.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* The Resource Name.
*/
export interface ResourceName {
/**
* The name of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly value?: string;
/**
* The localized name of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly localizedValue?: string;
}
/**
* The quota assigned to a resource.
*/
export interface ResourceQuota {
/**
* Specifies the resource ID.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* Specifies the resource type.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* Name of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: ResourceName;
/**
* Limit. The maximum permitted quota of the resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly limit?: number;
/**
* An enum describing the unit of quota measurement. Possible values include: 'Count'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly unit?: QuotaUnit;
}
/**
* An interface representing IdentityUserAssignedIdentitiesValue.
*/
export interface IdentityUserAssignedIdentitiesValue {
/**
* The principal id of user assigned identity.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly principalId?: string;
/**
* The client id of user assigned identity.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly clientId?: string;
}
/**
* Identity for the resource.
*/
export interface Identity {
/**
* The principal ID of resource identity.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly principalId?: string;
/**
* The tenant ID of resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly tenantId?: string;
/**
* The identity type. Possible values include: 'SystemAssigned', 'UserAssigned',
* 'SystemAssigned,UserAssigned', 'None'
*/
type: ResourceIdentityType;
/**
* The list of user identities associated with resource. The user identity dictionary key
* references will be ARM resource ids in the form:
* '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
*/
userAssignedIdentities?: { [propertyName: string]: IdentityUserAssignedIdentitiesValue };
}
/**
* Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
*/
export interface ResourceId extends BaseResource {
/**
* The ID of the resource
*/
id: string;
}
/**
* An interface representing Password.
*/
export interface Password {
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly value?: string;
}
/**
* An interface representing RegistryListCredentialsResult.
*/
export interface RegistryListCredentialsResult {
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly location?: string;
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly username?: string;
passwords?: Password[];
}
/**
* An interface representing ListWorkspaceKeysResult.
*/
export interface ListWorkspaceKeysResult {
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly userStorageKey?: string;
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly userStorageResourceId?: string;
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly appInsightsInstrumentationKey?: string;
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly containerRegistryCredentials?: RegistryListCredentialsResult;
notebookAccessKeys?: NotebookListCredentialsResult;
}
/**
* Error detail information.
*/
export interface ErrorDetail {
/**
* Error code.
*/
code: string;
/**
* Error message.
*/
message: string;
}
/**
* Error response information.
*/
export interface ErrorResponse {
/**
* Error code.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly code?: string;
/**
* Error message.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly message?: string;
/**
* An array of error detail objects.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly details?: ErrorDetail[];
}
/**
* Wrapper for error response to follow ARM guidelines.
*/
export interface MachineLearningServiceError {
/**
* The error response.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly error?: ErrorResponse;
}
/**
* Contains the possible cases for Compute.
*/
export type ComputeUnion = Compute | AKS | AmlCompute | ComputeInstance | VirtualMachine | HDInsight | DataFactory | Databricks | DataLakeAnalytics;
/**
* Machine Learning compute object.
*/
export interface Compute {
/**
* Polymorphic Discriminator
*/
computeType: "Compute";
/**
* Location for the underlying compute
*/
computeLocation?: string;
/**
* The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
* Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
* 'Succeeded', 'Failed', 'Canceled'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly provisioningState?: ProvisioningState;
/**
* The description of the Machine Learning compute.
*/
description?: string;
/**
* The date and time when the compute was created.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly createdOn?: Date;
/**
* The date and time when the compute was last modified.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly modifiedOn?: Date;
/**
* ARM resource id of the underlying compute
*/
resourceId?: string;
/**
* Errors during provisioning
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly provisioningErrors?: MachineLearningServiceError[];
/**
* Indicating whether the compute was provisioned by user and brought from outside if true, or
* machine learning service provisioned it if false.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly isAttachedCompute?: boolean;
}
/**
* Machine Learning compute object wrapped into ARM resource envelope.
*/
export interface ComputeResource extends Resource {
/**
* Compute properties
*/
properties?: ComputeUnion;
}
/**
* A system service running on a compute.
*/
export interface SystemService {
/**
* The type of this system service.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly systemServiceType?: string;
/**
* Public IP address
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly publicIpAddress?: string;
/**
* The version for this type.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly version?: string;
}
/**
 * The ssl configuration for scoring.
 */
export interface SslConfiguration {
  /**
   * Enable or disable ssl for scoring. Possible values include: 'Disabled', 'Enabled'
   */
  status?: Status1;
  /**
   * Cert data.
   */
  cert?: string;
  /**
   * Key data.
   */
  key?: string;
  /**
   * CNAME of the cert.
   */
  cname?: string;
}
/**
 * Advance configuration for AKS networking.
 */
export interface AksNetworkingConfiguration {
  /**
   * Virtual network subnet resource ID the compute nodes belong to.
   */
  subnetId?: string;
  /**
   * A CIDR notation IP range from which to assign service cluster IPs. It must not overlap with
   * any Subnet IP ranges.
   */
  serviceCidr?: string;
  /**
   * An IP address assigned to the Kubernetes DNS service. It must be within the Kubernetes service
   * address range specified in serviceCidr.
   */
  dnsServiceIP?: string;
  /**
   * A CIDR notation IP range assigned to the Docker bridge network. It must not overlap with any
   * Subnet IP ranges or the Kubernetes service address range.
   */
  dockerBridgeCidr?: string;
}
/**
 * AKS properties.
 */
export interface AKSProperties {
  /**
   * Cluster fully qualified domain name.
   */
  clusterFqdn?: string;
  /**
   * System services running on the cluster.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly systemServices?: SystemService[];
  /**
   * Number of agents.
   */
  agentCount?: number;
  /**
   * Agent virtual machine size.
   */
  agentVMSize?: string;
  /**
   * SSL configuration for scoring endpoints.
   */
  sslConfiguration?: SslConfiguration;
  /**
   * AKS networking configuration for vnet.
   */
  aksNetworkingConfiguration?: AksNetworkingConfiguration;
}
/**
 * A Machine Learning compute based on AKS.
 */
export interface AKS {
  /**
   * Polymorphic Discriminator
   */
  computeType: "AKS";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
  /**
   * AKS properties
   */
  properties?: AKSProperties;
}
/**
 * Scale settings for AML Compute.
 */
export interface ScaleSettings {
  /**
   * Max number of nodes to use.
   */
  maxNodeCount: number;
  /**
   * Min number of nodes to use. Default value: 0.
   */
  minNodeCount?: number;
  /**
   * Node idle time before scaling down amlCompute.
   */
  nodeIdleTimeBeforeScaleDown?: string;
}
/**
 * Settings for the user account that gets created on each of the nodes of a compute.
 */
export interface UserAccountCredentials {
  /**
   * User name. Name of the administrator user account which can be used to SSH to nodes.
   */
  adminUserName: string;
  /**
   * SSH public key. SSH public key of the administrator user account.
   */
  adminUserSshPublicKey?: string;
  /**
   * Password. Password of the administrator user account.
   */
  adminUserPassword?: string;
}
/**
 * Counts of various compute node states on the amlCompute.
 */
export interface NodeStateCounts {
  /**
   * Idle node count. Number of compute nodes in idle state.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly idleNodeCount?: number;
  /**
   * Running node count. Number of compute nodes which are running jobs.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly runningNodeCount?: number;
  /**
   * Preparing node count. Number of compute nodes which are being prepared.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly preparingNodeCount?: number;
  /**
   * Unusable node count. Number of compute nodes which are in unusable state.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly unusableNodeCount?: number;
  /**
   * Leaving node count. Number of compute nodes which are leaving the amlCompute.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly leavingNodeCount?: number;
  /**
   * Preempted node count. Number of compute nodes which are in preempted state.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly preemptedNodeCount?: number;
}
/**
 * AML Compute properties.
 */
export interface AmlComputeProperties {
  /**
   * Virtual Machine Size.
   */
  vmSize?: string;
  /**
   * Virtual Machine priority. Possible values include: 'Dedicated', 'LowPriority'
   */
  vmPriority?: VmPriority;
  /**
   * Scale settings for AML Compute.
   */
  scaleSettings?: ScaleSettings;
  /**
   * User account credentials. Credentials for an administrator user account that will be created
   * on each compute node.
   */
  userAccountCredentials?: UserAccountCredentials;
  /**
   * Subnet. Virtual network subnet resource ID the compute nodes belong to.
   */
  subnet?: ResourceId;
  /**
   * Close remote Login Access Port. State of the public SSH port. Possible values are: Disabled -
   * Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - Indicates
   * that the public ssh port is open on all nodes of the cluster. NotSpecified - Indicates that
   * the public ssh port is closed on all nodes of the cluster if VNet is defined, else is open all
   * public nodes. It can be default only during cluster creation time, after creation it will be
   * either enabled or disabled. Possible values include: 'Enabled', 'Disabled', 'NotSpecified'.
   * Default value: 'NotSpecified'.
   */
  remoteLoginPortPublicAccess?: RemoteLoginPortPublicAccess;
  /**
   * Allocation state. Allocation state of the compute. Possible values are: steady - Indicates
   * that the compute is not resizing. There are no changes to the number of compute nodes in the
   * compute in progress. A compute enters this state when it is created and when no operations are
   * being performed on the compute to change the number of compute nodes. resizing - Indicates
   * that the compute is resizing; that is, compute nodes are being added to or removed from the
   * compute. Possible values include: 'Steady', 'Resizing'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly allocationState?: AllocationState;
  /**
   * Allocation state transition time. The time at which the compute entered its current allocation
   * state.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly allocationStateTransitionTime?: Date;
  /**
   * Errors. Collection of errors encountered by various compute nodes during node setup.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly errors?: MachineLearningServiceError[];
  /**
   * Current node count. The number of compute nodes currently assigned to the compute.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly currentNodeCount?: number;
  /**
   * Target node count. The target number of compute nodes for the compute. If the allocationState
   * is resizing, this property denotes the target node count for the ongoing resize operation. If
   * the allocationState is steady, this property denotes the target node count for the previous
   * resize operation.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly targetNodeCount?: number;
  /**
   * Node state counts. Counts of various node states on the compute.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly nodeStateCounts?: NodeStateCounts;
}
/**
 * An Azure Machine Learning compute.
 */
export interface AmlCompute {
  /**
   * Polymorphic Discriminator
   */
  computeType: "AmlCompute";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
  /**
   * AML Compute properties
   */
  properties?: AmlComputeProperties;
}
/**
 * Specifies policy and settings for SSH access.
 */
export interface ComputeInstanceSshSettings {
  /**
   * Access policy for SSH. State of the public SSH port. Possible values are: Disabled - Indicates
   * that the public ssh port is closed on this instance. Enabled - Indicates that the public ssh
   * port is open and accessible according to the VNet/subnet policy if applicable. Possible values
   * include: 'Enabled', 'Disabled'. Default value: 'Disabled'.
   */
  sshPublicAccess?: SshPublicAccess;
  /**
   * Describes the admin user name.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly adminUserName?: string;
  /**
   * Describes the port for connecting through SSH.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly sshPort?: number;
  /**
   * Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t rsa -b 2048" to generate
   * your SSH key pairs.
   */
  adminPublicKey?: string;
}
/**
 * Defines all connectivity endpoints and properties for a ComputeInstance.
 */
export interface ComputeInstanceConnectivityEndpoints {
  /**
   * Public IP Address of this ComputeInstance.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly publicIpAddress?: string;
  /**
   * Private IP Address of this ComputeInstance (local to the VNET in which the compute instance is
   * deployed).
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly privateIpAddress?: string;
}
/**
 * Defines an Aml Instance application and its connectivity endpoint URI.
 */
export interface ComputeInstanceApplication {
  /**
   * Name of the ComputeInstance application.
   */
  displayName?: string;
  /**
   * Application's endpoint URI.
   */
  endpointUri?: string;
}
/**
 * Describes information on the user who created this ComputeInstance.
 */
export interface ComputeInstanceCreatedBy {
  /**
   * Name of the user.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly userName?: string;
  /**
   * Uniquely identifies the user's Azure Active Directory organization.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly userOrgId?: string;
  /**
   * Uniquely identifies the user within his/her organization.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly userId?: string;
}
/**
 * The last operation on ComputeInstance.
 */
export interface ComputeInstanceLastOperation {
  /**
   * Name of the last operation. Possible values include: 'Create', 'Start', 'Stop', 'Restart',
   * 'Reimage', 'Delete'
   */
  operationName?: OperationName;
  /**
   * Time of the last operation.
   */
  operationTime?: Date;
  /**
   * Operation status. Possible values include: 'InProgress', 'Succeeded', 'CreateFailed',
   * 'StartFailed', 'StopFailed', 'RestartFailed', 'ReimageFailed', 'DeleteFailed'
   */
  operationStatus?: OperationStatus;
}
/**
 * Compute Instance properties.
 */
export interface ComputeInstanceProperties {
  /**
   * Virtual Machine Size.
   */
  vmSize?: string;
  /**
   * Subnet. Virtual network subnet resource ID the compute nodes belong to.
   */
  subnet?: ResourceId;
  /**
   * Sharing policy for applications on this compute instance. Policy for sharing applications on
   * this compute instance among users of parent workspace. If Personal, only the creator can
   * access applications on this compute instance. When Shared, any workspace user can access
   * applications on this instance depending on his/her assigned role. Possible values include:
   * 'Personal', 'Shared'. Default value: 'Shared'.
   */
  applicationSharingPolicy?: ApplicationSharingPolicy;
  /**
   * Specifies policy and settings for SSH access.
   */
  sshSettings?: ComputeInstanceSshSettings;
  /**
   * Describes all connectivity endpoints available for this ComputeInstance.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly connectivityEndpoints?: ComputeInstanceConnectivityEndpoints;
  /**
   * Describes available applications and their endpoints on this ComputeInstance.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly applications?: ComputeInstanceApplication[];
  /**
   * Describes information on the user who created this ComputeInstance.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdBy?: ComputeInstanceCreatedBy;
  /**
   * Errors. Collection of errors encountered on this ComputeInstance.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly errors?: MachineLearningServiceError[];
  /**
   * The current state of this ComputeInstance. Possible values include: 'Creating',
   * 'CreateFailed', 'Deleting', 'Running', 'Restarting', 'JobRunning', 'SettingUp', 'SetupFailed',
   * 'Starting', 'Stopped', 'Stopping', 'UserSettingUp', 'UserSetupFailed', 'Unknown', 'Unusable'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly state?: ComputeInstanceState;
  /**
   * The last operation on ComputeInstance.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly lastOperation?: ComputeInstanceLastOperation;
}
/**
 * An Azure Machine Learning compute instance.
 */
export interface ComputeInstance {
  /**
   * Polymorphic Discriminator
   */
  computeType: "ComputeInstance";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
  /**
   * Compute Instance properties
   */
  properties?: ComputeInstanceProperties;
}
/**
 * Admin credentials for virtual machine.
 */
export interface VirtualMachineSshCredentials {
  /**
   * Username of admin account.
   */
  username?: string;
  /**
   * Password of admin account.
   */
  password?: string;
  /**
   * Public key data.
   */
  publicKeyData?: string;
  /**
   * Private key data.
   */
  privateKeyData?: string;
}
/**
 * Properties of a Machine Learning compute based on an Azure Virtual Machine.
 */
export interface VirtualMachineProperties {
  /**
   * Virtual Machine size.
   */
  virtualMachineSize?: string;
  /**
   * Port open for ssh connections.
   */
  sshPort?: number;
  /**
   * Public IP address of the virtual machine.
   */
  address?: string;
  /**
   * Admin credentials for virtual machine.
   */
  administratorAccount?: VirtualMachineSshCredentials;
}
/**
 * A Machine Learning compute based on Azure Virtual Machines.
 */
export interface VirtualMachine {
  /**
   * Polymorphic Discriminator
   */
  computeType: "VirtualMachine";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
  /**
   * Virtual Machine properties
   */
  properties?: VirtualMachineProperties;
}
/**
 * Properties of an HDInsight compute.
 */
export interface HDInsightProperties {
  /**
   * Port open for ssh connections on the master node of the cluster.
   */
  sshPort?: number;
  /**
   * Public IP address of the master node of the cluster.
   */
  address?: string;
  /**
   * Admin credentials for master node of the cluster.
   */
  administratorAccount?: VirtualMachineSshCredentials;
}
/**
 * A HDInsight compute.
 */
export interface HDInsight {
  /**
   * Polymorphic Discriminator
   */
  computeType: "HDInsight";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
  /**
   * HDInsight properties
   */
  properties?: HDInsightProperties;
}
/**
 * A DataFactory compute. Note: unlike the other compute variants, DataFactory carries no
 * type-specific `properties` member.
 */
export interface DataFactory {
  /**
   * Polymorphic Discriminator
   */
  computeType: "DataFactory";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
}
/**
 * Properties of a Databricks compute.
 */
export interface DatabricksProperties {
  /**
   * Databricks access token.
   */
  databricksAccessToken?: string;
}
/**
 * A Databricks compute.
 */
export interface Databricks {
  /**
   * Polymorphic Discriminator
   */
  computeType: "Databricks";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
  /**
   * Databricks properties
   */
  properties?: DatabricksProperties;
}
/**
 * Properties of a DataLakeAnalytics compute.
 */
export interface DataLakeAnalyticsProperties {
  /**
   * DataLake Store Account Name.
   */
  dataLakeStoreAccountName?: string;
}
/**
 * A DataLakeAnalytics compute.
 */
export interface DataLakeAnalytics {
  /**
   * Polymorphic Discriminator
   */
  computeType: "DataLakeAnalytics";
  /**
   * Location for the underlying compute
   */
  computeLocation?: string;
  /**
   * The provision state of the cluster. Valid values are Unknown, Updating, Provisioning,
   * Succeeded, and Failed. Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
   * 'Succeeded', 'Failed', 'Canceled'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: ProvisioningState;
  /**
   * The description of the Machine Learning compute.
   */
  description?: string;
  /**
   * The date and time when the compute was created.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdOn?: Date;
  /**
   * The date and time when the compute was last modified.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly modifiedOn?: Date;
  /**
   * ARM resource id of the underlying compute
   */
  resourceId?: string;
  /**
   * Errors during provisioning
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningErrors?: MachineLearningServiceError[];
  /**
   * Indicating whether the compute was provisioned by user and brought from outside if true, or
   * machine learning service provisioned it if false.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isAttachedCompute?: boolean;
  /**
   * DataLakeAnalytics properties
   */
  properties?: DataLakeAnalyticsProperties;
}
/**
 * Service principal credentials.
 */
export interface ServicePrincipalCredentials {
  /**
   * Client Id.
   */
  clientId: string;
  /**
   * Client secret.
   */
  clientSecret: string;
}
/**
 * AmlCompute update parameters.
 */
export interface ClusterUpdateParameters {
  /**
   * Scale settings. Desired scale settings for the amlCompute.
   */
  scaleSettings?: ScaleSettings;
}
/**
 * Contains the possible cases for ComputeNodesInformation. Discriminated by `computeType`.
 */
export type ComputeNodesInformationUnion = ComputeNodesInformation | AmlComputeNodesInformation;
/**
 * Compute nodes information related to a Machine Learning compute. Might differ for every type of
 * compute.
 */
export interface ComputeNodesInformation {
  /**
   * Polymorphic Discriminator
   */
  computeType: "ComputeNodesInformation";
  /**
   * The continuation token.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly nextLink?: string;
}
/**
 * Compute node information related to an AmlCompute.
 */
export interface AmlComputeNodeInformation {
  /**
   * Node ID. ID of the compute node.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly nodeId?: string;
  /**
   * Private IP address. Private IP address of the compute node.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly privateIpAddress?: string;
  /**
   * Public IP address. Public IP address of the compute node.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly publicIpAddress?: string;
  /**
   * Port. SSH port number of the node.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly port?: number;
  /**
   * State of the compute node. Values are idle, running, preparing, unusable, leaving and
   * preempted. Possible values include: 'idle', 'running', 'preparing', 'unusable', 'leaving',
   * 'preempted'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly nodeState?: NodeState;
  /**
   * Run ID. ID of the Experiment running on the node, if any else null.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly runId?: string;
}
/**
 * Compute nodes information related to an AmlCompute: the collection of per-node details plus a
 * pagination token.
 */
export interface AmlComputeNodesInformation {
  /**
   * Polymorphic Discriminator
   */
  computeType: "AmlCompute";
  /**
   * The continuation token.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly nextLink?: string;
  /**
   * The collection of returned AmlCompute nodes details.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly nodes?: AmlComputeNodeInformation[];
}
/**
 * Contains the possible cases for ComputeSecrets. Discriminated by `computeType`.
 */
export type ComputeSecretsUnion = ComputeSecrets | AksComputeSecrets | VirtualMachineSecrets | DatabricksComputeSecrets;
/**
 * Secrets related to a Machine Learning compute. Might differ for every type of compute.
 */
export interface ComputeSecrets {
  /**
   * Polymorphic Discriminator
   */
  computeType: "ComputeSecrets";
}
/**
 * Secrets related to a Machine Learning compute based on AKS.
 */
export interface AksComputeSecrets {
  /**
   * Polymorphic Discriminator
   */
  computeType: "AKS";
  /**
   * Content of kubeconfig file that can be used to connect to the Kubernetes cluster.
   */
  userKubeConfig?: string;
  /**
   * Content of kubeconfig file that can be used to connect to the Kubernetes cluster.
   */
  adminKubeConfig?: string;
  /**
   * Image registry pull secret.
   */
  imagePullSecretName?: string;
}
/**
 * Secrets related to a Machine Learning compute based on Azure Virtual Machines.
 */
export interface VirtualMachineSecrets {
  /**
   * Polymorphic Discriminator
   */
  computeType: "VirtualMachine";
  /**
   * Admin credentials for virtual machine.
   */
  administratorAccount?: VirtualMachineSshCredentials;
}
/**
 * Secrets related to a Machine Learning compute based on Databricks.
 */
export interface DatabricksComputeSecrets {
  /**
   * Polymorphic Discriminator
   */
  computeType: "Databricks";
  /**
   * Access token for databricks account.
   */
  databricksAccessToken?: string;
}
/**
 * Features/user capabilities associated with the sku.
 */
export interface SKUCapability {
  /**
   * Capability/Feature ID.
   */
  name?: string;
  /**
   * Details about the feature/capability.
   */
  value?: string;
}
/**
 * Describes the zonal capabilities of a SKU.
 */
export interface ResourceSkuZoneDetails {
  /**
   * The set of zones that the SKU is available in with the specified capabilities.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly name?: string[];
  /**
   * A list of capabilities that are available for the SKU in the specified list of zones.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly capabilities?: SKUCapability[];
}
/**
 * Availability information for a SKU in a specific location.
 */
export interface ResourceSkuLocationInfo {
  /**
   * Location of the SKU.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly location?: string;
  /**
   * List of availability zones where the SKU is supported.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly zones?: string[];
  /**
   * Details of capabilities available to a SKU in specific zones.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly zoneDetails?: ResourceSkuZoneDetails[];
}
/**
 * The restriction because of which SKU cannot be used.
 */
export interface Restriction {
  /**
   * The type of restrictions. As of now only possible value for this is location.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly type?: string;
  /**
   * The value of restrictions. If the restriction type is set to location, this would be the
   * different locations where the SKU is restricted.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly values?: string[];
  /**
   * The reason for the restriction. Possible values include: 'NotSpecified',
   * 'NotAvailableForRegion', 'NotAvailableForSubscription'
   */
  reasonCode?: ReasonCode;
}
/**
* Describes Workspace Sku details and features
*/
export interface SkuSettings {
/**
* The set of locations that the SKU is available. This will be supported and registered Azure
* Geo Regions (e.g. West US, East US, Southeast Asia, etc.).
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly locations?: string[];
/**
* A list of locations and availability zones in those locations where the SKU is available.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly locationInfo?: ResourceSkuLocationInfo[];
/**
* Sku Tier like Basic or Enterprise
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly tier?: string;
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly resourceType?: string;
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* List of features/user capabilities associated with the sku
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly capabilities?: SKUCapability[];
/**
* The restrictions because of which SKU cannot be used. This is empty if there are no
* restrictions.
*/
restrictions?: Restriction[];
}
/**
* AML workspace sku information
*/
export interface WorkspaceSku {
/**
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly resourceType?: string;
/**
* The list of workspace sku settings
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly skus?: SkuSettings[];
}
/**
* A private link resource
*/
export interface PrivateLinkResource extends Resource {
/**
* The private link resource group id.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly groupId?: string;
/**
* The private link resource required member names.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly requiredMembers?: string[];
/**
* The private link resource Private link DNS zone name.
*/
requiredZoneNames?: string[];
}
/**
* A list of private link resources
*/
export interface PrivateLinkResourceListResult {
/**
* Array of private link resources
*/
value?: PrivateLinkResource[];
}
/**
* Workspace connection.
*/
export interface WorkspaceConnection extends BaseResource {
/**
* ResourceId of the workspace connection.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* Friendly name of the workspace connection.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* Resource type of workspace connection.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* Category of the workspace connection.
*/
category?: string;
/**
* Target of the workspace connection.
*/
target?: string;
/**
* Authorization type of the workspace connection.
*/
authType?: string;
/**
* Value details of the workspace connection.
*/
value?: string;
}
/**
* object used for creating workspace connection.
*/
export interface WorkspaceConnectionDto {
/**
* Friendly name of the workspace connection
*/
name?: string;
/**
* Category of the workspace connection.
*/
category?: string;
/**
* Target of the workspace connection.
*/
target?: string;
/**
* Authorization type of the workspace connection.
*/
authType?: string;
/**
* Value details of the workspace connection.
*/
value?: string;
}
/**
* Optional Parameters.
*/
export interface WorkspacesListByResourceGroupOptionalParams extends msRest.RequestOptionsBase {
/**
* Continuation token for pagination.
*/
skiptoken?: string;
}
/**
* Optional Parameters.
*/
export interface WorkspacesListBySubscriptionOptionalParams extends msRest.RequestOptionsBase {
/**
* Continuation token for pagination.
*/
skiptoken?: string;
}
/**
* Optional Parameters.
*/
export interface WorkspacesListByResourceGroupNextOptionalParams extends msRest.RequestOptionsBase {
/**
* Continuation token for pagination.
*/
skiptoken?: string;
}
/**
* Optional Parameters.
*/
export interface WorkspacesListBySubscriptionNextOptionalParams extends msRest.RequestOptionsBase {
/**
* Continuation token for pagination.
*/
skiptoken?: string;
}
/**
* Optional Parameters.
*/
export interface VirtualMachineSizesListOptionalParams extends msRest.RequestOptionsBase {
/**
* Type of compute to filter by.
*/
computeType?: string;
/**
* Specifies whether to return recommended vm sizes or all vm sizes
*/
recommended?: boolean;
}
/**
* Optional Parameters.
*/
export interface WorkspaceConnectionsListOptionalParams extends msRest.RequestOptionsBase {
/**
* Target of the workspace connection.
*/
target?: string;
/**
* Category of the workspace connection.
*/
category?: string;
}
/**
* Optional Parameters.
*/
export interface MachineLearningComputeListByWorkspaceOptionalParams extends msRest.RequestOptionsBase {
/**
* Continuation token for pagination.
*/
skiptoken?: string;
}
/**
* Optional Parameters.
*/
export interface MachineLearningComputeListByWorkspaceNextOptionalParams extends msRest.RequestOptionsBase {
/**
* Continuation token for pagination.
*/
skiptoken?: string;
}
/**
* An interface representing AzureMachineLearningWorkspacesOptions.
*/
export interface AzureMachineLearningWorkspacesOptions extends AzureServiceClientOptions {
baseUri?: string;
}
/**
* Defines headers for CreateOrUpdate operation.
*/
export interface MachineLearningComputeCreateOrUpdateHeaders {
/**
* URI to poll for asynchronous operation status.
*/
azureAsyncOperation: string;
}
/**
* Defines headers for Delete operation.
*/
export interface MachineLearningComputeDeleteHeaders {
/**
* URI to poll for asynchronous operation status.
*/
azureAsyncOperation: string;
/**
* URI to poll for asynchronous operation result.
*/
location: string;
}
/**
* @interface
* An array of operations supported by the resource provider.
* @extends Array<Operation>
*/
export interface OperationListResult extends Array<Operation> {
}
/**
* @interface
* The result of a request to list machine learning workspaces.
* @extends Array<Workspace>
*/
export interface WorkspaceListResult extends Array<Workspace> {
/**
* The URI that can be used to request the next list of machine learning workspaces.
*/
nextLink?: string;
}
/**
* @interface
* The List Aml user feature operation response.
* @extends Array<AmlUserFeature>
*/
export interface ListAmlUserFeatureResult extends Array<AmlUserFeature> {
/**
* The URI to fetch the next page of AML user features information. Call ListNext() with this to
* fetch the next page of AML user features information.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* The List Usages operation response.
* @extends Array<Usage>
*/
export interface ListUsagesResult extends Array<Usage> {
/**
* The URI to fetch the next page of AML resource usage information. Call ListNext() with this to
* fetch the next page of AML resource usage information.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* The List WorkspaceQuotasByVMFamily operation response.
* @extends Array<ResourceQuota>
*/
export interface ListWorkspaceQuotas extends Array<ResourceQuota> {
/**
* The URI to fetch the next page of workspace quota information by VM Family. Call ListNext()
* with this to fetch the next page of Workspace Quota information.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* Paginated list of Workspace connection objects.
* @extends Array<WorkspaceConnection>
*/
export interface PaginatedWorkspaceConnectionsList extends Array<WorkspaceConnection> {
/**
* A continuation link (absolute URI) to the next page of results in the list.
*/
nextLink?: string;
}
/**
* @interface
* Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
* @extends Array<ComputeResource>
*/
export interface PaginatedComputeResourcesList extends Array<ComputeResource> {
/**
* A continuation link (absolute URI) to the next page of results in the list.
*/
nextLink?: string;
}
/**
* @interface
* List of skus with features
* @extends Array<WorkspaceSku>
*/
export interface SkuListResult extends Array<WorkspaceSku> {
/**
* The URI to fetch the next page of Workspace Skus. Call ListNext() with this URI to fetch the
* next page of Workspace Skus
*/
nextLink?: string;
}
/**
* Defines values for ProvisioningState.
* Possible values include: 'Unknown', 'Updating', 'Creating', 'Deleting', 'Succeeded', 'Failed',
* 'Canceled'
* @readonly
* @enum {string}
*/
export type ProvisioningState = 'Unknown' | 'Updating' | 'Creating' | 'Deleting' | 'Succeeded' | 'Failed' | 'Canceled';
/**
* Defines values for EncryptionStatus.
* Possible values include: 'Enabled', 'Disabled'
* @readonly
* @enum {string}
*/
export type EncryptionStatus = 'Enabled' | 'Disabled';
/**
* Defines values for PrivateEndpointServiceConnectionStatus.
* Possible values include: 'Pending', 'Approved', 'Rejected', 'Disconnected', 'Timeout'
* @readonly
* @enum {string}
*/
export type PrivateEndpointServiceConnectionStatus = 'Pending' | 'Approved' | 'Rejected' | 'Disconnected' | 'Timeout';
/**
* Defines values for PrivateEndpointConnectionProvisioningState.
* Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Failed'
* @readonly
* @enum {string}
*/
export type PrivateEndpointConnectionProvisioningState = 'Succeeded' | 'Creating' | 'Deleting' | 'Failed';
/**
* Defines values for UsageUnit.
* Possible values include: 'Count'
* @readonly
* @enum {string}
*/
export type UsageUnit = 'Count';
/**
* Defines values for VMPriceOSType.
* Possible values include: 'Linux', 'Windows'
* @readonly
* @enum {string}
*/
export type VMPriceOSType = 'Linux' | 'Windows';
/**
* Defines values for VMTier.
* Possible values include: 'Standard', 'LowPriority', 'Spot'
* @readonly
* @enum {string}
*/
export type VMTier = 'Standard' | 'LowPriority' | 'Spot';
/**
* Defines values for QuotaUnit.
* Possible values include: 'Count'
* @readonly
* @enum {string}
*/
export type QuotaUnit = 'Count';
/**
* Defines values for Status.
* Possible values include: 'Undefined', 'Success', 'Failure', 'InvalidQuotaBelowClusterMinimum',
* 'InvalidQuotaExceedsSubscriptionLimit', 'InvalidVMFamilyName', 'OperationNotSupportedForSku',
* 'OperationNotEnabledForRegion'
* @readonly
* @enum {string}
*/
export type Status = 'Undefined' | 'Success' | 'Failure' | 'InvalidQuotaBelowClusterMinimum' | 'InvalidQuotaExceedsSubscriptionLimit' | 'InvalidVMFamilyName' | 'OperationNotSupportedForSku' | 'OperationNotEnabledForRegion';
/**
* Defines values for ResourceIdentityType.
* Possible values include: 'SystemAssigned', 'UserAssigned', 'SystemAssigned,UserAssigned', 'None'
* @readonly
* @enum {string}
*/
export type ResourceIdentityType = 'SystemAssigned' | 'UserAssigned' | 'SystemAssigned,UserAssigned' | 'None';
/**
* Defines values for VmPriority.
* Possible values include: 'Dedicated', 'LowPriority'
* @readonly
* @enum {string}
*/
export type VmPriority = 'Dedicated' | 'LowPriority';
/**
* Defines values for RemoteLoginPortPublicAccess.
* Possible values include: 'Enabled', 'Disabled', 'NotSpecified'
* @readonly
* @enum {string}
*/
export type RemoteLoginPortPublicAccess = 'Enabled' | 'Disabled' | 'NotSpecified';
/**
* Defines values for AllocationState.
* Possible values include: 'Steady', 'Resizing'
* @readonly
* @enum {string}
*/
export type AllocationState = 'Steady' | 'Resizing';
/**
* Defines values for ApplicationSharingPolicy.
* Possible values include: 'Personal', 'Shared'
* @readonly
* @enum {string}
*/
export type ApplicationSharingPolicy = 'Personal' | 'Shared';
/**
* Defines values for SshPublicAccess.
* Possible values include: 'Enabled', 'Disabled'
* @readonly
* @enum {string}
*/
export type SshPublicAccess = 'Enabled' | 'Disabled';
/**
* Defines values for ComputeInstanceState.
* Possible values include: 'Creating', 'CreateFailed', 'Deleting', 'Running', 'Restarting',
* 'JobRunning', 'SettingUp', 'SetupFailed', 'Starting', 'Stopped', 'Stopping', 'UserSettingUp',
* 'UserSetupFailed', 'Unknown', 'Unusable'
* @readonly
* @enum {string}
*/
export type ComputeInstanceState = 'Creating' | 'CreateFailed' | 'Deleting' | 'Running' | 'Restarting' | 'JobRunning' | 'SettingUp' | 'SetupFailed' | 'Starting' | 'Stopped' | 'Stopping' | 'UserSettingUp' | 'UserSetupFailed' | 'Unknown' | 'Unusable';
/**
* Defines values for OperationName.
* Possible values include: 'Create', 'Start', 'Stop', 'Restart', 'Reimage', 'Delete'
* @readonly
* @enum {string}
*/
export type OperationName = 'Create' | 'Start' | 'Stop' | 'Restart' | 'Reimage' | 'Delete';
/**
* Defines values for OperationStatus.
* Possible values include: 'InProgress', 'Succeeded', 'CreateFailed', 'StartFailed', 'StopFailed',
* 'RestartFailed', 'ReimageFailed', 'DeleteFailed'
* @readonly
* @enum {string}
*/
export type OperationStatus = 'InProgress' | 'Succeeded' | 'CreateFailed' | 'StartFailed' | 'StopFailed' | 'RestartFailed' | 'ReimageFailed' | 'DeleteFailed';
/**
* Defines values for NodeState.
* Possible values include: 'idle', 'running', 'preparing', 'unusable', 'leaving', 'preempted'
* @readonly
* @enum {string}
*/
export type NodeState = 'idle' | 'running' | 'preparing' | 'unusable' | 'leaving' | 'preempted';
/**
* Defines values for ComputeType.
* Possible values include: 'AKS', 'AmlCompute', 'ComputeInstance', 'DataFactory',
* 'VirtualMachine', 'HDInsight', 'Databricks', 'DataLakeAnalytics'
* @readonly
* @enum {string}
*/
export type ComputeType = 'AKS' | 'AmlCompute' | 'ComputeInstance' | 'DataFactory' | 'VirtualMachine' | 'HDInsight' | 'Databricks' | 'DataLakeAnalytics';
/**
* Defines values for ReasonCode.
* Possible values include: 'NotSpecified', 'NotAvailableForRegion', 'NotAvailableForSubscription'
* @readonly
* @enum {string}
*/
export type ReasonCode = 'NotSpecified' | 'NotAvailableForRegion' | 'NotAvailableForSubscription';
/**
* Defines values for UnderlyingResourceAction.
* Possible values include: 'Delete', 'Detach'
* @readonly
* @enum {string}
*/
export type UnderlyingResourceAction = 'Delete' | 'Detach';
/**
* Defines values for Status1.
* Possible values include: 'Disabled', 'Enabled'
* @readonly
* @enum {string}
*/
export type Status1 = 'Disabled' | 'Enabled';
/**
* Contains response data for the list operation.
*/
export type OperationsListResponse = OperationListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: OperationListResult;
};
};
/**
* Contains response data for the get operation.
*/
export type WorkspacesGetResponse = Workspace & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Workspace;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type WorkspacesCreateOrUpdateResponse = Workspace & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Workspace;
};
};
/**
* Contains response data for the update operation.
*/
export type WorkspacesUpdateResponse = Workspace & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Workspace;
};
};
/**
* Contains response data for the listByResourceGroup operation.
*/
export type WorkspacesListByResourceGroupResponse = WorkspaceListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: WorkspaceListResult;
};
};
/**
* Contains response data for the listKeys operation.
*/
export type WorkspacesListKeysResponse = ListWorkspaceKeysResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListWorkspaceKeysResult;
};
};
/**
* Contains response data for the listBySubscription operation.
*/
export type WorkspacesListBySubscriptionResponse = WorkspaceListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: WorkspaceListResult;
};
};
/**
* Contains response data for the beginCreateOrUpdate operation.
*/
export type WorkspacesBeginCreateOrUpdateResponse = Workspace & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Workspace;
};
};
/**
* Contains response data for the listByResourceGroupNext operation.
*/
export type WorkspacesListByResourceGroupNextResponse = WorkspaceListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: WorkspaceListResult;
};
};
/**
* Contains response data for the listBySubscriptionNext operation.
*/
export type WorkspacesListBySubscriptionNextResponse = WorkspaceListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: WorkspaceListResult;
};
};
/**
* Contains response data for the list operation.
*/
export type WorkspaceFeaturesListResponse = ListAmlUserFeatureResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListAmlUserFeatureResult;
};
};
/**
* Contains response data for the listNext operation.
*/
export type WorkspaceFeaturesListNextResponse = ListAmlUserFeatureResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListAmlUserFeatureResult;
};
};
/**
* Contains response data for the prepare operation.
*/
export type NotebooksPrepareResponse = NotebookResourceInfo & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: NotebookResourceInfo;
};
};
/**
* Contains response data for the beginPrepare operation.
*/
export type NotebooksBeginPrepareResponse = NotebookResourceInfo & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: NotebookResourceInfo;
};
};
/**
* Contains response data for the list operation.
*/
export type UsagesListResponse = ListUsagesResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListUsagesResult;
};
};
/**
* Contains response data for the listNext operation.
*/
export type UsagesListNextResponse = ListUsagesResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListUsagesResult;
};
};
/**
* Contains response data for the list operation.
*/
export type VirtualMachineSizesListResponse = VirtualMachineSizeListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: VirtualMachineSizeListResult;
};
};
/**
* Contains response data for the update operation.
*/
export type QuotasUpdateResponse = UpdateWorkspaceQuotasResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: UpdateWorkspaceQuotasResult;
};
};
/**
* Contains response data for the list operation.
*/
export type QuotasListResponse = ListWorkspaceQuotas & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListWorkspaceQuotas;
};
};
/**
* Contains response data for the listNext operation.
*/
export type QuotasListNextResponse = ListWorkspaceQuotas & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListWorkspaceQuotas;
};
};
/**
* Contains response data for the list operation.
*/
export type WorkspaceConnectionsListResponse = PaginatedWorkspaceConnectionsList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PaginatedWorkspaceConnectionsList;
};
};
/**
* Contains response data for the create operation.
*/
export type WorkspaceConnectionsCreateResponse = WorkspaceConnection & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: WorkspaceConnection;
};
};
/**
* Contains response data for the get operation.
*/
export type WorkspaceConnectionsGetResponse = WorkspaceConnection & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: WorkspaceConnection;
};
};
/**
* Contains response data for the listByWorkspace operation.
*/
export type MachineLearningComputeListByWorkspaceResponse = PaginatedComputeResourcesList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PaginatedComputeResourcesList;
};
};
/**
* Contains response data for the get operation.
*/
export type MachineLearningComputeGetResponse = ComputeResource & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ComputeResource;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type MachineLearningComputeCreateOrUpdateResponse = ComputeResource & MachineLearningComputeCreateOrUpdateHeaders & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The parsed HTTP response headers.
*/
parsedHeaders: MachineLearningComputeCreateOrUpdateHeaders;
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ComputeResource;
};
};
/**
* Contains response data for the update operation.
*/
export type MachineLearningComputeUpdateResponse = ComputeResource & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ComputeResource;
};
};
/**
* Contains response data for the deleteMethod operation.
*/
export type MachineLearningComputeDeleteResponse = MachineLearningComputeDeleteHeaders & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The parsed HTTP response headers.
*/
parsedHeaders: MachineLearningComputeDeleteHeaders;
};
};
/**
* Contains response data for the listNodes operation.
*/
export type MachineLearningComputeListNodesResponse = AmlComputeNodesInformation & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: AmlComputeNodesInformation;
};
};
/**
* Contains response data for the listKeys operation.
*/
export type MachineLearningComputeListKeysResponse = ComputeSecretsUnion & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ComputeSecretsUnion;
};
};
/**
* Contains response data for the beginUpdate operation.
*/
export type MachineLearningComputeBeginUpdateResponse = ComputeResource & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ComputeResource;
};
};
/**
* Contains response data for the listByWorkspaceNext operation.
*/
export type MachineLearningComputeListByWorkspaceNextResponse = PaginatedComputeResourcesList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PaginatedComputeResourcesList;
};
};
/**
* Contains response data for the listSkus operation.
*/
export type ListSkusResponse = SkuListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: SkuListResult;
};
};
/**
* Contains response data for the listSkusNext operation.
*/
export type ListSkusNextResponse = SkuListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: SkuListResult;
};
};
/**
* Contains response data for the get operation.
*/
export type PrivateEndpointConnectionsGetResponse = PrivateEndpointConnection & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PrivateEndpointConnection;
};
};
/**
* Contains response data for the put operation.
*/
export type PrivateEndpointConnectionsPutResponse = PrivateEndpointConnection & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PrivateEndpointConnection;
};
};
/**
* Contains response data for the listByWorkspace operation.
*/
export type PrivateLinkResourcesListByWorkspaceResponse = PrivateLinkResourceListResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: PrivateLinkResourceListResult;
};
}; | the_stack |
import { Parser } from "@siteimprove/alfa-parser";
import { Predicate } from "@siteimprove/alfa-predicate";
import { Result, Err } from "@siteimprove/alfa-result";
import { Slice } from "@siteimprove/alfa-slice";
import { Token } from "./token";
// Local bindings for frequently used helpers.
const fromCharCode = String.fromCharCode;
const zeroOrMore = Parser.zeroOrMore;
/**
 * CSS tokenizer entry point.
 *
 * @public
 */
export namespace Lexer {
  /**
   * Tokenizes an input string by repeatedly consuming one token at a time
   * until the input is exhausted.
   */
  export function lex(input: string): Slice<Token> {
    // Decode the string into an array of UTF-16 code units up front so the
    // individual consumers can index into it directly.
    const points = new Array<number>(input.length);

    for (let i = 0, n = input.length; i < n; i++) {
      points[i] = input.charCodeAt(i);
    }

    const tokens: Array<Token> = [];

    let i = 0;
    while (i < points.length) {
      const [[, next], token] = consumeToken([points, i]);

      i = next;

      // A `null` token ends lexing.
      if (token === null) {
        break;
      }

      tokens.push(token);
    }

    return Slice.of(tokens);
  }
}
/**
 * {@link https://drafts.csswg.org/css-syntax/#digit}
 */
const isDigit = (code: number): boolean => 0x30 <= code && code <= 0x39;
/**
 * Parser that accepts a single digit code point and advances past it.
 */
const digit: Parser<[Array<number>, number], number, string> = ([input, i]) => {
  const code = input[i];

  if (!isDigit(code)) {
    return Err.of("Expected a digit");
  }

  return Result.of([[input, i + 1], code]);
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#hex-digit}
 */
const isHexDigit = (code: number): boolean =>
  (0x30 <= code && code <= 0x39) || // 0-9
  (0x41 <= code && code <= 0x46) || // A-F
  (0x61 <= code && code <= 0x66); // a-f
/**
 * Parser that accepts a single hex digit code point and advances past it.
 */
const hexDigit: Parser<[Array<number>, number], number, string> = ([
  input,
  i,
]) => {
  const code = input[i];

  if (!isHexDigit(code)) {
    return Err.of("Expected a hex digit");
  }

  return Result.of([[input, i + 1], code]);
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#uppercase-letter}
 */
const isUppercaseLetter = (code: number): boolean =>
  0x41 <= code && code <= 0x5a;
/**
 * {@link https://drafts.csswg.org/css-syntax/#lowercase-letter}
 */
const isLowercaseLetter = (code: number): boolean =>
  0x61 <= code && code <= 0x7a;
/**
 * {@link https://drafts.csswg.org/css-syntax/#letter}
 */
const isLetter = (code: number): boolean =>
  (0x41 <= code && code <= 0x5a) || (0x61 <= code && code <= 0x7a);
/**
 * {@link https://drafts.csswg.org/css-syntax/#non-ascii-code-point}
 */
const isNonAscii = (code: number): boolean => 0x80 <= code;
/**
 * {@link https://drafts.csswg.org/css-syntax/#newline}
 */
const isNewline = (code: number): boolean => code === 0xa;
/**
 * {@link https://drafts.csswg.org/css-syntax/#whitespace}
 */
const isWhitespace = (code: number): boolean =>
  code === 0x9 || code === 0xa || code === 0x20;
/**
 * {@link https://drafts.csswg.org/css-syntax/#non-printable-code-point}
 */
const isNonPrintable = (code: number): boolean =>
  (0 <= code && code <= 0x8) ||
  code === 0xb ||
  (0xe <= code && code <= 0x1f) ||
  code === 0x7f;
/**
 * {@link https://drafts.csswg.org/css-syntax/#name-start-code-point}
 */
const isNameStart = (code: number): boolean =>
  (0x41 <= code && code <= 0x5a) || // A-Z
  (0x61 <= code && code <= 0x7a) || // a-z
  0x80 <= code || // non-ASCII
  code === 0x5f; // "_"
/**
 * {@link https://drafts.csswg.org/css-syntax/#name-code-point}
 */
const isName = (code: number): boolean =>
  (0x41 <= code && code <= 0x5a) || // A-Z
  (0x61 <= code && code <= 0x7a) || // a-z
  (0x30 <= code && code <= 0x39) || // 0-9
  0x80 <= code || // non-ASCII
  code === 0x5f || // "_"
  code === 0x2d; // "-"
/**
 * {@link https://infra.spec.whatwg.org/#surrogate}
 */
const isSurrogate = (code: number): boolean =>
  0xd800 <= code && code <= 0xdfff;
/**
 * {@link https://drafts.csswg.org/css-syntax/#starts-with-a-valid-escape}
 *
 * A valid escape is a backslash not followed by a newline.
 */
const startsValidEscape: Predicate<[Array<number>, number]> = ([input, i]) => {
  if (input[i] !== 0x5c) {
    return false;
  }
  return !isNewline(input[i + 1]);
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#starts-with-a-number}
 *
 * Checks whether the code points at offset `i` would start a number: an
 * optional sign, followed either by a digit or by a "." and a digit.
 */
const startsNumber: Predicate<[Array<number>, number]> = ([input, i]) => {
  switch (input[i]) {
    case 0x2b: // "+"
    case 0x2d: // "-"
      if (input[i + 1] === 0x2e) {
        // Sign then ".": per the spec the *third* code point must be a
        // digit (e.g. "+.5"). The previous code checked input[i + 3],
        // skipping one position and wrongly rejecting inputs like "+.5".
        return isDigit(input[i + 2]);
      } else {
        return isDigit(input[i + 1]);
      }
    case 0x2e: // "."
      return isDigit(input[i + 1]);
    default:
      return isDigit(input[i]);
  }
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#would-start-an-identifier}
 *
 * An identifier may start with a name-start code point, a valid escape, or
 * "-" followed by one of those (or a second "-").
 */
const startsIdentifier: Predicate<[Array<number>, number]> = ([input, i]) => {
  const first = input[i];
  if (first === 0x2d) {
    const second = input[i + 1];
    return (
      isNameStart(second) ||
      second === 0x2d ||
      startsValidEscape([input, i + 1])
    );
  }
  if (first === 0x5c) {
    return startsValidEscape([input, i]);
  }
  return isNameStart(first);
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-a-name}
 *
 * Consumes as many name code points (and valid escapes) as possible from
 * `input` starting at offset `i`, returning the new offset and the decoded
 * name. Stops at the first code point that is neither a name code point nor
 * the start of a valid escape.
 */
const consumeName: Parser.Infallible<[Array<number>, number], string> = ([
  input,
  i,
]) => {
  let name = "";
  let code: number;
  for (const n = input.length; i < n; ) {
    code = input[i];
    if (isName(code)) {
      i++;
      name += fromCharCode(code);
    } else if (startsValidEscape([input, i])) {
      // Hand the decoder the position *after* the backslash; the
      // destructuring assignment also advances `i` past the escape.
      [[input, i], code] = consumeEscapedCodePoint([input, i + 1]);
      name += fromCharCode(code);
    } else {
      break;
    }
  }
  return [[input, i], name];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-an-escaped-code-point}
 *
 * Decodes a single escaped code point. Assumes the leading backslash has
 * already been consumed, so `input[i]` is the first code point of the escape
 * itself.
 */
const consumeEscapedCodePoint: Parser.Infallible<
  [Array<number>, number],
  number
> = ([input, i]) => {
  const byte = input[i];
  // Past the end of input, `input[i]` is undefined, so the NaN check
  // catches EOF and decodes to U+FFFD REPLACEMENT CHARACTER.
  if (isNaN(byte)) {
    return [[input, i], 0xfffd];
  }
  i++;
  if (isHexDigit(byte)) {
    // A hex escape has 1-6 hex digits; one is already in hand, so at most
    // 5 more are read.
    const bytes = [byte];
    for (const n = i + 5; i < n; i++) {
      const result = hexDigit([input, i]);
      if (result.isErr()) {
        break;
      }
      const [, byte] = result.get();
      bytes.push(byte);
    }
    // Fold the collected hex digits into a single code point value.
    let code = 0;
    for (let i = 0, n = bytes.length; i < n; i++) {
      let byte = bytes[i];
      if (isDigit(byte)) {
        byte = byte - 0x30;
      } else if (isLowercaseLetter(byte)) {
        byte = byte - 0x61 + 10;
      } else if (isUppercaseLetter(byte)) {
        byte = byte - 0x41 + 10;
      }
      code = 0x10 * code + byte;
    }
    // A single whitespace code point after a hex escape belongs to the
    // escape and is consumed with it.
    if (isWhitespace(input[i])) {
      i++;
    }
    // NULL, surrogates, and values beyond the Unicode range decode to
    // U+FFFD.
    if (code === 0 || isSurrogate(code) || code > 0x10ffff) {
      return [[input, i], 0xfffd];
    }
    return [[input, i], code];
  }
  // Any other escaped code point is returned as-is.
  return [[input, i], byte];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-a-number}
 *
 * Consumes a numeric literal -- optional sign, integer part, optional
 * fraction, and optional exponent -- and returns a number token whose value
 * is computed by convert().
 */
const consumeNumber: Parser.Infallible<
  [Array<number>, number],
  Token.Number
> = ([input, i]) => {
  const number: Array<number> = [];
  let code = input[i];
  let isSigned = false;
  let isInteger = true;
  // Optional leading sign ("+" or "-").
  if (code === 0x2b || code === 0x2d) {
    number.push(code);
    code = input[++i];
    isSigned = true;
  }
  // Integer part.
  while (isDigit(code)) {
    number.push(code);
    code = input[++i];
  }
  // Fractional part: "." only counts when followed by at least one digit.
  if (code === 0x2e && isDigit(input[i + 1])) {
    number.push(0x2e, input[i + 1]);
    code = input[(i += 2)];
    isInteger = false;
    while (isDigit(code)) {
      number.push(code);
      code = input[++i];
    }
  }
  // Exponent part: "e"/"E", an optional sign, and at least one digit.
  if (code === 0x45 || code === 0x65) {
    let offset = 1;
    if (input[i + 1] === 0x2b || input[i + 1] === 0x2d) {
      offset = 2;
    }
    if (isDigit(input[i + offset])) {
      // Push "e"/"E", the optional sign, and the first exponent digit.
      number.push(...input.slice(i, i + offset + 1));
      code = input[(i += offset + 1)];
      isInteger = false;
      while (isDigit(code)) {
        number.push(code);
        code = input[++i];
      }
    }
  }
  return [[input, i], Token.number(convert(number), isInteger, isSigned)];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-a-numeric-token}
 *
 * Consumes a number and classifies it: a trailing identifier makes it a
 * dimension, a trailing "%" a percentage, otherwise a plain number.
 */
const consumeNumeric: Parser.Infallible<
  [Array<number>, number],
  Token.Number | Token.Dimension | Token.Percentage
> = ([input, i]) => {
  let num: Token.Number;
  [[input, i], num] = consumeNumber([input, i]);
  if (startsIdentifier([input, i])) {
    let unit: string;
    [[input, i], unit] = consumeName([input, i]);
    return [
      [input, i],
      Token.dimension(num.value, unit, num.isInteger, num.isSigned),
    ];
  }
  if (input[i] === 0x25) {
    return [
      [input, i + 1],
      Token.percentage(num.value / 100, num.isInteger),
    ];
  }
  return [[input, i], num];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-an-ident-like-token}
 *
 * Consumes a name and decides whether it is a plain identifier, a function
 * token, or -- for `url(` with an unquoted argument -- a URL token.
 */
const consumeIdentifierLike: Parser.Infallible<
  [Array<number>, number],
  Token.Ident | Token.Function | Token.URL | Token.BadURL
> = ([input, i]) => {
  let string: string;
  [[input, i], string] = consumeName([input, i]);
  const code = input[i];
  if (string.toLowerCase() === "url" && code === 0x28) {
    i++;
    // Skip runs of whitespace but keep one code point of it, so the check
    // below can still look one position ahead for a quote.
    while (isWhitespace(input[i]) && isWhitespace(input[i + 1])) {
      i++;
    }
    // A quoted argument means url("...") is tokenized as a function token;
    // only the unquoted form yields a url token.
    if (
      input[i] === 0x22 ||
      input[i] === 0x27 ||
      (isWhitespace(input[i]) &&
        (input[i + 1] === 0x22 || input[i + 1] === 0x27))
    ) {
      return [[input, i], Token.func(string)];
    }
    return consumeURL([input, i]);
  }
  if (code === 0x28) {
    return [[input, i + 1], Token.func(string)];
  }
  return [[input, i], Token.ident(string)];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-a-string-token}
 *
 * Consumes a quoted string, using the opening quote character as the
 * terminator. NOTE(review): this is a simplified form of the spec algorithm
 * -- escapes are not decoded and a newline ends the string rather than
 * producing a bad-string token; presumably intentional, confirm upstream.
 */
const consumeString: Parser.Infallible<
  [Array<number>, number],
  Token.String
> = ([input, i]) => {
  const quote = input[i];
  i += 1;
  let value = "";
  for (const n = input.length; i < n; ) {
    const code = input[i];
    i += 1;
    if (code === quote || isNewline(code)) {
      break;
    }
    value += fromCharCode(code);
  }
  return [[input, i], Token.string(value)];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-a-url-token}
 *
 * Consumes the unquoted body of url(...) up to the closing ")". Quotes, an
 * opening parenthesis, a non-printable code point, interior whitespace, or
 * an invalid escape turn the remainder into a bad-url token.
 */
const consumeURL: Parser.Infallible<
  [Array<number>, number],
  Token.URL | Token.BadURL
> = ([input, i]) => {
  while (isWhitespace(input[i])) {
    i++;
  }
  let value = "";
  let code: number;
  while (i < input.length) {
    code = input[i];
    if (code === 0x29) {
      i++;
      break;
    }
    if (isWhitespace(code)) {
      // Whitespace is only allowed immediately before the closing ")".
      while (isWhitespace(input[i])) {
        i++;
      }
      if (input[i] === 0x29) {
        i++;
        break;
      } else {
        return consumeBadURL([input, i]);
      }
    }
    if (
      code === 0x22 ||
      code === 0x27 ||
      code === 0x28 ||
      isNonPrintable(code)
    ) {
      return consumeBadURL([input, i + 1]);
    }
    if (code === 0x5c) {
      if (startsValidEscape([input, i])) {
        // Bug fix: pass the position *after* the backslash, as consumeName()
        // does -- the escape decoder assumes the backslash was already
        // consumed. Passing `i` made it decode the backslash itself, so an
        // escaped ")" terminated the url token prematurely.
        [[input, i], code] = consumeEscapedCodePoint([input, i + 1]);
        value += fromCharCode(code);
        continue;
      } else {
        return consumeBadURL([input, i]);
      }
    }
    i++;
    value += fromCharCode(code);
  }
  return [[input, i], Token.url(value)];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-remnants-of-bad-url}
 *
 * Discards input until the closing ")" of an invalid url(...) token,
 * decoding valid escapes so that an escaped ")" does not end the token.
 */
const consumeBadURL: Parser.Infallible<
  [Array<number>, number],
  Token.BadURL
> = ([input, i]) => {
  let code: number;
  while (i < input.length) {
    if (startsValidEscape([input, i])) {
      // Bug fix: skip the backslash before decoding, mirroring
      // consumeName(). Handing the decoder the backslash itself advanced
      // only one position, so an escaped ")" wrongly ended the remnant.
      [[input, i]] = consumeEscapedCodePoint([input, i + 1]);
    } else {
      code = input[i++];
      if (code === 0x29) {
        break;
      }
    }
  }
  return [[input, i], Token.badURL()];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#consume-a-token}
 *
 * Consumes the next token from `input` at offset `i`. Comments are skipped
 * first; `null` is returned at end of input.
 */
const consumeToken: Parser.Infallible<
  [Array<number>, number],
  Token | null
> = ([input, i]) => {
  // https://drafts.csswg.org/css-syntax/#consume-comments
  // An unterminated comment consumes the rest of the input.
  while (i < input.length) {
    if (input[i] === 0x2f && input[i + 1] === 0x2a) {
      i += 2;
      while (i < input.length) {
        if (input[i] === 0x2a && input[i + 1] === 0x2f) {
          i += 2;
          break;
        }
        i++;
      }
    } else {
      break;
    }
  }
  if (i >= input.length) {
    return [[input, i], null];
  }
  const code = input[i];
  if (isWhitespace(code)) {
    // Collapse a run of whitespace into a single token.
    i++;
    while (isWhitespace(input[i])) {
      i++;
    }
    return [[input, i], Token.whitespace()];
  }
  if (isNameStart(code)) {
    return consumeIdentifierLike([input, i]);
  }
  if (isDigit(code)) {
    return consumeNumeric([input, i]);
  }
  switch (code) {
    case 0x22: // '"'
      return consumeString([input, i]);
    case 0x23: // "#"
      i++;
      if (isName(input[i]) || startsValidEscape([input, i])) {
        const isIdentifier = startsIdentifier([input, i]);
        let name: string;
        [[input, i], name] = consumeName([input, i]);
        return [[input, i], Token.hash(name, isIdentifier)];
      }
      // Bug fix: `i` was already advanced past "#" above, so returning
      // `i + 1` here skipped the code point following "#". A lone "#" is a
      // one-character delim token (compare the "@" case below).
      return [[input, i], Token.delim(code)];
    case 0x27: // "'"
      return consumeString([input, i]);
    case 0x28:
      return [[input, i + 1], Token.openParenthesis()];
    case 0x29:
      return [[input, i + 1], Token.closeParenthesis()];
    case 0x2b: // "+"
      if (startsNumber([input, i])) {
        return consumeNumeric([input, i]);
      }
      return [[input, i + 1], Token.delim(code)];
    case 0x2c:
      return [[input, i + 1], Token.comma()];
    case 0x2d: // "-"
      if (startsNumber([input, i])) {
        return consumeNumeric([input, i]);
      }
      // "-->" closes a CDC comment marker.
      if (input[i + 1] === 0x2d && input[i + 2] === 0x3e) {
        return [[input, i + 3], Token.closeComment()];
      }
      if (startsIdentifier([input, i])) {
        return consumeIdentifierLike([input, i]);
      }
      return [[input, i + 1], Token.delim(code)];
    case 0x2e: // "."
      if (startsNumber([input, i])) {
        return consumeNumeric([input, i]);
      }
      return [[input, i + 1], Token.delim(code)];
    case 0x3a:
      return [[input, i + 1], Token.colon()];
    case 0x3b:
      return [[input, i + 1], Token.semicolon()];
    case 0x3c: // "<"
      // "<!--" opens a CDO comment marker.
      if (
        input[i + 1] === 0x21 &&
        input[i + 2] === 0x2d &&
        input[i + 3] === 0x2d
      ) {
        return [[input, i + 4], Token.openComment()];
      }
      return [[input, i + 1], Token.delim(code)];
    case 0x40: // "@"
      i++;
      if (startsIdentifier([input, i])) {
        let name: string;
        [[input, i], name] = consumeName([input, i]);
        return [[input, i], Token.atKeyword(name)];
      }
      return [[input, i], Token.delim(code)];
    case 0x5b:
      return [[input, i + 1], Token.openSquareBracket()];
    case 0x5c: // "\"
      if (startsValidEscape([input, i])) {
        return consumeIdentifierLike([input, i]);
      }
      return [[input, i + 1], Token.delim(code)];
    case 0x5d:
      return [[input, i + 1], Token.closeSquareBracket()];
    case 0x7b:
      return [[input, i + 1], Token.openCurlyBracket()];
    case 0x7d:
      return [[input, i + 1], Token.closeCurlyBracket()];
  }
  return [[input, i + 1], Token.delim(code)];
};
/**
 * {@link https://drafts.csswg.org/css-syntax/#convert-a-string-to-a-number}
 *
 * Converts the code points of a numeric literal (as collected by
 * consumeNumber()) into a JavaScript number. The literal has the shape
 * [sign] digits ["." digits] [("e" | "E") [sign] digits].
 */
function convert(input: Array<number>): number {
  let i = 0;
  // s: sign of the mantissa (+1 / -1).
  let s = input[i] === 0x2d ? -1 : input[i] === 0x2b ? 1 : null;
  if (s !== null) {
    i++;
  } else {
    s = 1;
  }
  let n: Array<number>;
  // v: value of the integer part.
  let v = 0;
  for ([[input, i], n] of zeroOrMore(digit)([input, i])) {
    v = n.reduce((v, c) => 10 * v + (c - 0x30), v);
  }
  if (input[i] === 0x2e) {
    i++;
  }
  // f: value of the fractional digits; d: how many there are.
  let f = 0;
  let d = 0;
  for ([[input, i], n] of zeroOrMore(digit)([input, i])) {
    [f, d] = n.reduce(([f, d], c) => [10 * f + (c - 0x30), d + 1], [f, d]);
  }
  if (input[i] === 0x45 || input[i] === 0x65) {
    i++;
  }
  // t: sign of the exponent.
  let t = input[i] === 0x2d ? -1 : input[i] === 0x2b ? 1 : null;
  if (t !== null) {
    i++;
  } else {
    t = 1;
  }
  // e: value of the exponent.
  let e = 0;
  for ([[input, i], n] of zeroOrMore(digit)([input, i])) {
    e = n.reduce((e, c) => 10 * e + (c - 0x30), e);
  }
  // To account for floating point precision errors, we flip the sign of the
  // exponents (`d` and `t`) and divide rather than multiply.
  return (s * (v + f / 10 ** d)) / 10 ** (-t * e);
}
import {Debugger} from '../common/debugger';
import * as DomUtil from '../common/dom_util';
import {EngineConst} from '../common/engine';
import * as AudioUtil from './audio_util';
import {AuditoryDescription} from './auditory_description';
import {XmlRenderer} from './xml_renderer';
/**
 * Renderer producing a two-dimensional (Braille) layout. The markup pass
 * translates layout annotations on auditory descriptions into begin/end
 * tags; finalize() then resolves those tags into the final 2D string.
 */
export class LayoutRenderer extends XmlRenderer {
  /**
   * @override
   */
  public finalize(str: string) {
    return setTwoDim(str);
  }
  /**
   * @override
   */
  public pause(_pause: AudioUtil.Pause) {
    // Pauses carry no layout information.
    return '';
  }
  /**
   * @override
   */
  public prosodyElement(attr: string, value: number) {
    // Only the layout "prosody" attribute is rendered; everything else is
    // dropped.
    return attr === EngineConst.personalityProps.LAYOUT ?
      `<${value}>` : '';
  }
  /**
   * @override
   */
  public closeTag(tag: string) {
    return `</${tag}>`;
  }
  /**
   * @override
   */
  public markup(descrs: AuditoryDescription[]) {
    // TODO: Include personality range computations.
    let result = [];
    let content: AuditoryDescription[] = [];
    for (let descr of descrs) {
      // Non-layout descriptions are buffered and flushed as a group when
      // the next layout marker (or the end of input) is reached.
      if (!descr.layout) {
        content.push(descr);
        continue;
      }
      result.push(this.processContent(content));
      content = [];
      let value = descr.layout;
      // "beginfoo"/"endfoo" markers become <foo> ... </foo> tags.
      if (value.match(/^begin/)) {
        result.push('<' + value.replace(/^begin/, '') + '>');
        continue;
      }
      if (value.match(/^end/)) {
        result.push('</' + value.replace(/^end/, '') + '>');
        continue;
      }
      console.warn('Something went wrong with layout markup: ' + value);
    }
    result.push(this.processContent(content));
    return result.join('');
  }
  // Renders buffered non-layout content via personality markup; pause
  // elements are dropped.
  private processContent(content: AuditoryDescription[]) {
    let result = [];
    let markup = AudioUtil.personalityMarkup(content);
    // Assignment in the loop condition: iterate until markup[i] is
    // undefined.
    for (let i = 0, descr: AudioUtil.Markup; descr = markup[i]; i++) {
      if (descr.span) {
        result.push(this.merge(descr.span));
        continue;
      }
      if (AudioUtil.isPauseElement(descr)) {
        continue;
      }
    }
    return result.join(''); // this.merge(result);
  }
}
// Postprocessing
// Accumulator for the final two-dimensional expression; reset by setTwoDim().
let twodExpr = '';
// Dispatch table from layout tag name to its 2D handler. Purely structural
// containers (CELL, FENCE, ROW) simply recurse into their children.
let handlers: {[key: string]: Function} = {
  TABLE: handleTable,
  CASES: handleCases,
  CAYLEY: handleCayley,
  MATRIX: handleMatrix,
  CELL: recurseTree,
  FENCE: recurseTree,
  ROW: recurseTree,
  FRACTION: handleFraction,
  NUMERATOR: handleFractionPart,
  DENOMINATOR: handleFractionPart
};
// Dispatches a layout element to the handler registered for its tag name;
// elements without a handler contribute their text content verbatim.
function applyHandler(element: Element): string {
  const handler = handlers[DomUtil.tagName(element)];
  return handler ? handler(element) : element.textContent;
}
// Entry point of the 2D postprocessing: parses the markup produced by the
// renderer and recurses through it to build the final layout string.
function setTwoDim(str: string): string {
  twodExpr = '';
  const wrapped = `<all>${str}</all>`;
  const dom = DomUtil.parseInput(wrapped);
  Debugger.getInstance().output(DomUtil.formatXml(dom.toString()));
  twodExpr = recurseTree(dom);
  return twodExpr;
}
// Joins two multi-line strings side by side, first padding the shorter one
// to the height of the taller so lines can be zipped together.
function combineContent(str1: string, str2: string): string {
  if (!str1 || !str2) {
    return str1 + str2;
  }
  const height1 = strHeight(str1);
  const height2 = strHeight(str2);
  if (height1 < height2) {
    str1 = padCell(str1, height2, strWidth(str1));
  } else if (height2 < height1) {
    str2 = padCell(str2, height1, strWidth(str2));
  }
  const left = str1.split(/\r\n|\r|\n/);
  const right = str2.split(/\r\n|\r|\n/);
  return left.map((line, idx) => line + right[idx]).join('\n');
}
/**
 * Recurses the children of the given node, applying handlers to element
 * children and splicing text children in verbatim, assembling everything
 * into one layout string.
 *
 * @param dom A node.
 * @return The resulting layout element.
 */
function recurseTree(dom: Element): string {
  let result = '';
  for (const child of Array.from(dom.childNodes)) {
    const piece =
      child.nodeType === DomUtil.NodeType.TEXT_NODE
        ? child.textContent
        : applyHandler(child as Element);
    result = combineContent(result, piece);
  }
  return result;
}
/**
 * Number of lines in a (possibly multi-line) layout string. An empty string
 * counts as a single line.
 *
 * @param {string} str
 */
function strHeight(str: string) {
  const breaks = str.match(/\r\n|\r|\n/g);
  return breaks === null ? 1 : breaks.length + 1;
}
/**
 * Width in characters of the longest line of a layout string.
 *
 * @param {string} str
 */
function strWidth(str: string) {
  let max = 0;
  for (const line of str.split(/\r\n|\r|\n/)) {
    max = Math.max(max, line.length);
  }
  return max;
}
/**
 * Appends empty lines to `str` until it is `height` lines tall; strings
 * already tall enough are returned unchanged.
 *
 * @param {string} str
 * @param {number} height
 */
function padHeight(str: string, height: number): string {
  const missing = height - strHeight(str);
  return missing > 0 ? str + '\n'.repeat(missing) : str;
}
/**
 * Pads every line of `str` on the right with Braille blanks (U+2800) up to
 * the given width; lines already wide enough are left unchanged.
 *
 * @param {string} str
 * @param {number} width
 */
function padWidth(str: string, width: number): string {
  return str
    .split(/\r\n|\r|\n/)
    .map((line) => {
      const missing = width - line.length;
      return missing > 0 ? line + '⠀'.repeat(missing) : line;
    })
    .join('\n');
}
/**
 * Pads a cell to exactly the given height and width.
 *
 * @param {string} str
 * @param {number} height
 * @param {number} width
 */
function padCell(str: string, height: number, width: number): string {
  return padWidth(padHeight(str, height), width);
}
// Structure describing a single processed row of a 2D layout.
declare type row = {
  lfence: string,  // rendered left fence ('' if none)
  rfence: string,  // rendered right fence ('' if none)
  sep: string,     // column separator placed between cells
  cells: string[], // rendered cell contents
  height: number,  // height in lines of the tallest cell
  width: number[]  // width in characters of each cell
};
// Clean row elements and assemble row structure: every element child of the
// matrix becomes one processed row; text nodes are skipped.
function assembleRows(matrix: Element): row[] {
  const rows: row[] = [];
  for (const child of Array.from(matrix.childNodes)) {
    if (child.nodeType === DomUtil.NodeType.ELEMENT_NODE) {
      rows.push(handleRow(child as Element));
    }
  }
  return rows;
}
// Compute max height and width: returns the maximal row height and, per
// column, the maximal cell width over all rows.
function getMaxParameters(mat: row[]): [number, number[]] {
  const maxHeight = mat.reduce((max, x) => Math.max(x.height, max), 0);
  const maxWidth = mat[0].width.map((_, col) =>
    mat.reduce((max, x) => Math.max(max, x.width[col]), 0)
  );
  return [maxHeight, maxWidth];
}
// Pad cells and assemble rows: each surviving row's cells are padded to the
// row's height and the per-column maximal width; empty rows are dropped.
function combineCells(mat: row[], maxWidth: number[]): row[] {
  const padded: row[] = [];
  for (const row of mat) {
    if (row.height === 0) {
      continue;
    }
    row.cells = row.cells.map((cell, i) => padCell(cell, row.height, maxWidth[i]));
    padded.push(row);
  }
  return padded;
}
// Combine rows into matrix: assembles padded rows into one multi-line
// string, stretching separators and fences to each row's height and placing
// a spacer line between consecutive rows.
function combineRows(mat: row[], maxHeight: number): string {
  // If all rows are of height 1 assemble them directly.
  if (maxHeight === 1) {
    return mat.map(
      row => row.lfence + row.cells.join(row.sep) + row.rfence).join('\n');
  }
  let result = [];
  // Otherwise insert extra empty rows if necessary
  for (let row of mat) {
    // Stretch the separator to the row's height and join the cells side by
    // side, then wrap the result in vertically stretched fences.
    let sep = verticalArrange(row.sep, row.height);
    let str = row.cells.shift();
    while (row.cells.length) {
      str = combineContent(str, sep);
      str = combineContent(str, row.cells.shift());
    }
    str = combineContent(verticalArrange(row.lfence, row.height), str);
    str = combineContent(str, verticalArrange(row.rfence, row.height));
    result.push(str);
    // Spacer line after each row; presumably width - 3 accounts for the
    // fences -- TODO(review): confirm against the Braille layout rules.
    result.push(row.lfence +
      new Array(strWidth(str) - 3).join(row.sep) + row.rfence);
  }
  // Drop the trailing spacer line.
  return result.slice(0, -1).join('\n');
}
// Lays out a MATRIX element: rows are assembled, padded to uniform cell
// sizes, and stacked.
function handleMatrix(matrix: Element): string {
  const rows = assembleRows(matrix);
  const [maxHeight, maxWidth] = getMaxParameters(rows);
  return combineRows(combineCells(rows, maxWidth), maxHeight);
}
// Lays out a TABLE element like a matrix, first stripping the first and
// last cell of every row.
function handleTable(matrix: Element): string {
  const rows = assembleRows(matrix);
  // TODO: Adapt this so cells of length one will be retained!
  for (const row of rows) {
    row.cells = row.cells.slice(1, -1);
    row.width = row.width.slice(1, -1);
  }
  const [maxHeight, maxWidth] = getMaxParameters(rows);
  return combineRows(combineCells(rows, maxWidth), maxHeight);
}
// TODO: Check with Michael why the number indicator is omitted (e.g., 16.4-1)
// Lays out a CASES element, dropping the trailing cell of every row.
function handleCases(matrix: Element): string {
  const rows = assembleRows(matrix);
  for (const row of rows) {
    row.cells = row.cells.slice(0, -1);
    row.width = row.width.slice(0, -1);
  }
  const [maxHeight, maxWidth] = getMaxParameters(rows);
  return combineRows(combineCells(rows, maxWidth), maxHeight);
}
function handleCayley(matrix: Element): string {
let mat = assembleRows(matrix);
mat.forEach(row => {
row.cells = row.cells.slice(1).slice(0, -1);
row.width = row.width.slice(1).slice(0, -1);
row.sep = row.sep + row.sep;
});
let [maxHeight, maxWidth] = getMaxParameters(mat);
let bar = {
lfence: '', rfence: '',
cells: maxWidth.map(x => '⠐' + new Array(x).join('⠒')),
width: maxWidth,
height: 1,
sep: mat[0].sep
};
mat.splice(1, 0, bar);
mat = combineCells(mat, maxWidth);
return combineRows(mat, maxHeight);
}
// Repeats `char` on `height` consecutive lines (no trailing newline), used
// to stretch fences and separators to a row's height.
function verticalArrange(char: string, height: number) {
  const lines = [];
  for (let i = 0; i < height; i++) {
    lines.push(char);
  }
  return lines.join('\n');
}
// Builds the row structure for a single ROW element: detects and strips the
// fences, collects the column separator from intermediate text nodes,
// renders each cell, and records the row's dimensions.
function handleRow(row: Element): row {
  let children = Array.from(row.childNodes);
  // Both fences are probed before the child list is mutated below.
  let lfence = getFence(children[0]);
  let rfence = getFence(children[children.length - 1]);
  if (lfence) {
    children.shift();
  }
  if (rfence) {
    children.pop();
  }
  let sep = '';
  let cells = [];
  for (let child of children) {
    // Text nodes between cells carry the separator; the last one seen wins.
    if (child.nodeType === DomUtil.NodeType.TEXT_NODE) {
      sep = child.textContent;
      continue;
    }
    let result = applyHandler(child as Element);
    cells.push(result);
  }
  return {lfence: lfence, rfence: rfence, sep: sep,
    cells: cells,
    height: cells.reduce((max, x) => Math.max(strHeight(x), max), 0),
    width: cells.map(strWidth)
  };
}
// Renders a node if it is a FENCE element; any other node yields ''.
function getFence(node: Node): string {
  const isFence =
    node.nodeType === DomUtil.NodeType.ELEMENT_NODE &&
    DomUtil.tagName(node as Element) === 'FENCE';
  return isFence ? applyHandler(node as Element) : '';
}
// Centers a (possibly multi-line) cell inside the given width by padding
// both sides of every line with Braille blanks; an odd remainder goes to
// the right.
function centerCell(cell: string, width: number): string {
  const slack = width - strWidth(cell);
  const lpad = Math.floor(slack / 2);
  const rpad = slack - lpad;
  const lstr = new Array(lpad + 1).join('⠀');
  const rstr = new Array(rpad + 1).join('⠀');
  return cell
    .split(/\r\n|\r|\n/)
    .map((line) => lstr + line + rstr)
    .join('\n');
}
// Lays out a FRACTION element: numerator and denominator are centered above
// and below a bar that spans the wider of the two plus the fences.
function handleFraction(frac: Node): string {
  // Children are: open fence, numerator, separator, denominator, close fence.
  const [open, num, , den, close] = Array.from(frac.childNodes);
  const numerator = applyHandler(num as Element);
  const denominator = applyHandler(den as Element);
  const inner = Math.max(strWidth(numerator), strWidth(denominator));
  const bar = open + new Array(inner + 1).join('⠒') + close;
  const width = bar.length;
  return [
    centerCell(numerator, width),
    bar,
    centerCell(denominator, width)
  ].join('\n');
}
function handleFractionPart(prt: Element): string {
let fchild = prt.firstChild as Element;
let content = recurseTree(prt);
if (fchild && fchild.nodeType === DomUtil.NodeType.ELEMENT_NODE) {
if (DomUtil.tagName(fchild) === 'ENGLISH') {
return '⠰' + content;
}
if (DomUtil.tagName(fchild) === 'NUMBER') {
return '⠼' + content;
}
}
return content;
} | the_stack |
// @Filename:privacyFunctionParameterDeclFile_externalModule.ts
// Conformance fixture: exported ("public") and non-exported ("private")
// declarations paired up to verify where the compiler must report
// declaration-emit errors when an exported signature references a
// non-exported type. The `// Error` markers are intentional expectations --
// do not "fix" them.
class privateClass {
}
export class publicClass {
}
export interface publicInterfaceWithPrivateParmeterTypes {
    new (param: privateClass): publicClass; // Error
    (param: privateClass): publicClass; // Error
    myMethod(param: privateClass): void; // Error
}
export interface publicInterfaceWithPublicParmeterTypes {
    new (param: publicClass): publicClass;
    (param: publicClass): publicClass;
    myMethod(param: publicClass): void;
}
interface privateInterfaceWithPrivateParmeterTypes {
    new (param: privateClass): privateClass;
    (param: privateClass): privateClass;
    myMethod(param: privateClass): void;
}
interface privateInterfaceWithPublicParmeterTypes {
    new (param: publicClass): publicClass;
    (param: publicClass): publicClass;
    myMethod(param: publicClass): void;
}
export class publicClassWithWithPrivateParmeterTypes {
    static myPublicStaticMethod(param: privateClass) { // Error
    }
    private static myPrivateStaticMethod(param: privateClass) {
    }
    myPublicMethod(param: privateClass) { // Error
    }
    private myPrivateMethod(param: privateClass) {
    }
    constructor(param: privateClass, private param1: privateClass, public param2: privateClass) { // Error
    }
}
export class publicClassWithWithPublicParmeterTypes {
    static myPublicStaticMethod(param: publicClass) {
    }
    private static myPrivateStaticMethod(param: publicClass) {
    }
    myPublicMethod(param: publicClass) {
    }
    private myPrivateMethod(param: publicClass) {
    }
    constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
    }
}
class privateClassWithWithPrivateParmeterTypes {
    static myPublicStaticMethod(param: privateClass) {
    }
    private static myPrivateStaticMethod(param: privateClass) {
    }
    myPublicMethod(param: privateClass) {
    }
    private myPrivateMethod(param: privateClass) {
    }
    constructor(param: privateClass, private param1: privateClass, public param2: privateClass) {
    }
}
class privateClassWithWithPublicParmeterTypes {
    static myPublicStaticMethod(param: publicClass) {
    }
    private static myPrivateStaticMethod(param: publicClass) {
    }
    myPublicMethod(param: publicClass) {
    }
    private myPrivateMethod(param: publicClass) {
    }
    constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
    }
}
export function publicFunctionWithPrivateParmeterTypes(param: privateClass) { // Error
}
export function publicFunctionWithPublicParmeterTypes(param: publicClass) {
}
function privateFunctionWithPrivateParmeterTypes(param: privateClass) {
}
function privateFunctionWithPublicParmeterTypes(param: publicClass) {
}
export declare function publicAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void; // Error
export declare function publicAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
declare function privateAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void;
declare function privateAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
// Same matrix again, referencing a type from the non-exported module
// `privateModule` declared later in this file.
export interface publicInterfaceWithPrivateModuleParameterTypes {
    new (param: privateModule.publicClass): publicClass; // Error
    (param: privateModule.publicClass): publicClass; // Error
    myMethod(param: privateModule.publicClass): void; // Error
}
export class publicClassWithPrivateModuleParameterTypes {
    static myPublicStaticMethod(param: privateModule.publicClass) { // Error
    }
    myPublicMethod(param: privateModule.publicClass) { // Error
    }
    constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) { // Error
    }
}
export function publicFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) { // Error
}
export declare function publicAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void; // Error
interface privateInterfaceWithPrivateModuleParameterTypes {
    new (param: privateModule.publicClass): publicClass;
    (param: privateModule.publicClass): publicClass;
    myMethod(param: privateModule.publicClass): void;
}
class privateClassWithPrivateModuleParameterTypes {
    static myPublicStaticMethod(param: privateModule.publicClass) {
    }
    myPublicMethod(param: privateModule.publicClass) {
    }
    constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) {
    }
}
function privateFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) {
}
declare function privateAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void;
// The same public/private declaration matrix nested inside an *exported*
// module: the exported members here are part of the declaration output, so
// the same `// Error` expectations apply as at the top level.
export module publicModule {
    class privateClass {
    }
    export class publicClass {
    }
    export interface publicInterfaceWithPrivateParmeterTypes {
        new (param: privateClass): publicClass; // Error
        (param: privateClass): publicClass; // Error
        myMethod(param: privateClass): void; // Error
    }
    export interface publicInterfaceWithPublicParmeterTypes {
        new (param: publicClass): publicClass;
        (param: publicClass): publicClass;
        myMethod(param: publicClass): void;
    }
    interface privateInterfaceWithPrivateParmeterTypes {
        new (param: privateClass): privateClass;
        (param: privateClass): privateClass;
        myMethod(param: privateClass): void;
    }
    interface privateInterfaceWithPublicParmeterTypes {
        new (param: publicClass): publicClass;
        (param: publicClass): publicClass;
        myMethod(param: publicClass): void;
    }
    export class publicClassWithWithPrivateParmeterTypes {
        static myPublicStaticMethod(param: privateClass) { // Error
        }
        private static myPrivateStaticMethod(param: privateClass) {
        }
        myPublicMethod(param: privateClass) { // Error
        }
        private myPrivateMethod(param: privateClass) {
        }
        constructor(param: privateClass, private param1: privateClass, public param2: privateClass) { // Error
        }
    }
    export class publicClassWithWithPublicParmeterTypes {
        static myPublicStaticMethod(param: publicClass) {
        }
        private static myPrivateStaticMethod(param: publicClass) {
        }
        myPublicMethod(param: publicClass) {
        }
        private myPrivateMethod(param: publicClass) {
        }
        constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
        }
    }
    class privateClassWithWithPrivateParmeterTypes {
        static myPublicStaticMethod(param: privateClass) {
        }
        private static myPrivateStaticMethod(param: privateClass) {
        }
        myPublicMethod(param: privateClass) {
        }
        private myPrivateMethod(param: privateClass) {
        }
        constructor(param: privateClass, private param1: privateClass, public param2: privateClass) {
        }
    }
    class privateClassWithWithPublicParmeterTypes {
        static myPublicStaticMethod(param: publicClass) {
        }
        private static myPrivateStaticMethod(param: publicClass) {
        }
        myPublicMethod(param: publicClass) {
        }
        private myPrivateMethod(param: publicClass) {
        }
        constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
        }
    }
    export function publicFunctionWithPrivateParmeterTypes(param: privateClass) { // Error
    }
    export function publicFunctionWithPublicParmeterTypes(param: publicClass) {
    }
    function privateFunctionWithPrivateParmeterTypes(param: privateClass) {
    }
    function privateFunctionWithPublicParmeterTypes(param: publicClass) {
    }
    export declare function publicAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void; // Error
    export declare function publicAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
    declare function privateAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void;
    declare function privateAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
    export interface publicInterfaceWithPrivateModuleParameterTypes {
        new (param: privateModule.publicClass): publicClass; // Error
        (param: privateModule.publicClass): publicClass; // Error
        myMethod(param: privateModule.publicClass): void; // Error
    }
    export class publicClassWithPrivateModuleParameterTypes {
        static myPublicStaticMethod(param: privateModule.publicClass) { // Error
        }
        myPublicMethod(param: privateModule.publicClass) { // Error
        }
        constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) { // Error
        }
    }
    export function publicFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) { // Error
    }
    export declare function publicAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void; // Error
    interface privateInterfaceWithPrivateModuleParameterTypes {
        new (param: privateModule.publicClass): publicClass;
        (param: privateModule.publicClass): publicClass;
        myMethod(param: privateModule.publicClass): void;
    }
    class privateClassWithPrivateModuleParameterTypes {
        static myPublicStaticMethod(param: privateModule.publicClass) {
        }
        myPublicMethod(param: privateModule.publicClass) {
        }
        constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) {
        }
    }
    function privateFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) {
    }
    declare function privateAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void;
}
module privateModule {
class privateClass {
}
export class publicClass {
}
export interface publicInterfaceWithPrivateParmeterTypes {
new (param: privateClass): publicClass;
(param: privateClass): publicClass;
myMethod(param: privateClass): void;
}
export interface publicInterfaceWithPublicParmeterTypes {
new (param: publicClass): publicClass;
(param: publicClass): publicClass;
myMethod(param: publicClass): void;
}
// NOTE(review): compiler-conformance fixture for declaration-emit visibility
// checks. The identifiers intentionally misspell "Parameter" as "Parmeter";
// do not rename them — baselines key off the exact names.
// Non-exported interfaces may freely reference the non-exported privateClass.
interface privateInterfaceWithPrivateParmeterTypes {
new (param: privateClass): privateClass;
(param: privateClass): privateClass;
myMethod(param: privateClass): void;
}
interface privateInterfaceWithPublicParmeterTypes {
new (param: publicClass): publicClass;
(param: publicClass): publicClass;
myMethod(param: publicClass): void;
}
// Exported class whose public members leak the private type through
// parameter positions (visibility errors expected at the public surface).
export class publicClassWithWithPrivateParmeterTypes {
static myPublicStaticMethod(param: privateClass) {
}
private static myPrivateStaticMethod(param: privateClass) {
}
myPublicMethod(param: privateClass) {
}
private myPrivateMethod(param: privateClass) {
}
constructor(param: privateClass, private param1: privateClass, public param2: privateClass) {
}
}
export class publicClassWithWithPublicParmeterTypes {
static myPublicStaticMethod(param: publicClass) {
}
private static myPrivateStaticMethod(param: publicClass) {
}
myPublicMethod(param: publicClass) {
}
private myPrivateMethod(param: publicClass) {
}
constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
}
}
// Non-exported classes: no visibility errors regardless of parameter types.
class privateClassWithWithPrivateParmeterTypes {
static myPublicStaticMethod(param: privateClass) {
}
private static myPrivateStaticMethod(param: privateClass) {
}
myPublicMethod(param: privateClass) {
}
private myPrivateMethod(param: privateClass) {
}
constructor(param: privateClass, private param1: privateClass, public param2: privateClass) {
}
}
class privateClassWithWithPublicParmeterTypes {
static myPublicStaticMethod(param: publicClass) {
}
private static myPrivateStaticMethod(param: publicClass) {
}
myPublicMethod(param: publicClass) {
}
private myPrivateMethod(param: publicClass) {
}
constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
}
}
// Free functions and ambient declarations, exported vs. non-exported.
export function publicFunctionWithPrivateParmeterTypes(param: privateClass) {
}
export function publicFunctionWithPublicParmeterTypes(param: publicClass) {
}
function privateFunctionWithPrivateParmeterTypes(param: privateClass) {
}
function privateFunctionWithPublicParmeterTypes(param: publicClass) {
}
export declare function publicAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void;
export declare function publicAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
declare function privateAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void;
declare function privateAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
// NOTE(review): fixture section — exported declarations below reference a
// type from a non-exported module (privateModule.publicClass), which is the
// condition under test; the private counterparts are the error-free controls.
export interface publicInterfaceWithPrivateModuleParameterTypes {
new (param: privateModule.publicClass): publicClass;
(param: privateModule.publicClass): publicClass;
myMethod(param: privateModule.publicClass): void;
}
export class publicClassWithPrivateModuleParameterTypes {
static myPublicStaticMethod(param: privateModule.publicClass) {
}
myPublicMethod(param: privateModule.publicClass) {
}
constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) {
}
}
export function publicFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) {
}
export declare function publicAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void;
// Non-exported equivalents: referencing the private module's type is fine.
interface privateInterfaceWithPrivateModuleParameterTypes {
new (param: privateModule.publicClass): publicClass;
(param: privateModule.publicClass): publicClass;
myMethod(param: privateModule.publicClass): void;
}
class privateClassWithPrivateModuleParameterTypes {
static myPublicStaticMethod(param: privateModule.publicClass) {
}
myPublicMethod(param: privateModule.publicClass) {
}
constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) {
}
}
function privateFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) {
}
declare function privateAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void;
}
// @Filename: privacyFunctionParameterDeclFile_GlobalFile.ts
// NOTE(review): global-scope (non-module) half of the fixture; everything
// here is globally visible, so no declaration-emit privacy errors apply.
class publicClassInGlobal {
}
interface publicInterfaceWithPublicParmeterTypesInGlobal {
new (param: publicClassInGlobal): publicClassInGlobal;
(param: publicClassInGlobal): publicClassInGlobal;
myMethod(param: publicClassInGlobal): void;
}
class publicClassWithWithPublicParmeterTypesInGlobal {
static myPublicStaticMethod(param: publicClassInGlobal) {
}
private static myPrivateStaticMethod(param: publicClassInGlobal) {
}
myPublicMethod(param: publicClassInGlobal) {
}
private myPrivateMethod(param: publicClassInGlobal) {
}
constructor(param: publicClassInGlobal, private param1: publicClassInGlobal, public param2: publicClassInGlobal) {
}
}
function publicFunctionWithPublicParmeterTypesInGlobal(param: publicClassInGlobal) {
}
declare function publicAmbientFunctionWithPublicParmeterTypesInGlobal(param: publicClassInGlobal): void;
// NOTE(review): fixture module mirroring the external-module cases above,
// but nested inside a global module. The `// Error` trailers are baseline
// markers for the conformance runner — keep them exactly as written.
module publicModuleInGlobal {
class privateClass {
}
export class publicClass {
}
module privateModule {
class privateClass {
}
export class publicClass {
}
export interface publicInterfaceWithPrivateParmeterTypes {
new (param: privateClass): publicClass;
(param: privateClass): publicClass;
myMethod(param: privateClass): void;
}
export interface publicInterfaceWithPublicParmeterTypes {
new (param: publicClass): publicClass;
(param: publicClass): publicClass;
myMethod(param: publicClass): void;
}
interface privateInterfaceWithPrivateParmeterTypes {
new (param: privateClass): privateClass;
(param: privateClass): privateClass;
myMethod(param: privateClass): void;
}
interface privateInterfaceWithPublicParmeterTypes {
new (param: publicClass): publicClass;
(param: publicClass): publicClass;
myMethod(param: publicClass): void;
}
export class publicClassWithWithPrivateParmeterTypes {
static myPublicStaticMethod(param: privateClass) {
}
private static myPrivateStaticMethod(param: privateClass) {
}
myPublicMethod(param: privateClass) {
}
private myPrivateMethod(param: privateClass) {
}
constructor(param: privateClass, private param1: privateClass, public param2: privateClass) {
}
}
export class publicClassWithWithPublicParmeterTypes {
static myPublicStaticMethod(param: publicClass) {
}
private static myPrivateStaticMethod(param: publicClass) {
}
myPublicMethod(param: publicClass) {
}
private myPrivateMethod(param: publicClass) {
}
constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
}
}
class privateClassWithWithPrivateParmeterTypes {
static myPublicStaticMethod(param: privateClass) {
}
private static myPrivateStaticMethod(param: privateClass) {
}
myPublicMethod(param: privateClass) {
}
private myPrivateMethod(param: privateClass) {
}
constructor(param: privateClass, private param1: privateClass, public param2: privateClass) {
}
}
class privateClassWithWithPublicParmeterTypes {
static myPublicStaticMethod(param: publicClass) {
}
private static myPrivateStaticMethod(param: publicClass) {
}
myPublicMethod(param: publicClass) {
}
private myPrivateMethod(param: publicClass) {
}
constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
}
}
export function publicFunctionWithPrivateParmeterTypes(param: privateClass) {
}
export function publicFunctionWithPublicParmeterTypes(param: publicClass) {
}
function privateFunctionWithPrivateParmeterTypes(param: privateClass) {
}
function privateFunctionWithPublicParmeterTypes(param: publicClass) {
}
export declare function publicAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void;
export declare function publicAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
declare function privateAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void;
declare function privateAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
export interface publicInterfaceWithPrivateModuleParameterTypes {
new (param: privateModule.publicClass): publicClass;
(param: privateModule.publicClass): publicClass;
myMethod(param: privateModule.publicClass): void;
}
export class publicClassWithPrivateModuleParameterTypes {
static myPublicStaticMethod(param: privateModule.publicClass) {
}
myPublicMethod(param: privateModule.publicClass) {
}
constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) {
}
}
export function publicFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) {
}
export declare function publicAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void;
interface privateInterfaceWithPrivateModuleParameterTypes {
new (param: privateModule.publicClass): publicClass;
(param: privateModule.publicClass): publicClass;
myMethod(param: privateModule.publicClass): void;
}
class privateClassWithPrivateModuleParameterTypes {
static myPublicStaticMethod(param: privateModule.publicClass) {
}
myPublicMethod(param: privateModule.publicClass) {
}
constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) {
}
}
function privateFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) {
}
declare function privateAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void;
}
export interface publicInterfaceWithPrivateParmeterTypes {
new (param: privateClass): publicClass; // Error
(param: privateClass): publicClass; // Error
myMethod(param: privateClass): void; // Error
}
export interface publicInterfaceWithPublicParmeterTypes {
new (param: publicClass): publicClass;
(param: publicClass): publicClass;
myMethod(param: publicClass): void;
}
interface privateInterfaceWithPrivateParmeterTypes {
new (param: privateClass): privateClass;
(param: privateClass): privateClass;
myMethod(param: privateClass): void;
}
interface privateInterfaceWithPublicParmeterTypes {
new (param: publicClass): publicClass;
(param: publicClass): publicClass;
myMethod(param: publicClass): void;
}
export class publicClassWithWithPrivateParmeterTypes {
static myPublicStaticMethod(param: privateClass) { // Error
}
private static myPrivateStaticMethod(param: privateClass) {
}
myPublicMethod(param: privateClass) { // Error
}
private myPrivateMethod(param: privateClass) {
}
constructor(param: privateClass, private param1: privateClass, public param2: privateClass) { // Error
}
}
export class publicClassWithWithPublicParmeterTypes {
static myPublicStaticMethod(param: publicClass) {
}
private static myPrivateStaticMethod(param: publicClass) {
}
myPublicMethod(param: publicClass) {
}
private myPrivateMethod(param: publicClass) {
}
constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
}
}
class privateClassWithWithPrivateParmeterTypes {
static myPublicStaticMethod(param: privateClass) {
}
private static myPrivateStaticMethod(param: privateClass) {
}
myPublicMethod(param: privateClass) {
}
private myPrivateMethod(param: privateClass) {
}
constructor(param: privateClass, private param1: privateClass, public param2: privateClass) {
}
}
class privateClassWithWithPublicParmeterTypes {
static myPublicStaticMethod(param: publicClass) {
}
private static myPrivateStaticMethod(param: publicClass) {
}
myPublicMethod(param: publicClass) {
}
private myPrivateMethod(param: publicClass) {
}
constructor(param: publicClass, private param1: publicClass, public param2: publicClass) {
}
}
export function publicFunctionWithPrivateParmeterTypes(param: privateClass) { // Error
}
export function publicFunctionWithPublicParmeterTypes(param: publicClass) {
}
function privateFunctionWithPrivateParmeterTypes(param: privateClass) {
}
function privateFunctionWithPublicParmeterTypes(param: publicClass) {
}
export declare function publicAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void; // Error
export declare function publicAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
declare function privateAmbientFunctionWithPrivateParmeterTypes(param: privateClass): void;
declare function privateAmbientFunctionWithPublicParmeterTypes(param: publicClass): void;
export interface publicInterfaceWithPrivateModuleParameterTypes {
new (param: privateModule.publicClass): publicClass; // Error
(param: privateModule.publicClass): publicClass; // Error
myMethod(param: privateModule.publicClass): void; // Error
}
export class publicClassWithPrivateModuleParameterTypes {
static myPublicStaticMethod(param: privateModule.publicClass) { // Error
}
myPublicMethod(param: privateModule.publicClass) { // Error
}
constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) { // Error
}
}
export function publicFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) { // Error
}
export declare function publicAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void; // Error
interface privateInterfaceWithPrivateModuleParameterTypes {
new (param: privateModule.publicClass): publicClass;
(param: privateModule.publicClass): publicClass;
myMethod(param: privateModule.publicClass): void;
}
class privateClassWithPrivateModuleParameterTypes {
static myPublicStaticMethod(param: privateModule.publicClass) {
}
myPublicMethod(param: privateModule.publicClass) {
}
constructor(param: privateModule.publicClass, private param1: privateModule.publicClass, public param2: privateModule.publicClass) {
}
}
function privateFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass) {
}
declare function privateAmbientFunctionWithPrivateModuleParameterTypes(param: privateModule.publicClass): void;
}
import { createElement } from '@syncfusion/ej2-base';
import { Diagram } from '../../../src/diagram/diagram';
import { DiagramElement } from '../../../src/diagram/core/elements/diagram-element';
import { Container } from '../../../src/diagram/core/containers/container';
import { DiagramModel, NodeModel, NodeConstraints } from '../../../src/diagram/index';
import { MouseEvents } from '../../../spec/diagram/interaction/mouseevents.spec'
import { profile, inMB, getMemoryProfile } from '../../../spec/common.spec';
describe('Diagram Control', () => {
let mouseEvents: MouseEvents = new MouseEvents();
let diagram: Diagram;
// Suite: horizontal stack container whose children declare no width/height;
// verifies auto-sizing plus drag/drop in and out of the stack with undo/redo.
describe('Simple stack panel without children height and width- Horizontal', () => {
let ele: HTMLElement;
beforeAll((): void => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
// NOTE(review): this is an arrow function, so `this` is the module scope,
// not a test context — this.skip() would throw at runtime if this branch
// ever ran (Jasmine has no this.skip(); that is a Mocha idiom, and the
// trailing comment says "Chai"). Confirm intent; pending() may be meant.
this.skip(); //Skips test (in Chai)
return;
}
ele = createElement('div', { id: 'diagram' });
document.body.appendChild(ele);
// node1..node4 become stack children; node5 stays loose on the canvas so
// the "drag the child" spec below can drop it into the group.
let nodes: NodeModel[] = [
{
id: 'node1', annotations: [{ content: '1' }], //verticalAlignment: 'Stretch' // width: 50, height: 50,
},
{
id: 'node2', annotations: [{ content: '2' }],// width: 50, height: 50, //margin: { left: 150, top: 50 }
},
{
id: 'node3', annotations: [{ content: '3' }],// width: 50, height: 50, //margin: { left: 150, top: 50 }
},
{
id: 'node4', annotations: [{ content: '4' }],// width: 50, height: 50,// margin: { left: 150, top: 50 }
},
{
id: 'node5', annotations: [{ content: '5' }],//width: 50, height: 50,// margin: { left: 150, top: 50 }
},
{
id: 'group', children: ['node1',
'node2', 'node3', 'node4'
],
constraints: NodeConstraints.Default | NodeConstraints.AllowDrop,
width: 300, height: 250, offsetX: 200, offsetY: 200,
container: { type: 'Stack', orientation: 'Horizontal' }
},
];
diagram = new Diagram({
width: '800px', height: '1000px', nodes: nodes
});
diagram.appendTo('#diagram');
});
afterAll((): void => {
diagram.destroy();
ele.remove();
});
// Children without explicit size stretch to the stack height (250) in a
// horizontal stack. (Spec title typo 'streatc' kept: renaming changes the
// reported spec name.)
it('Checking stack panel without streatc', (done: Function) => {
expect(diagram.nameTable['node1'].wrapper.bounds.height === 250
&& diagram.nameTable['node1'].wrapper.bounds.width == 50).toBe(true);
done();
});
// Dragging a child out of the stack removes it from the group wrapper;
// undo restores it, redo removes it again.
it('Checking stack panel drag a node and drop on diagram', (done: Function) => {
let diagramCanvas = document.getElementById(diagram.element.id + 'content');
mouseEvents.clickEvent(diagramCanvas, 100, 100);
mouseEvents.mouseDownEvent(diagramCanvas, 100, 100);
mouseEvents.mouseMoveEvent(diagramCanvas, 700, 100);
mouseEvents.mouseUpEvent(diagramCanvas, 700, 100);
expect(diagram.nameTable['group'].wrapper.children.length === 4).toBe(true);
diagram.undo();
expect(diagram.nameTable['group'].wrapper.children.length === 5).toBe(true);
diagram.redo();
expect(diagram.nameTable['group'].wrapper.children.length === 4).toBe(true);
diagram.undo();
done();
});
// Reordering within the stack: node1 moves from index 0 to index 2.
it('Checking stack panel drag and drop', (done: Function) => {
let diagramCanvas = document.getElementById(diagram.element.id + 'content');
mouseEvents.clickEvent(diagramCanvas, 80, 100);
mouseEvents.mouseDownEvent(diagramCanvas, 80, 100);
mouseEvents.mouseMoveEvent(diagramCanvas, 180, 100);
mouseEvents.mouseUpEvent(diagramCanvas, 180, 100);
expect(diagram.nameTable['group'].wrapper.children[2].id === 'node1').toBe(true);
diagram.undo();
expect(diagram.nameTable['group'].wrapper.children[0].id === 'node1').toBe(true);
diagram.redo();
expect(diagram.nameTable['group'].wrapper.children[2].id === 'node1').toBe(true);
diagram.undo();
done();
});
// Copy/paste then drag pasted content and a child out; node4 should leave
// the group.
it('Checking stack panel drag and drop', (done: Function) => {
let diagramCanvas = document.getElementById(diagram.element.id + 'content');
mouseEvents.clickEvent(diagramCanvas, 300, 100);
diagram.copy();
diagram.paste();
mouseEvents.mouseDownEvent(diagramCanvas, 300, 100);
mouseEvents.mouseMoveEvent(diagramCanvas, 700, 100);
mouseEvents.mouseUpEvent(diagramCanvas, 700, 100);
mouseEvents.clickEvent(diagramCanvas, 220, 100);
mouseEvents.mouseDownEvent(diagramCanvas, 220, 100);
mouseEvents.mouseMoveEvent(diagramCanvas, 700, 100);
mouseEvents.mouseUpEvent(diagramCanvas, 700, 100);
expect(diagram.nameTable['node4'].parentId !== 'group').toBe(true);
done();
});
// Dropping the loose node5 onto the group, then undoing, should leave it
// outside the group wrapper.
it('Checking stack panel drag the child', (done: Function) => {
let diagramCanvas = document.getElementById(diagram.element.id + 'content');
mouseEvents.clickEvent(diagramCanvas, 0, 0);
let bounds = diagram.nameTable['node5'].wrapper.bounds.center;
mouseEvents.mouseDownEvent(diagramCanvas, bounds.x, bounds.y);
mouseEvents.mouseMoveEvent(diagramCanvas, 300, 100);
mouseEvents.mouseUpEvent(diagramCanvas, 300, 100);
diagram.undo();
expect(diagram.nameTable['group'].wrapper.children.indexOf(diagram.nameTable['node5'].wrapper) === -1).toBe(true);
diagram.redo();
done();
});
});
// Suite: horizontal stack whose children DO declare width/height/margins;
// verifies explicit sizes are honored (no stretch).
describe('Simple stack panel without children - Horizontal', () => {
let ele: HTMLElement;
beforeAll((): void => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
// NOTE(review): arrow-function `this` — this.skip() is not a Jasmine API
// and would throw if this guard ever fired; confirm intended behavior.
this.skip(); //Skips test (in Chai)
return;
}
ele = createElement('div', { id: 'diagram' });
document.body.appendChild(ele);
let nodes: NodeModel[] = [
{
id: 'node1', annotations: [{ content: '1' }], width: 50, height: 50, margin: { top: 10, bottom: 10 }
},
{
id: 'node2', annotations: [{ content: '2' }], width: 50, height: 50, margin: { left: 150, top: 50 }
},
{
id: 'node3', annotations: [{ content: '3' }], width: 50, height: 50, margin: { left: 150, top: 50 }
},
{
id: 'node4', annotations: [{ content: '4' }], width: 50, height: 50, margin: { left: 150, top: 50 }
},
// {
// id: 'node5', annotations: [{ content: '5' }],//width: 50, height: 50,// margin: { left: 150, top: 50 }
// },
{
id: 'group', children: ['node1',
'node2', 'node3', 'node4'
],
constraints: NodeConstraints.Default | NodeConstraints.AllowDrop,
width: 300, height: 250, offsetX: 200, offsetY: 200,
container: { type: 'Stack', orientation: 'Horizontal' }
},
];
diagram = new Diagram({
width: '800px', height: '1000px', nodes: nodes
});
diagram.appendTo('#diagram');
});
afterAll((): void => {
diagram.destroy();
ele.remove();
});
// With explicit 50x50 sizes the child keeps its own dimensions.
it('Checking stack panel without streatc', (done: Function) => {
expect(diagram.nameTable['node1'].wrapper.bounds.height === 50
&& diagram.nameTable['node1'].wrapper.bounds.width == 50).toBe(true);
done();
});
});
// Suite: vertical stack with unsized children; node1 opts into horizontal
// stretch via verticalAlignment, so it should span the stack width (300).
describe('Simple stack panel without children height and width- Vertical', () => {
let ele: HTMLElement;
beforeAll((): void => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
// NOTE(review): arrow-function `this` — this.skip() is not a Jasmine API
// and would throw if this guard ever fired; confirm intended behavior.
this.skip(); //Skips test (in Chai)
return;
}
ele = createElement('div', { id: 'diagram' });
document.body.appendChild(ele);
let nodes: NodeModel[] = [
{
id: 'node1', annotations: [{ content: '1' }], verticalAlignment: 'Stretch' // width: 50, height: 50,
},
{
id: 'node2', annotations: [{ content: '2' }],// width: 50, height: 50, //margin: { left: 150, top: 50 }
},
{
id: 'node3', annotations: [{ content: '3' }],// width: 50, height: 50, //margin: { left: 150, top: 50 }
},
{
id: 'node4', annotations: [{ content: '4' }],// width: 50, height: 50,// margin: { left: 150, top: 50 }
},
// {
// id: 'node5', annotations: [{ content: '5' }],//width: 50, height: 50,// margin: { left: 150, top: 50 }
// },
{
id: 'group', children: ['node1',
'node2', 'node3', 'node4'
],
constraints: NodeConstraints.Default | NodeConstraints.AllowDrop,
width: 300, height: 250, offsetX: 200, offsetY: 200,
container: { type: 'Stack', orientation: 'Vertical' }
},
];
diagram = new Diagram({
width: '800px', height: '1000px', nodes: nodes
});
diagram.appendTo('#diagram');
});
afterAll((): void => {
diagram.destroy();
ele.remove();
});
// Vertical stack: unsized child takes full stack width, default height 50.
it('Checking stack panel without streatc', (done: Function) => {
expect(diagram.nameTable['node1'].wrapper.bounds.height === 50
&& diagram.nameTable['node1'].wrapper.bounds.width == 300).toBe(true);
done();
});
// Vertical reorder drag within the stack keeps all children in the group.
it('Checking stack panel drag and drop', (done: Function) => {
let diagramCanvas = document.getElementById(diagram.element.id + 'content');
mouseEvents.clickEvent(diagramCanvas, 100, 100);
mouseEvents.mouseDownEvent(diagramCanvas, 100, 100);
mouseEvents.mouseMoveEvent(diagramCanvas, 100, 200);
mouseEvents.mouseUpEvent(diagramCanvas, 100, 200);
expect(diagram.nameTable['group'].wrapper.children.length === 5).toBe(true);
diagram.undo();
diagram.redo();
done();
});
});
// Suite: vertical stack with explicitly sized children, plus the shared
// memory-leak sample check for the whole spec file.
describe('Simple stack panel without children - Vertical', () => {
let ele: HTMLElement;
beforeAll((): void => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
// NOTE(review): arrow-function `this` — this.skip() is not a Jasmine API
// and would throw if this guard ever fired; confirm intended behavior.
this.skip(); //Skips test (in Chai)
return;
}
ele = createElement('div', { id: 'diagram' });
document.body.appendChild(ele);
let nodes: NodeModel[] = [
{
id: 'node1', annotations: [{ content: '1' }], width: 50, height: 50, margin: { top: 10, bottom: 10 }
},
{
id: 'node2', annotations: [{ content: '2' }], width: 50, height: 50, margin: { left: 150, top: 50 }
},
{
id: 'node3', annotations: [{ content: '3' }], width: 50, height: 50, margin: { left: 150, top: 50 }
},
{
id: 'node4', annotations: [{ content: '4' }], width: 50, height: 50, margin: { left: 150, top: 50 }
},
{
id: 'group', children: ['node1',
'node2', 'node3', 'node4'
],
constraints: NodeConstraints.Default | NodeConstraints.AllowDrop,
width: 300, height: 250, offsetX: 200, offsetY: 200,
container: { type: 'Stack', orientation: 'Vertical' }
},
];
diagram = new Diagram({
width: '800px', height: '1000px', nodes: nodes
});
diagram.appendTo('#diagram');
});
afterAll((): void => {
diagram.destroy();
ele.remove();
});
// Explicitly sized child keeps its own 50x50 dimensions.
it('Checking stack panel without streatc', (done: Function) => {
expect(diagram.nameTable['node1'].wrapper.bounds.height === 50
&& diagram.nameTable['node1'].wrapper.bounds.width == 50).toBe(true);
done();
});
it('memory leak', () => {
profile.sample();
let average: any = inMB(profile.averageChange)
//Check average change in memory samples to not be over 10MB
expect(average).toBeLessThan(10);
let memory: any = inMB(getMemoryProfile())
//Check the final memory usage against the first usage, there should be little change if everything was properly deallocated
expect(memory).toBeLessThan(profile.samples[0] + 0.25);
})
});
}); | the_stack |
* @fileoverview A SpriteAtlas is a THREE.js Texture that handles the backing
* image data for a collection of Sprites.
*/
import {TextFitter} from './text';
declare var THREE: any;
/**
 * Incoming description of a sprite's backing image: either encoded image
 * bytes (e.g. base64 PNG) or literal text to render into the sprite parcel.
 */
export interface SpriteImageData {
/**
 * The type of data. E.g. 'image/png' or 'text'.
 */
type: string;
/**
 * Data content, like base64 encoded PNG or the text to be drawn.
 */
data: string;
/**
 * For text, whether this value is a special value (will be in italics).
 */
special?: boolean;
}
/**
 * Jobs in the SpriteAtlas's draw queue. These jobs will either be to create
 * Images from data, or to draw created Images to the canvas. A job carries
 * either `imageData` (not yet decoded) or `image` (decoded, ready to blit).
 */
export interface DrawJob {
/**
 * Index of the sprite we're working on.
 */
spriteIndex: number;
/**
 * When this job went on the queue. Jobs stamped before the atlas's
 * lastClearTimestamp are considered stale and ignored.
 */
timestamp: number;
/**
 * Incoming data about the image to draw.
 */
imageData?: SpriteImageData;
/**
 * Actual Image object for the image data. Creating this is severely slow
 * on Chrome/Linux, so we do the jobs separately.
 */
image?: HTMLImageElement;
/**
 * Optional (but recommended) callback to execute when the draw job finishes.
 */
callback?: (spriteIndex: number) => any;
}
/**
 * Default amount of time in ms to spend drawing before ceding control back to
 * the UI thread. 50 ms per drawing batch corresponds to roughly 20 FPS.
 * (The previous value of 16 ms corresponded to ~60 FPS.)
 */
const DEFAULT_DRAW_TIMEOUT = 50; // ~20 FPS.
/**
 * Default amount of time to wait in ms before next draw. We use setTimeout
 * instead of requestAnimationFrame so that the visualization will keep working
 * on drawing queued images if the user switches tabs.
 */
const DEFAULT_WAIT_TIMEOUT = 1;
/**
 * Font family to use when rendering text.
 */
const FONT_FAMILY = `'Roboto Mono', 'Consolas', 'Menlo', monospace`;
/**
 * Amount of padding as a proportion of sprite size to preserve within sprite
 * when rendering text.
 */
const TEXT_PADDING = 0.125;
/**
* A SpriteAtlas is a dynamic texture atlas which holds image data for the
 * Sprites of a SpriteMesh. It is backed by a <canvas> element into which
 * individual Sprite image data is drawn.
*
* The width and height of the atlas are both roughly the square root of the
* capacity times the sprite width and height respectively. Each sprite gets
* a parcel of the atlas space based on its index, starting with the first
* sprite (index 0) at the top-left hand corner, then proceeding across and
* down.
*
* Consider a typical scenario consisting of 10,000 sprites of size 32x32.
*
* |.32px..|
*
* - +-------+-------+-------+- - - -+-------+ -
* . | | | | | | .
* . | 0 | 1 | 2 | ... | 99 | 32px
* . | | | | | | .
* . +-------+-------+-------+- - - -+-------+ -
* . | | | | | |
* . | 100 | 101 | 102 | ... | 199 |
* . | | | | | |
* 3,200px +-------+-------+-------+- - - -+-------+
* . | | | | | |
* . ... ... ... ... ...
* . | | | | | |
* . +-------+-------+-------+- - - -+-------+
* . | | | | | |
* . | 9,900 | 9,901 | 9,902 | ... | 9,999 |
* . | | | | | |
* - +-------+-------+-------+- - - -+-------+
*
* |. . . . . . . . 3,200px . . . . . . . .|
*
* Given the index of a sprite, we can determine the coordinates of its parcel
* of the atlas. Meanwhile, the SpriteMaterial's vertex shader can compute the
* UVs for each sprite vertex.
*/
export class SpriteAtlas extends THREE.Texture {
// SETTINGS.
/**
* How many sprites this texture can hold.
*/
capacity: number;
/**
* Width of one sprite backing image in pixels.
*/
imageWidth: number;
/**
* Height of one sprite backing image in pixels.
*/
imageHeight: number;
/**
* Amount of time in ms to spend drawing images to the backing canvas before
* ceding control back to the UI thread. Setting this to zero disables the
* timeout. See DEFAULT_DRAW_TIMEOUT.
*/
drawTimeout: number;
/**
* Amount of time in ms to wait between batches of drawing images.
* See DEFAULT_WAIT_TIMEOUT.
*/
waitTimeout: number;
// READ-ONLY PROPERTIES.
/**
* Backing canvas for the texture atlas.
*/
canvas: HTMLCanvasElement;
/**
* 2D context for drawing onto the backing canvas.
*/
context: CanvasRenderingContext2D;
/**
* Number of rows of sprites from top to bottom.
*/
spriteRows: number;
/**
* Number of columns of sprites from left-to-right.
*/
spriteColumns: number;
/**
* Queue of sprite images to draw asynchronously.
*/
drawQueue: DrawJob[];
/**
* Whether a call to draw images has been queued.
*/
isDrawQueued: boolean;
/**
* In the case of an incoming montage, or other cancellation, we don't want
* any queued draw jobs or outstanding event handlers to run. So we keep track
* of the last time a request to clear was set, so that any draw jobs before
* it are ignored.
*/
lastClearTimestamp: number;
/**
* Callbacks and their arguments that are ready to be invoked at the next
* available time after the texture has been flushed to the GPU.
*/
callbackQueue: Array < {
callback: (...args: any[]) => any;
args: any[];
}
> ;
/**
* Callback handler to invoke when finished drawing all queued images.
*/
onDrawFinished?: () => any;
/**
* Number of Images which are currently in flight to be loaded. This number
* increases when we create images, and decreases when they either load or
* error out.
*/
pendingImageCount: number;
// PRIVATE.
/**
* TextFitter object used for fitting text into sprites.
*/
fitter: TextFitter;
/**
 * Build an atlas texture sized to hold `capacity` sprites of
 * imageWidth x imageHeight pixels, laid out in a near-square grid.
 */
constructor(capacity: number, imageWidth: number, imageHeight: number) {
// columns = ceil(sqrt(capacity)); rows = enough to hold the remainder.
const spriteColumns = Math.ceil(Math.sqrt(capacity));
const spriteRows = Math.ceil(capacity / spriteColumns);
const canvas = document.createElement('canvas');
canvas.width = imageWidth * spriteColumns;
canvas.height = imageHeight * spriteRows;
const context = canvas.getContext('2d')!;
// The canvas must exist before super() — THREE.Texture takes it as its
// image source; only afterwards may `this` properties be assigned.
super(canvas);
this.capacity = capacity;
this.imageWidth = imageWidth;
this.imageHeight = imageHeight;
this.spriteColumns = spriteColumns;
this.spriteRows = spriteRows;
this.canvas = canvas;
this.context = context;
// Linear filtering for both minification and magnification.
this.minFilter = THREE.LinearFilter;
this.magFilter = THREE.LinearFilter;
this.drawTimeout = DEFAULT_DRAW_TIMEOUT;
this.waitTimeout = DEFAULT_WAIT_TIMEOUT;
this.drawQueue = [];
this.isDrawQueued = false;
this.lastClearTimestamp = 0;
this.callbackQueue = [];
this.pendingImageCount = 0;
// Text rendering is inset by TEXT_PADDING on every side of the parcel.
this.fitter = new TextFitter({
x: imageWidth * TEXT_PADDING,
y: imageHeight * TEXT_PADDING,
width: imageWidth * (1 - 2 * TEXT_PADDING),
height: imageHeight * (1 - 2 * TEXT_PADDING),
});
}
/**
 * Drop all queued draw jobs and pending callbacks, reset the in-flight
 * image counter, and stamp the clear time. The returned timestamp lets
 * asynchronous handlers created before this call detect that they are
 * stale and bail out.
 */
clearQueues(): number {
  const clearedAt = Date.now();
  this.drawQueue = [];
  this.callbackQueue = [];
  this.pendingImageCount = 0;
  this.lastClearTimestamp = clearedAt;
  return clearedAt;
}
/**
 * Queue an asynchronous request to render the given image data into the
 * sprite's parcel of the atlas. The optional callback fires once the draw
 * job has completed (after the texture flush).
 */
setSpriteImageData(
    spriteIndex: number, imageData: SpriteImageData,
    callback?: (spriteIndex: number) => any) {
  const job: DrawJob = {
    spriteIndex,
    timestamp: Date.now(),
    imageData,
    callback,
  };
  this.drawQueue.push(job);
  this.queueDraw();
}
/**
 * Given a URL to a image image, load that image and draw its pixels to the
 * entire backing canvas, replacing all queued per-sprite work.
 */
setAtlasUrl(
atlasUrl: string, crossOrigin?: string,
callback?: (image: HTMLImageElement) => any) {
// Clearing first invalidates any queued jobs and in-flight images; `ts`
// identifies this load so later clears can supersede it.
const ts = this.clearQueues();
const image = new Image();
if (crossOrigin !== undefined) {
image.crossOrigin = crossOrigin;
}
this.pendingImageCount++;
image.onerror = () => {
// TODO(jimbo): Express failures through the callback?
// NOTE(review): on failure only the pending count is decremented — the
// callback and onDrawFinished are never invoked, so waiters are not
// notified of the failed load (see TODO above).
if (this.lastClearTimestamp > ts) {
// Something else triggered a clear while the image was being loaded.
// (clearQueues() already reset pendingImageCount, so no decrement.)
return;
}
this.pendingImageCount--;
};
image.onload = () => {
// TODO(jimbo): Express failures through the callback?
if (this.lastClearTimestamp > ts) {
// Something else triggered a clear while the image was being loaded.
return;
}
this.pendingImageCount--;
// Resize/regrid internal state to the loaded image before blitting it
// across the whole canvas; throws if the image cannot fit capacity.
this.updatePropertiesToMatchImageDimensions(image.width, image.height);
this.context.drawImage(
image, 0, 0, this.canvas.width, this.canvas.height);
// Flag the texture for re-upload to the GPU on the next render.
this.needsUpdate = true;
if (callback) {
this.callbackQueue.push({callback, args: [image]});
}
if (this.onDrawFinished) {
this.onDrawFinished();
}
};
// Assigning src last starts the load after handlers are attached.
image.src = atlasUrl;
}
/**
 * Reconcile internal bookkeeping (canvas size, sprites per row/column) with
 * the dimensions of an incoming atlas image.
 *
 * A no-op when the image already matches the current canvas exactly. If the
 * image is large enough but implies a different grid, the row/column counts
 * and canvas dimensions are updated to match.
 *
 * @throws If the image cannot accommodate `capacity` sprites, or if its
 *     dimensions are not whole multiples of the per-sprite dimensions.
 */
updatePropertiesToMatchImageDimensions(width: number, height: number) {
  const currentWidth = this.imageWidth * this.spriteColumns;
  const currentHeight = this.imageHeight * this.spriteRows;
  if (width === currentWidth && height === currentHeight) {
    return;  // Already in sync; nothing to do.
  }
  const columns = width / this.imageWidth;
  const rows = height / this.imageHeight;
  if (columns * rows < this.capacity) {
    throw Error('Atlas image too small to accommodate atlas capacity.');
  }
  if (columns !== Math.round(columns) || rows !== Math.round(rows)) {
    throw Error('Atlas image dimensions do not fit sprite image dimensions.');
  }
  this.spriteColumns = columns;
  this.spriteRows = rows;
  this.canvas.width = width;
  this.canvas.height = height;
}
/**
 * Run deferred callbacks after the texture has been flushed to the GPU.
 * Does nothing while draw jobs or image loads are still outstanding, so
 * callbacks only fire once the atlas content they depend on has actually
 * been rendered.
 */
postRender() {
  if (this.drawQueue.length || this.pendingImageCount > 0) {
    return;  // Still busy; callbacks stay queued for a later pass.
  }
  // Drain the queue front-to-back; callbacks may enqueue more callbacks,
  // which are drained in the same pass.
  for (let entry = this.callbackQueue.shift(); entry !== undefined;
       entry = this.callbackQueue.shift()) {
    entry.callback.apply(null, entry.args);
  }
}
/**
 * Schedule a future pass of workOnDrawJobs() unless one is already pending.
 * setTimeout is used (rather than requestAnimationFrame) so queued drawing
 * keeps progressing even when the tab is backgrounded.
 */
queueDraw() {
  if (this.isDrawQueued) {
    return;  // A draw pass is already scheduled.
  }
  this.isDrawQueued = true;
  const runScheduledDraw = () => {
    // The flag may have been cleared in the interim; only draw if a pass
    // is still wanted.
    if (this.isDrawQueued) {
      this.workOnDrawJobs();
    }
  };
  setTimeout(runScheduledDraw, this.waitTimeout);
}
/**
 * Fulfill as many draw jobs as possible within the allowed time.
 *
 * Each job targets one sprite cell in the atlas grid. Jobs come in three
 * shapes: an already-loaded Image (drawn immediately), text imageData
 * (rendered directly onto the canvas), or raw imageData (converted to an
 * Image asynchronously and re-queued when it loads). If work remains after
 * the time budget, another pass is scheduled via queueDraw().
 */
workOnDrawJobs() {
  this.isDrawQueued = false;
  const ts = Date.now();
  // No drawTimeout configured means no time budget: drain the whole queue.
  const stopTime = ts + (this.drawTimeout || Infinity);
  while (this.drawQueue.length && Date.now() < stopTime) {
    const {spriteIndex, timestamp, imageData, image, callback} =
        this.drawQueue.shift() as DrawJob;
    if (image) {
      // Image has loaded, draw it and add its callback to the queue.
      const width = this.imageWidth;
      const height = this.imageHeight;
      // Map the linear sprite index to its (x, y) cell in the atlas grid.
      const x = width * (spriteIndex % this.spriteColumns);
      const y = height * Math.floor(spriteIndex / this.spriteColumns);
      this.context.clearRect(x, y, width, height);
      this.context.drawImage(image, x, y, width, height);
      if (typeof callback === 'function') {
        this.callbackQueue.push({callback, args: [spriteIndex]});
      }
    } else if (imageData && imageData.type === 'text') {
      // Draw text directly to the backing canvas and add its callback to the
      // queue.
      const offsetX = this.imageWidth * (spriteIndex % this.spriteColumns);
      const offsetY =
          this.imageHeight * Math.floor(spriteIndex / this.spriteColumns);
      // Center of the sprite cell; the ellipse and text are drawn around it.
      const cx = offsetX + this.imageWidth / 2;
      const cy = offsetY + this.imageHeight / 2;
      // Fitter decides font size and line breaks for the (stringified) text.
      const specs = this.fitter.fit(imageData.data + '');
      const ctx = this.context;
      ctx.clearRect(offsetX, offsetY, this.imageWidth, this.imageHeight);
      // Use translate and scale to draw an ellipse.
      ctx.save();
      ctx.translate(cx, cy);
      ctx.scale(this.imageWidth, this.imageHeight);
      ctx.beginPath();
      ctx.arc(0, 0, 0.5, 0, 2 * Math.PI);
      // Restore before filling so the fill is not distorted by the scale.
      ctx.restore();
      ctx.fillStyle = '#555555';
      ctx.fill();
      ctx.fillStyle = 'white';
      ctx.textBaseline = 'hanging';
      const fontStyle = imageData.special ? 'italic' : 'bold';
      ctx.font = `${fontStyle} ${specs.fontSize}px ${FONT_FAMILY}`;
      const lines = specs.lines;
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        // If you are certain the expression is always non-null/undefined,
        // remove this comment.
        // Max width passed to fillText so long lines are compressed to fit.
        const textWidth = line.text.length *
            this.fitter.settings.glyphAspectRatio! * specs.fontSize;
        ctx.fillText(
            line.text, offsetX + line.x, offsetY + line.y, textWidth);
      }
      if (typeof callback === 'function') {
        this.callbackQueue.push({callback, args: [spriteIndex]});
      }
    } else if (imageData) {
      // Create an image from the data and queue up a draw when it loads.
      const image = new Image();
      this.pendingImageCount++;
      image.onload = () => {
        // Short-circuit if state has been cleared while this was loading.
        if (ts < this.lastClearTimestamp) {
          return;
        }
        this.pendingImageCount--;
        // Move it to the front of the line. Already waited once.
        this.drawQueue.unshift({spriteIndex, timestamp, image, callback});
        this.queueDraw();
      };
      image.onerror = () => {
        // Short-circuit if state has been cleared while this was loading.
        if (ts < this.lastClearTimestamp) {
          return;
        }
        this.pendingImageCount--;
        // Try again later.
        // TODO(jimbo): Limit the number of times to retry.
        this.drawQueue.push({spriteIndex, timestamp, imageData, callback});
        this.queueDraw();
      };
      // Setting the image src seems to be quite expensive, especially on
      // Linux. Setting the image content from data appears to be a
      // synchronous operation that must be done on the UI thread.
      if (imageData.type === 'svg') {
        const svg =
            new Blob([imageData.data], {type: 'image/svg+xml;charset=utf-8'});
        image.src = URL.createObjectURL(svg);
      } else {
        image.src = `data:${imageData.type};base64,${imageData.data}`;
      }
    }
  }
  if (this.drawQueue.length || this.pendingImageCount > 0) {
    // Work remains (or images are in flight): schedule another pass.
    this.queueDraw();
  } else {
    // Queue drained: mark the texture dirty and notify, if requested.
    this.needsUpdate = true;
    if (this.onDrawFinished) {
      this.onDrawFinished();
    }
  }
}
}
import * as diff from 'diff'
import { radix64 } from '../performance/radix64'
import { VNode, VSNode, SnapshotRecord, RecordData, RecordType } from '@timecat/share'
// True when running a development build (enables dev-only behavior).
export const isDev = process.env.NODE_ENV === 'development'
// Placeholder substituted with the real package version at build time.
export const version = '__VERSION__'
/**
 * Log an error with the TimeCat prefix and return the extracted message.
 * Accepts either an Error (its .message is used) or a plain string.
 */
export function logError(e: Error | string): string {
    const message = (e as Error).message || (e as string)
    console.error(`TimeCat Error: ${message}`)
    return message
}
/**
 * Log a warning with the TimeCat prefix and return the extracted message.
 * Accepts either an Error (its .message is used) or a plain string.
 */
export function logWarn(e: Error | string): string {
    const message = (e as Error).message || (e as string)
    console.warn(`TimeCat Warning: ${message}`)
    return message
}
/** Log an advisory message (green) with the TimeCat prefix; returns the message. */
export function logAdvice(msg: string): string {
    const styled = `%c TimeCat Advice: ${msg}`
    console.log(styled, 'color:#0f0;')
    return msg
}
/** Current wall-clock time in milliseconds since the Unix epoch. */
export function getTime(): number {
    return new Date().getTime()
}
// Current timestamp encoded in radix-64 — a compact, sortable string id.
export function getRadix64TimeStr() {
    return radix64.btoa(getTime())
}
/**
 * Random uppercase alphanumeric code of the requested length (6–8).
 * Derived from the base-36 fractional digits of a random number in [16, 36).
 */
export function getRandomCode(len: 6 | 7 | 8 = 8) {
    const seed = Math.random() * 20 + 16
    // Skip the "<digit>." prefix and keep `len` fractional base-36 digits.
    const fragment = seed.toString(36).substring(2, len + 2)
    return fragment.toUpperCase()
}
/**
 * Format a duration in seconds as "HH:MM:SS", dropping a leading "00:"
 * hour field (so 61s becomes "01:01"). Negative input is clamped to 0.
 */
export function secondToTime(second: number) {
    const total = Math.max(0, second)
    const h = Math.floor(total / 3600)
    const m = Math.floor((total / 60) % 60)
    const s = Math.floor(total % 60)
    const timeStr = [h, m, s].map(unit => (unit <= 9 ? '0' + unit : unit)).join(':')
    return timeStr.replace(/^00:/, '')
}
/** Format a millisecond timestamp as a local-time "HH:MM:SS" string. */
export function getDateTime(timestamp: number) {
    const date = new Date(timestamp)
    const pad = (n: number) => ('0' + n).substr(-2)
    return `${pad(date.getHours())}:${pad(date.getMinutes())}:${pad(date.getSeconds())}`
}
/**
 * Parse "MM:SS" or "HH:MM:SS" into milliseconds.
 * Inverse of secondToTime for its two output shapes.
 */
export function toTimeStamp(timeStr: string) {
    const segments = timeStr.split(':').map(Number)
    if (segments.length === 2) {
        const [min, sec] = segments
        return (min * 60 + sec) * 1000
    }
    const [hour, min, sec] = segments
    return (hour * 3600 + min * 60 + sec) * 1000
}
// True for a SNAPSHOT-type record without a frameId — presumably frameId is
// only set for snapshots captured inside iframes (TODO confirm against the
// recorder that produces these records).
export function isSnapshot(frame: RecordData) {
    return (frame as SnapshotRecord).type === RecordType.SNAPSHOT && !(frame as SnapshotRecord).data.frameId
}
/** Resolve after `t` milliseconds (default 200). */
export async function delay(t = 200): Promise<void> {
    return new Promise(resolve => {
        setTimeout(resolve, t)
    })
}
// Element nodes carry a tag; text/comment nodes (VSNode) do not.
export function isVNode(n: VNode | VSNode) {
    return Boolean((n as VNode).tag)
}
// Character-level diff of two strings, normalized into the add/rm patch
// format consumed by revertStrByPatches.
export function getStrDiffPatches(oldStr: string, newStr: string) {
    return getPatches(diff.diffChars(oldStr, newStr))
}
/**
 * Apply add/rm patches (as produced by getStrDiffPatches) to `str`,
 * returning the patched string. Patches are applied in order.
 */
export function revertStrByPatches(str: string, changes: ReturnType<typeof getStrDiffPatches>) {
    for (const change of changes as any[]) {
        if (change.type === 'add') {
            str = str.substring(0, change.index) + change.value + str.substring(change.index)
        } else if (change.type === 'rm') {
            str = str.substring(0, change.index) + str.substring(change.index + change.len)
        }
    }
    return str
}
/**
 * Convert diff.Change entries into positional add/rm patches.
 * The cursor tracks the index in the *new* string: unchanged and added runs
 * advance it; removed runs do not (they no longer occupy positions there).
 */
function getPatches(changes: diff.Change[]) {
    let cursor = 0
    const patches = changes
        .map(change => {
            const { added, removed, value, count } = change
            const size = count || 0
            if (added) {
                const patch = {
                    index: cursor,
                    type: 'add',
                    value
                }
                cursor += size
                return patch
            }
            if (removed) {
                return {
                    index: cursor,
                    type: 'rm',
                    len: size
                }
            }
            // Unchanged run: just advance past it.
            cursor += size
            return undefined
        })
        .filter(Boolean)
    return patches as Array<{
        index: number
        type: 'add' | 'rm'
        value?: string
        len?: number
    }>
}
// True when the string parses (via parseFloat) to a finite number.
// Note: like parseFloat, a numeric prefix is enough ("12px" → true).
export function isNumeric(n: string) {
    const parsed = parseFloat(n)
    return !Number.isNaN(parsed) && Number.isFinite(parsed)
}
/**
 * Underscore-style throttle: invoke `func` at most once per `wait` ms.
 * options.leading === false  skips the immediate call at the window start;
 * options.trailing === false skips the deferred call at the window end.
 * Returns the result of the most recent actual invocation (may be stale).
 */
export function throttle(func: Function, wait: number, options: { leading?: boolean; trailing?: boolean } = {}): any {
    let context: any
    let args: any
    let result: any
    let timeout: any = null
    let previous = 0
    // Trailing-edge invocation, scheduled when calls arrive mid-window.
    const later = function () {
        previous = options.leading === false ? 0 : Date.now()
        timeout = null
        result = func.apply(context, args)
        if (!timeout) context = args = null
    }
    return function (this: any) {
        const now = Date.now()
        // First call with leading disabled: open the window without firing.
        if (!previous && options.leading === false) previous = now
        const remaining = wait - (now - previous)
        context = this
        args = arguments
        if (remaining <= 0 || remaining > wait) {
            // Window elapsed (or clock jumped backwards): fire now.
            if (timeout) {
                clearTimeout(timeout)
                timeout = null
            }
            previous = now
            result = func.apply(context, args)
            if (!timeout) context = args = null
        } else if (!timeout && options.trailing !== false) {
            // Inside the window: schedule one trailing call for its end.
            timeout = setTimeout(later, remaining)
        }
        return result
    }
}
// Any void-returning function; the debounced wrapper preserves its parameters.
type Procedure = (...args: any[]) => void
type Options = {
    // Fire on the leading edge instead of (only) the trailing edge.
    isImmediate?: boolean
    // not standard
    // When combined with isImmediate, ALSO fire on the trailing edge.
    isTrailing?: boolean
}
/**
 * Debounce `func`: calls within `waitMilliseconds` of each other collapse
 * into one invocation. By default that invocation happens on the trailing
 * edge; with isImmediate it happens on the leading edge (and additionally on
 * the trailing edge if isTrailing is also set).
 */
export function debounce<F extends Procedure>(
    func: F,
    waitMilliseconds: number,
    options: Options = {
        isImmediate: false,
        isTrailing: false
    }
): (this: ThisParameterType<F>, ...args: Parameters<F>) => void {
    let timeoutId: ReturnType<typeof setTimeout> | undefined
    return function (this: ThisParameterType<F>, ...args: Parameters<F>) {
        const context = this
        const doLater = function () {
            timeoutId = undefined
            // Trailing-edge call, unless leading-only mode is in effect.
            if (!options.isImmediate || options.isTrailing) {
                func.apply(context, args)
            }
        }
        // Leading edge fires only when no timer is currently pending.
        const shouldCallNow = options.isImmediate && timeoutId === undefined
        if (timeoutId !== undefined) {
            clearTimeout(timeoutId)
        }
        timeoutId = setTimeout(doLater, waitMilliseconds)
        if (shouldCallNow) {
            func.apply(context, args)
        }
    }
}
/**
 * Safe wrapper around the URL constructor.
 *
 * Returns a real URL when `url` (optionally resolved against `base`) parses;
 * otherwise logs the failure via logError and returns a minimal URL-like
 * fallback whose href and pathname both echo the raw input string.
 */
export function createURL(url: string, base?: string) {
    try {
        return new URL(url, base)
    } catch (e) {
        // Catch bindings are `unknown` under strict catch typing; narrow
        // explicitly for logError (which accepts Error | string).
        logError(e as Error)
    }
    return { href: url, pathname: url }
}
/**
 * Debounce a stream of state values.
 *
 * `stateHandle` receives a setter; every time it pushes a state, the push is
 * delayed by `delay` (a fixed ms value or a function of the state), and the
 * consumer callback `cb` only fires when the settled state differs from the
 * previously settled one (seeded by `initState`).
 */
export function stateDebounce<T extends string | boolean | number>(
    stateHandle: (setState: (state: T) => void) => void,
    delay: ((state: T) => number) | number,
    initState?: T
) {
    let preState = initState
    let timer = 0
    return (cb: (state: T) => void) => {
        stateHandle(delayExec)
        function delayExec(state: T) {
            // A newer state resets any pending timer (classic debounce).
            if (timer) {
                clearTimeout(timer)
            }
            timer = window.setTimeout(
                () => {
                    // Suppress no-op transitions to the same state.
                    if (preState === state) {
                        return
                    }
                    cb(state)
                    preState = state
                    clearTimeout(timer)
                    timer = 0
                },
                typeof delay === 'number' ? delay : delay(state)
            )
        }
    }
}
// Print the TimeCat ASCII-art banner to the console, styled blue.
export function logAsciiLogo() {
    /* eslint-disable */
    return console.log(
        `%c
______ _ _____ _
|_ _(_) / __ \\ | |
| | _ _ __ ___ ___| / \\/ __ _| |_
| | | | '_ \` _ \\ / _ \\ | / _\` | __|
| | | | | | | | | __/ \\__/\\ (_| | |_
\\_/ |_|_| |_| |_|\\___|\\____/\\__,_|\\__|
`,
        'color: #1475b2;'
    )
}
/**
 * Print a two-segment badge (shields.io style) to the console:
 * a title segment and a content segment, each with its own background.
 */
export function logBadge(opts: { title: string; content: string; titleColor?: string; backgroundColor?: string }) {
    const { title, content, titleColor, backgroundColor } = opts
    const tColor = titleColor || '#606060'
    const bColor = backgroundColor || '#1475b2'
    const text = `%c ${title} %c ${content} `
    const titleStyle = `padding: 1px; border-radius: 3px 0 0 3px; color: #fff; background: ${tColor};`
    const contentStyle = `padding: 1px; border-radius: 0 3px 3px 0; color: #fff; background: ${bColor};`
    console.log(text, titleStyle, contentStyle)
}
// Print the startup banner: ASCII logo plus version and repo badges.
export function logInfo() {
    logAsciiLogo()
    logBadge({ title: 'version', content: version })
    logBadge({ title: 'more info', content: 'github.com/oct16/timecat' })
}
/**
 * Delete every TimeCat-owned global (keys prefixed G_RECORD / G_REPLAY)
 * from `window`, leaving all other globals untouched.
 */
export function removeGlobalVariables() {
    const targetKeys = Object.keys(window).filter(key => {
        return !!key && (key.startsWith('G_RECORD') || key.startsWith('G_REPLAY'))
    }) as (keyof Window)[]
    for (const key of targetKeys) {
        delete window[key]
    }
}
/**
 * Heuristic check for native (host-provided) functions: their toString()
 * output contains "[native code]" instead of a JS body. Note a user function
 * could spoof this by embedding the marker in its source.
 */
export function isNativeFunction(fn: Function) {
    return fn.toString().includes('[native code]')
}
// No-op placeholders used where a callback/promise is required but unused.
export const tempEmptyFn = () => {}
export const tempEmptyPromise = () => Promise.resolve()
// Writable value properties of CanvasRenderingContext2D — the recorder
// snapshots these and the replayer restores them.
export const canvasContext2DAttrs: (keyof CanvasRenderingContext2D)[] = [
    'direction',
    'fillStyle',
    'filter',
    'font',
    'globalAlpha',
    'globalCompositeOperation',
    'imageSmoothingEnabled',
    'imageSmoothingQuality',
    'lineCap',
    'lineDashOffset',
    'lineJoin',
    'lineWidth',
    'miterLimit',
    'shadowBlur',
    'shadowColor',
    'shadowOffsetX',
    'shadowOffsetY',
    'strokeStyle',
    'textAlign',
    'textBaseline'
]
// Method names of CanvasRenderingContext2D intercepted during recording.
export const canvasContext2DMethods: (keyof CanvasRenderingContext2D)[] = [
    'arc',
    'arcTo',
    'beginPath',
    'bezierCurveTo',
    'clearRect',
    'clip',
    'closePath',
    'createImageData',
    'createLinearGradient',
    'createPattern',
    'createRadialGradient',
    'drawFocusIfNeeded',
    'drawImage',
    'ellipse',
    'fill',
    'fillRect',
    'fillText',
    'getImageData',
    'getLineDash',
    'getTransform',
    'isPointInPath',
    'isPointInStroke',
    'lineTo',
    'measureText',
    'moveTo',
    'putImageData',
    'quadraticCurveTo',
    'rect',
    'resetTransform',
    'restore',
    'rotate',
    'save',
    'scale',
    'setLineDash',
    'setTransform',
    'stroke',
    'strokeRect',
    'strokeText',
    'transform',
    'translate'
]
// Every intercepted 2D-context key: attributes followed by methods.
export const canvasContext2DKeys: (keyof CanvasRenderingContext2D)[] = [
    ...canvasContext2DAttrs,
    ...canvasContext2DMethods
]
export const canvasContextWebGLAttrs: (keyof WebGLRenderingContext)[] = [
'drawingBufferWidth',
'drawingBufferHeight',
'ACTIVE_ATTRIBUTES',
'ACTIVE_TEXTURE',
'ACTIVE_UNIFORMS',
'ALIASED_LINE_WIDTH_RANGE',
'ALIASED_POINT_SIZE_RANGE',
'ALPHA',
'ALPHA_BITS',
'ALWAYS',
'ARRAY_BUFFER',
'ARRAY_BUFFER_BINDING',
'ATTACHED_SHADERS',
'BACK',
'BLEND',
'BLEND_COLOR',
'BLEND_DST_ALPHA',
'BLEND_DST_RGB',
'BLEND_EQUATION',
'BLEND_EQUATION_ALPHA',
'BLEND_EQUATION_RGB',
'BLEND_SRC_ALPHA',
'BLEND_SRC_RGB',
'BLUE_BITS',
'BOOL',
'BOOL_VEC2',
'BOOL_VEC3',
'BOOL_VEC4',
'BROWSER_DEFAULT_WEBGL',
'BUFFER_SIZE',
'BUFFER_USAGE',
'BYTE',
'CCW',
'CLAMP_TO_EDGE',
'COLOR_ATTACHMENT0',
'COLOR_BUFFER_BIT',
'COLOR_CLEAR_VALUE',
'COLOR_WRITEMASK',
'COMPILE_STATUS',
'COMPRESSED_TEXTURE_FORMATS',
'CONSTANT_ALPHA',
'CONSTANT_COLOR',
'CONTEXT_LOST_WEBGL',
'CULL_FACE',
'CULL_FACE_MODE',
'CURRENT_PROGRAM',
'CURRENT_VERTEX_ATTRIB',
'CW',
'DECR',
'DECR_WRAP',
'DELETE_STATUS',
'DEPTH_ATTACHMENT',
'DEPTH_BITS',
'DEPTH_BUFFER_BIT',
'DEPTH_CLEAR_VALUE',
'DEPTH_COMPONENT',
'DEPTH_COMPONENT16',
'DEPTH_FUNC',
'DEPTH_RANGE',
'DEPTH_STENCIL',
'DEPTH_STENCIL_ATTACHMENT',
'DEPTH_TEST',
'DEPTH_WRITEMASK',
'DITHER',
'DONT_CARE',
'DST_ALPHA',
'DST_COLOR',
'DYNAMIC_DRAW',
'ELEMENT_ARRAY_BUFFER',
'ELEMENT_ARRAY_BUFFER_BINDING',
'EQUAL',
'FASTEST',
'FLOAT',
'FLOAT_MAT2',
'FLOAT_MAT3',
'FLOAT_MAT4',
'FLOAT_VEC2',
'FLOAT_VEC3',
'FLOAT_VEC4',
'FRAGMENT_SHADER',
'FRAMEBUFFER',
'FRAMEBUFFER_ATTACHMENT_OBJECT_NAME',
'FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE',
'FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE',
'FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL',
'FRAMEBUFFER_BINDING',
'FRAMEBUFFER_COMPLETE',
'FRAMEBUFFER_INCOMPLETE_ATTACHMENT',
'FRAMEBUFFER_INCOMPLETE_DIMENSIONS',
'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT',
'FRAMEBUFFER_UNSUPPORTED',
'FRONT',
'FRONT_AND_BACK',
'FRONT_FACE',
'FUNC_ADD',
'FUNC_REVERSE_SUBTRACT',
'FUNC_SUBTRACT',
'GENERATE_MIPMAP_HINT',
'GEQUAL',
'GREATER',
'GREEN_BITS',
'HIGH_FLOAT',
'HIGH_INT',
'IMPLEMENTATION_COLOR_READ_FORMAT',
'IMPLEMENTATION_COLOR_READ_TYPE',
'INCR',
'INCR_WRAP',
'INT',
'INT_VEC2',
'INT_VEC3',
'INT_VEC4',
'INVALID_ENUM',
'INVALID_FRAMEBUFFER_OPERATION',
'INVALID_OPERATION',
'INVALID_VALUE',
'INVERT',
'KEEP',
'LEQUAL',
'LESS',
'LINEAR',
'LINEAR_MIPMAP_LINEAR',
'LINEAR_MIPMAP_NEAREST',
'LINES',
'LINE_LOOP',
'LINE_STRIP',
'LINE_WIDTH',
'LINK_STATUS',
'LOW_FLOAT',
'LOW_INT',
'LUMINANCE',
'LUMINANCE_ALPHA',
'MAX_COMBINED_TEXTURE_IMAGE_UNITS',
'MAX_CUBE_MAP_TEXTURE_SIZE',
'MAX_FRAGMENT_UNIFORM_VECTORS',
'MAX_RENDERBUFFER_SIZE',
'MAX_TEXTURE_IMAGE_UNITS',
'MAX_TEXTURE_SIZE',
'MAX_VARYING_VECTORS',
'MAX_VERTEX_ATTRIBS',
'MAX_VERTEX_TEXTURE_IMAGE_UNITS',
'MAX_VERTEX_UNIFORM_VECTORS',
'MAX_VIEWPORT_DIMS',
'MEDIUM_FLOAT',
'MEDIUM_INT',
'MIRRORED_REPEAT',
'NEAREST',
'NEAREST_MIPMAP_LINEAR',
'NEAREST_MIPMAP_NEAREST',
'NEVER',
'NICEST',
'NONE',
'NOTEQUAL',
'NO_ERROR',
'ONE',
'ONE_MINUS_CONSTANT_ALPHA',
'ONE_MINUS_CONSTANT_COLOR',
'ONE_MINUS_DST_ALPHA',
'ONE_MINUS_DST_COLOR',
'ONE_MINUS_SRC_ALPHA',
'ONE_MINUS_SRC_COLOR',
'OUT_OF_MEMORY',
'PACK_ALIGNMENT',
'POINTS',
'POLYGON_OFFSET_FACTOR',
'POLYGON_OFFSET_FILL',
'POLYGON_OFFSET_UNITS',
'RED_BITS',
'RENDERBUFFER',
'RENDERBUFFER_ALPHA_SIZE',
'RENDERBUFFER_BINDING',
'RENDERBUFFER_BLUE_SIZE',
'RENDERBUFFER_DEPTH_SIZE',
'RENDERBUFFER_GREEN_SIZE',
'RENDERBUFFER_HEIGHT',
'RENDERBUFFER_INTERNAL_FORMAT',
'RENDERBUFFER_RED_SIZE',
'RENDERBUFFER_STENCIL_SIZE',
'RENDERBUFFER_WIDTH',
'RENDERER',
'REPEAT',
'REPLACE',
'RGB',
'RGB565',
'RGB5_A1',
'RGBA',
'RGBA4',
'SAMPLER_2D',
'SAMPLER_CUBE',
'SAMPLES',
'SAMPLE_ALPHA_TO_COVERAGE',
'SAMPLE_BUFFERS',
'SAMPLE_COVERAGE',
'SAMPLE_COVERAGE_INVERT',
'SAMPLE_COVERAGE_VALUE',
'SCISSOR_BOX',
'SCISSOR_TEST',
'SHADER_TYPE',
'SHADING_LANGUAGE_VERSION',
'SHORT',
'SRC_ALPHA',
'SRC_ALPHA_SATURATE',
'SRC_COLOR',
'STATIC_DRAW',
'STENCIL_ATTACHMENT',
'STENCIL_BACK_FAIL',
'STENCIL_BACK_FUNC',
'STENCIL_BACK_PASS_DEPTH_FAIL',
'STENCIL_BACK_PASS_DEPTH_PASS',
'STENCIL_BACK_REF',
'STENCIL_BACK_VALUE_MASK',
'STENCIL_BACK_WRITEMASK',
'STENCIL_BITS',
'STENCIL_BUFFER_BIT',
'STENCIL_CLEAR_VALUE',
'STENCIL_FAIL',
'STENCIL_FUNC',
'STENCIL_INDEX8',
'STENCIL_PASS_DEPTH_FAIL',
'STENCIL_PASS_DEPTH_PASS',
'STENCIL_REF',
'STENCIL_TEST',
'STENCIL_VALUE_MASK',
'STENCIL_WRITEMASK',
'STREAM_DRAW',
'SUBPIXEL_BITS',
'TEXTURE',
'TEXTURE0',
'TEXTURE1',
'TEXTURE10',
'TEXTURE11',
'TEXTURE12',
'TEXTURE13',
'TEXTURE14',
'TEXTURE15',
'TEXTURE16',
'TEXTURE17',
'TEXTURE18',
'TEXTURE19',
'TEXTURE2',
'TEXTURE20',
'TEXTURE21',
'TEXTURE22',
'TEXTURE23',
'TEXTURE24',
'TEXTURE25',
'TEXTURE26',
'TEXTURE27',
'TEXTURE28',
'TEXTURE29',
'TEXTURE3',
'TEXTURE30',
'TEXTURE31',
'TEXTURE4',
'TEXTURE5',
'TEXTURE6',
'TEXTURE7',
'TEXTURE8',
'TEXTURE9',
'TEXTURE_2D',
'TEXTURE_BINDING_2D',
'TEXTURE_BINDING_CUBE_MAP',
'TEXTURE_CUBE_MAP',
'TEXTURE_CUBE_MAP_NEGATIVE_X',
'TEXTURE_CUBE_MAP_NEGATIVE_Y',
'TEXTURE_CUBE_MAP_NEGATIVE_Z',
'TEXTURE_CUBE_MAP_POSITIVE_X',
'TEXTURE_CUBE_MAP_POSITIVE_Y',
'TEXTURE_CUBE_MAP_POSITIVE_Z',
'TEXTURE_MAG_FILTER',
'TEXTURE_MIN_FILTER',
'TEXTURE_WRAP_S',
'TEXTURE_WRAP_T',
'TRIANGLES',
'TRIANGLE_FAN',
'TRIANGLE_STRIP',
'UNPACK_ALIGNMENT',
'UNPACK_COLORSPACE_CONVERSION_WEBGL',
'UNPACK_FLIP_Y_WEBGL',
'UNPACK_PREMULTIPLY_ALPHA_WEBGL',
'UNSIGNED_BYTE',
'UNSIGNED_INT',
'UNSIGNED_SHORT',
'UNSIGNED_SHORT_4_4_4_4',
'UNSIGNED_SHORT_5_5_5_1',
'UNSIGNED_SHORT_5_6_5',
'VALIDATE_STATUS',
'VENDOR',
'VERSION',
'VERTEX_ATTRIB_ARRAY_BUFFER_BINDING',
'VERTEX_ATTRIB_ARRAY_ENABLED',
'VERTEX_ATTRIB_ARRAY_NORMALIZED',
'VERTEX_ATTRIB_ARRAY_POINTER',
'VERTEX_ATTRIB_ARRAY_SIZE',
'VERTEX_ATTRIB_ARRAY_STRIDE',
'VERTEX_ATTRIB_ARRAY_TYPE',
'VERTEX_SHADER',
'VIEWPORT',
'ZERO'
]
export const canvasContextWebGLMethods: (keyof WebGLRenderingContext)[] = [
'activeTexture',
'attachShader',
'bindAttribLocation',
'bindBuffer',
'bindFramebuffer',
'bindRenderbuffer',
'bindTexture',
'blendColor',
'blendEquation',
'blendEquationSeparate',
'blendFunc',
'blendFuncSeparate',
'bufferData',
'bufferSubData',
'checkFramebufferStatus',
'clear',
'clearColor',
'clearDepth',
'clearStencil',
'colorMask',
'compileShader',
'compressedTexImage2D',
'compressedTexSubImage2D',
'copyTexImage2D',
'copyTexSubImage2D',
'createBuffer',
'createFramebuffer',
'createProgram',
'createRenderbuffer',
'createShader',
'createTexture',
'cullFace',
'deleteBuffer',
'deleteFramebuffer',
'deleteProgram',
'deleteRenderbuffer',
'deleteShader',
'deleteTexture',
'depthFunc',
'depthMask',
'depthRange',
'detachShader',
'disable',
'disableVertexAttribArray',
'drawArrays',
'drawElements',
'enable',
'enableVertexAttribArray',
'finish',
'flush',
'framebufferRenderbuffer',
'framebufferTexture2D',
'frontFace',
'generateMipmap',
'getActiveAttrib',
'getActiveUniform',
'getAttachedShaders',
'getAttribLocation',
'getBufferParameter',
'getContextAttributes',
'getError',
'getExtension',
'getFramebufferAttachmentParameter',
'getParameter',
'getProgramInfoLog',
'getProgramParameter',
'getRenderbufferParameter',
'getShaderInfoLog',
'getShaderParameter',
'getShaderPrecisionFormat',
'getShaderSource',
'getSupportedExtensions',
'getTexParameter',
'getUniform',
'getUniformLocation',
'getVertexAttrib',
'getVertexAttribOffset',
'hint',
'isBuffer',
'isContextLost',
'isEnabled',
'isFramebuffer',
'isProgram',
'isRenderbuffer',
'isShader',
'isTexture',
'lineWidth',
'linkProgram',
'makeXRCompatible' as any,
'pixelStorei',
'polygonOffset',
'readPixels',
'renderbufferStorage',
'sampleCoverage',
'scissor',
'shaderSource',
'stencilFunc',
'stencilFuncSeparate',
'stencilMask',
'stencilMaskSeparate',
'stencilOp',
'stencilOpSeparate',
'texImage2D',
'texParameterf',
'texParameteri',
'texSubImage2D',
'uniform1f',
'uniform1fv',
'uniform1i',
'uniform1iv',
'uniform2f',
'uniform2fv',
'uniform2i',
'uniform2iv',
'uniform3f',
'uniform3fv',
'uniform3i',
'uniform3iv',
'uniform4f',
'uniform4fv',
'uniform4i',
'uniform4iv',
'uniformMatrix2fv',
'uniformMatrix3fv',
'uniformMatrix4fv',
'useProgram',
'validateProgram',
'vertexAttrib1f',
'vertexAttrib1fv',
'vertexAttrib2f',
'vertexAttrib2fv',
'vertexAttrib3f',
'vertexAttrib3fv',
'vertexAttrib4f',
'vertexAttrib4fv',
'vertexAttribPointer',
'viewport'
]
export const canvasContextWebGLKeys: (keyof WebGLRenderingContext)[] = [
...canvasContextWebGLAttrs,
...canvasContextWebGLMethods
]
import * as log from '../log';
/* TODO: for now, copy-pasted this file from an external source -- so disable tslint temporarily */
/* tslint:disable */
// Ambient (compile-time only) declarations for the OfficeExtension global
// supplied by the Office host at runtime; no code is emitted for this block.
declare namespace OfficeExtension {
    interface HttpRequestInfo {
        /** HTTP request method */
        method: string;
        /** Request URL */
        url: string;
        /** Request headers */
        headers: { [name: string]: string };
        /** Request body */
        body: string | any;
    }
    /**
     * HTTP response information.
     */
    interface HttpResponseInfo {
        /** Response status code */
        statusCode: number;
        /** Response headers */
        headers: { [name: string]: string };
        /** Response body */
        body: string | any;
    }
    // Proxies an HTTP-shaped request to the local Office document host.
    class HttpUtility {
        static sendLocalDocumentRequest(request: HttpRequestInfo): Promise<HttpResponseInfo>;
    }
}
// Credentials and endpoint for a running Jupyter server's REST API.
export interface JupyterConnectionInfo {
    token: string;
    baseUrl: string;
}
// Header shape of the Jupyter messaging protocol (wire protocol v5.x).
interface JupyterWebSocketMessageHeader {
    msg_id?: string;
    msg_type?: string;
    username?: string;
    session?: string;
    date?: string;
    version?: string;
}
// Full envelope of a Jupyter kernel message as sent over the WebSocket.
interface JupyterWebSocketMessage {
    header: JupyterWebSocketMessageHeader;
    msg_id: string;
    msg_type: string;
    parent_header?: JupyterWebSocketMessageHeader;
    metadata: any;
    content: any;
    buffers: any[];
    channel: string;
}
// (sic: "Juypter" typo preserved — these names are referenced elsewhere.)
interface JuypterWebSocketMessageExecuteReplyContent {
    status: 'ok' | string;
}
// Content of an iopub 'execute_result' message; data is keyed by MIME type.
interface JuypterWebSocketMessageExecuteResultContent {
    data: { [key: string]: any };
    metadata?: { [key: string]: any };
    execution_count: number;
}
// stdin channel: kernel asks the client for input.
interface JupyterWebSocketMessageInputRequestContent {
    prompt: string;
    password: boolean;
}
// stdin channel: client's answer to an input request.
interface JupyterWebSocketMessageInputReplyContent {
    value: string;
}
// iopub 'stream' message: stdout/stderr text from the kernel.
interface JupyterWebSocketMessageStreamResultContent {
    name: string;
    text: string;
}
// shell 'execute_reply' content; ename/evalue describe a raised exception.
interface JupyterWebSocketMessageExecuteReplyContent {
    status: JupyterWebSocketMessageExecuteReplyContentStatus;
    ename?: string;
    evalue?: string;
}
enum JupyterWebSocketMessageExecuteReplyContentStatus {
    ok = 'ok',
    error = 'error',
}
// Message types this client handles; other types are ignored.
enum JupyterWebSocketMessageType {
    execute_result = 'execute_result',
    stream = 'stream',
    execute_reply = 'execute_reply',
}
/*
var msg =
{ "header":
{
"msg_id": "960fa11f-be831bb05301a5c2b95b379c",
"msg_type": "execute_result",
"username": "username",
"session": "1ff9a80a-9e20eb2b4ff2d3ad35d10bc2",
"date": "2019-03-25T04:59:30.661474Z",
"version": "5.3"
},
"msg_id": "960fa11f-be831bb05301a5c2b95b379c",
"msg_type": "execute_result",
"parent_header":
{
"msg_id": "abe66fa0b98c4c07a8dce0ba5985a6ad",
"username": "username",
"session": "84db38e52f6140b087196b72276a456a",
"msg_type": "execute_request",
"version": "5.2",
"date": "2019-03-25T04:59:30.657479Z"
},
"metadata": {},
"content":
{ "data": { "text/plain": "142" }, "metadata": {}, "execution_count": 7 },
"buffers": [],
"channel": "iopub"
};
*/
/**
 * Client for a Jupyter notebook kernel.
 *
 * On first use it creates a session over the server's REST API, opens the
 * kernel's WebSocket channel, and thereafter exposes executeCode(), which
 * resolves with the kernel's text/plain result (or null when the execution
 * produced no result value). It also bridges stdin input_request prompts
 * prefixed with "[Office-Api]" to the local Office document host.
 */
export class JupyterNotebook {
    private m_sessionId: string;
    private m_kernelId: string;
    private m_channelSessionId: string;
    private m_ws: WebSocket;
    // Pending executions keyed by the msg_id of their execute_request.
    private m_executePromiseMap: {
        [key: string]: { resolve: (value: string) => void; reject: (error: any) => void };
    };
    private m_connected: boolean;
    constructor(private m_conn: JupyterConnectionInfo, private m_path: string) {
        this.m_executePromiseMap = {};
    }
    /**
     * Create a notebook session via REST, then open the kernel's WebSocket
     * channel. Resolves once the socket is open.
     */
    private connect(): Promise<void> {
        let url = Util.combineUrl(this.m_conn.baseUrl, 'api/sessions');
        let sessionCreationInfo = {
            path: this.m_path,
            type: 'notebook',
            name: '',
            kernel: {
                id: null,
                name: 'python3',
            },
        };
        return fetch(url, {
            headers: {
                'Content-Type': 'application/json',
                Authorization: 'token ' + this.m_conn.token,
            },
            method: 'POST',
            cache: 'no-cache',
            body: JSON.stringify(sessionCreationInfo),
        })
            .then(response => response.json())
            .then(returnData => {
                this.m_sessionId = returnData.id;
                this.m_kernelId = returnData.kernel.id;
                this.m_channelSessionId = Util.uuid();
                // Derive the ws(s):// endpoint from the http(s):// base URL.
                // NOTE(review): if baseUrl starts with neither scheme, wsUrl
                // stays undefined — verify callers always pass http(s) URLs.
                let wsUrl: string;
                if (this.m_conn.baseUrl.substr(0, 'http://'.length) === 'http://') {
                    wsUrl = 'ws://' + this.m_conn.baseUrl.substr('http://'.length);
                } else if (this.m_conn.baseUrl.substr(0, 'https://'.length) === 'https://') {
                    wsUrl = 'wss://' + this.m_conn.baseUrl.substr('https://'.length);
                }
                wsUrl = Util.combineUrl(
                    wsUrl,
                    'api/kernels/' +
                        this.m_kernelId +
                        '/channels?token=' +
                        this.m_conn.token +
                        '&session_id=' +
                        this.m_channelSessionId,
                );
                Util.log('WebSocket url:' + wsUrl);
                return new Promise<void>((resolve, reject) => {
                    this.m_ws = new WebSocket(wsUrl);
                    this.m_ws.onopen = () => {
                        Util.log('onopen');
                        resolve();
                    };
                    this.m_ws.onmessage = (e: MessageEvent) => {
                        Util.log('onmessage');
                        if (typeof e.data === 'string') {
                            Util.log(e.data);
                            let msg: JupyterWebSocketMessage = JSON.parse(e.data);
                            this.handleWebSocketMessage(msg);
                        } else {
                            // Binary frames are not part of this client's protocol.
                            Util.log('unknown message');
                        }
                    };
                    this.m_ws.onclose = () => {
                        Util.log('onclose');
                    };
                    this.m_ws.onerror = () => {
                        Util.log('onerror');
                    };
                });
            });
    }
    /**
     * Connect lazily, once.
     * NOTE(review): not concurrency-safe — two overlapping first calls would
     * both run connect(); confirm callers serialize the first use.
     */
    async ensureConnected(): Promise<void> {
        if (!this.m_connected) {
            await this.connect();
            this.m_connected = true;
        }
    }
    /**
     * Run `code` on the kernel. Resolves with the text/plain execute_result
     * (or null if the reply was 'ok' with no result); rejects with
     * "ename:evalue" when the kernel reports an error.
     */
    executeCode(code: string): Promise<any> {
        return this.ensureConnected().then(() => {
            var content = {
                code: code,
                silent: false,
                store_history: true,
                user_expressions: {},
                allow_stdin: true,
                stop_on_error: true,
            };
            let p = new Promise<string>((resolve, reject) => {
                let msgId = this.sendShellMessage('execute_request', content, null);
                // Settled later by handleShellReply / handleIopubMessage.
                this.m_executePromiseMap[msgId] = { resolve: resolve, reject: reject };
            });
            return p;
        });
    }
    // Send a message on the shell channel; returns its msg_id for correlation.
    private sendShellMessage(msgType: string, content: any, metadata: any): string {
        let msg = this.buildMessage('shell', msgType, content, metadata);
        let stringMessage = JSON.stringify(msg);
        Util.log('sending:' + stringMessage);
        this.m_ws.send(stringMessage);
        return msg.msg_id;
    }
    // Assemble a protocol-v5.2 message envelope with a fresh msg_id.
    private buildMessage(
        channel: string,
        msgType: string,
        content: any,
        metadata: any,
    ): JupyterWebSocketMessage {
        let msgId = Util.uuid();
        var msg = {
            header: {
                msg_id: msgId,
                username: 'username',
                session: this.m_channelSessionId,
                msg_type: msgType,
                version: '5.2',
            },
            msg_id: msgId,
            msg_type: msgType,
            metadata: metadata || {},
            content: content,
            buffers: [],
            parent_header: {},
            channel: channel,
        };
        return msg;
    }
    // Dispatch an incoming message by channel.
    private handleWebSocketMessage(msg: JupyterWebSocketMessage) {
        switch (msg.channel) {
            case 'shell':
                return this.handleShellReply(msg);
            case 'iopub':
                return this.handleIopubMessage(msg);
            case 'stdin':
                return this.handleInputRequest(msg);
            default:
                console.error('unrecognized message channel', msg.channel, msg);
        }
    }
    // Settle the matching executeCode() promise from an execute_reply.
    private handleShellReply(msg: JupyterWebSocketMessage) {
        if (msg.msg_type == JupyterWebSocketMessageType.execute_reply) {
            let content: JupyterWebSocketMessageExecuteReplyContent = msg.content;
            let parentMsgId = msg.parent_header.msg_id;
            let p = this.m_executePromiseMap[parentMsgId];
            if (p) {
                delete this.m_executePromiseMap[parentMsgId];
                if (content.status == JupyterWebSocketMessageExecuteReplyContentStatus.ok) {
                    // 'ok' with no execute_result seen yet: resolve with null.
                    p.resolve(null);
                } else if (
                    content.status == JupyterWebSocketMessageExecuteReplyContentStatus.error
                ) {
                    p.reject(content.ename + ':' + content.evalue);
                } else {
                    console.error('unrecognized message status', content.status);
                }
            }
        }
    }
    // execute_result resolves the pending promise with the text/plain payload;
    // 'stream' stdout text is forwarded to the console.
    private handleIopubMessage(msg: JupyterWebSocketMessage) {
        if (msg.msg_type === JupyterWebSocketMessageType.execute_result) {
            let content: JuypterWebSocketMessageExecuteResultContent = msg.content;
            let text = content.data['text/plain'];
            let parentMsgId = msg.parent_header.msg_id;
            let p = this.m_executePromiseMap[parentMsgId];
            Util.log('ExecuteResult=' + text);
            if (p) {
                delete this.m_executePromiseMap[parentMsgId];
                p.resolve(text);
            }
        } else if (msg.msg_type == JupyterWebSocketMessageType.stream) {
            const content: JupyterWebSocketMessageStreamResultContent = msg.content;
            if (content.name === 'stdout') {
                let text = content.text;
                if (!Util.isNullOrEmptyString(text)) {
                    // Strips only the first literal "\n" sequence.
                    text = text.replace('\\n', '');
                }
                Util.logConsole(text);
            }
        }
    }
    // Bridge "[Office-Api]"-prefixed input_request prompts to the Office host
    // and reply on the stdin channel with the serialized HTTP response.
    private handleInputRequest(msg: JupyterWebSocketMessage) {
        let content: JupyterWebSocketMessageInputRequestContent = msg.content;
        const officeApiPrefix = '[Office-Api]';
        if (
            typeof content.prompt === 'string' &&
            content.prompt.substr(0, officeApiPrefix.length) === officeApiPrefix
        ) {
            let requestInfoStr = content.prompt.substr(officeApiPrefix.length);
            let requestInfo: OfficeExtension.HttpRequestInfo = JSON.parse(requestInfoStr);
            OfficeExtension.HttpUtility.sendLocalDocumentRequest(requestInfo).then(
                (responseInfo: OfficeExtension.HttpResponseInfo) => {
                    let responseInfoStr = JSON.stringify(responseInfo);
                    let respMsg = this.buildMessage(
                        'stdin',
                        'input_reply',
                        { value: responseInfoStr },
                        null,
                    );
                    // Correlate the reply with the kernel's request.
                    respMsg.parent_header = msg.header;
                    let strRespMsg = JSON.stringify(respMsg);
                    Util.log('sending:' + strRespMsg);
                    this.m_ws.send(strRespMsg);
                },
            );
        }
    }
}
/**
 * Helpers for generating Python source from JS values and for parsing
 * simple Python literals back into JS values.
 */
export class PythonCodeHelper {
    /** Render `functionName(arg1, arg2, ...)` with Python-literal arguments. */
    static buildFunctionInvokeStatement(functionName: string, parameters: any[]): string {
        const args: string[] = [];
        if (parameters) {
            for (const parameter of parameters) {
                args.push(PythonCodeHelper.buildLiteral(parameter));
            }
        }
        return `${functionName}(${args.join(', ')})`;
    }
    /**
     * Parse a simple Python literal (True/False/None, 'string', or a
     * JSON-compatible number/array) into the corresponding JS value.
     */
    static parseFromPythonLiteral(text: string): any {
        switch (text) {
            case 'True':
                return true;
            case 'False':
                return false;
            case 'None':
                return null;
        }
        if (text.charAt(0) == "'") {
            // Strip the surrounding single quotes (no unescaping performed).
            return text.substring(1, text.length - 1);
        }
        return JSON.parse(text);
    }
    /** Render a JS value as a Python literal; unsupported types become None. */
    private static buildLiteral(value: any): string {
        if (value === null || typeof value === 'undefined') {
            return 'None';
        }
        switch (typeof value) {
            case 'boolean':
                return value ? 'True' : 'False';
            case 'number':
                return JSON.stringify(value);
            case 'string':
                // TODO: The Python string is different from JSON string
                // It uses \xhh instead of \uxxxx
                return JSON.stringify(value);
        }
        if (Array.isArray(value)) {
            const items = value.map(item => PythonCodeHelper.buildLiteral(item));
            return `[${items.join(',')}]`;
        }
        return 'None';
    }
}
export class Util {
static combineUrl(parent: string, child: string) {
if (Util.isNullOrEmptyString(child)) {
return parent;
}
if (parent.substr(parent.length - 1) === '/') {
parent = parent.substr(0, parent.length - 1);
}
if (child.charAt(0) == '/') {
child = child.substr(1);
}
return parent + '/' + child;
}
static isNullOrEmptyString(str: string): boolean {
if (typeof str === 'undefined' || str === null) {
return true;
}
return str.length == 0;
}
static uuid(): string {
/**
* http://www.ietf.org/rfc/rfc4122.txt
*/
var s = [];
var hexDigits = '0123456789abcdef';
for (var i = 0; i < 32; i++) {
s[i] = hexDigits.substr(Math.floor(Math.random() * 0x10), 1);
}
s[12] = '4'; // bits 12-15 of the time_hi_and_version field to 0010
s[16] = hexDigits.substr((s[16] & 0x3) | 0x8, 1); // bits 6-7 of the clock_seq_hi_and_reserved to 01
var uuid = s.join('');
return uuid;
}
static log(text: string): void {
const logger = log.getLogger('Jupyter');
logger.info(text);
}
static logResult(text: string): void {
const logger = log.getLogger('Jupyter');
logger.info(text);
}
static logConsole(text: string): void {
console.log(text);
}
} | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as Models from "../models";
import * as Mappers from "../models/devicesMappers";
import * as Parameters from "../models/parameters";
import { IotHubGatewayServiceAPIsContext } from "../iotHubGatewayServiceAPIsContext";
/**
 * Class representing a Devices operation group (autorest-generated).
 *
 * Every public operation is exposed in three overloaded forms — promise-only,
 * callback-only, and options+callback — and a single implementation forwards
 * the arguments to the shared service client, which resolves whichever form
 * the caller used at runtime. The msRest.OperationSpec constants referenced
 * here are defined at the bottom of this file.
 */
export class Devices {
  // Shared generated service client used to issue every request.
  private readonly client: IotHubGatewayServiceAPIsContext;

  /**
   * Create a Devices.
   * @param {IotHubGatewayServiceAPIsContext} client Reference to the service client.
   */
  constructor(client: IotHubGatewayServiceAPIsContext) {
    this.client = client;
  }

  /**
   * Gets the identities of multiple devices from the IoT Hub identity registry. Not recommended. Use
   * the IoT Hub query API to retrieve device twin and device identity information. See
   * https://docs.microsoft.com/en-us/rest/api/iothub/service/queryiothub and
   * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-devguide-query-language for more
   * information.
   * @param [options] The optional parameters
   * @returns Promise<Models.DevicesGetDevicesResponse>
   */
  getDevices(options?: Models.DevicesGetDevicesOptionalParams): Promise<Models.DevicesGetDevicesResponse>;
  /**
   * @param callback The callback
   */
  getDevices(callback: msRest.ServiceCallback<Models.Device[]>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  getDevices(options: Models.DevicesGetDevicesOptionalParams, callback: msRest.ServiceCallback<Models.Device[]>): void;
  // Implementation signature: 'options' may actually carry the callback when
  // the two-argument overload is used; sendOperationRequest disambiguates.
  getDevices(options?: Models.DevicesGetDevicesOptionalParams | msRest.ServiceCallback<Models.Device[]>, callback?: msRest.ServiceCallback<Models.Device[]>): Promise<Models.DevicesGetDevicesResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      getDevicesOperationSpec,
      callback) as Promise<Models.DevicesGetDevicesResponse>;
  }

  /**
   * Gets a device from the identity registry of the IoT Hub.
   * @param id The unique identifier of the device.
   * @param [options] The optional parameters
   * @returns Promise<Models.DevicesGetIdentityResponse>
   */
  getIdentity(id: string, options?: msRest.RequestOptionsBase): Promise<Models.DevicesGetIdentityResponse>;
  /**
   * @param id The unique identifier of the device.
   * @param callback The callback
   */
  getIdentity(id: string, callback: msRest.ServiceCallback<Models.Device>): void;
  /**
   * @param id The unique identifier of the device.
   * @param options The optional parameters
   * @param callback The callback
   */
  getIdentity(id: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Device>): void;
  getIdentity(id: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Device>, callback?: msRest.ServiceCallback<Models.Device>): Promise<Models.DevicesGetIdentityResponse> {
    return this.client.sendOperationRequest(
      {
        id,
        options
      },
      getIdentityOperationSpec,
      callback) as Promise<Models.DevicesGetIdentityResponse>;
  }

  /**
   * Creates or updates the identity of a device in the identity registry of the IoT Hub.
   * @param id The unique identifier of the device.
   * @param device The contents of the device identity.
   * @param [options] The optional parameters
   * @returns Promise<Models.DevicesCreateOrUpdateIdentityResponse>
   */
  createOrUpdateIdentity(id: string, device: Models.Device, options?: Models.DevicesCreateOrUpdateIdentityOptionalParams): Promise<Models.DevicesCreateOrUpdateIdentityResponse>;
  /**
   * @param id The unique identifier of the device.
   * @param device The contents of the device identity.
   * @param callback The callback
   */
  createOrUpdateIdentity(id: string, device: Models.Device, callback: msRest.ServiceCallback<Models.Device>): void;
  /**
   * @param id The unique identifier of the device.
   * @param device The contents of the device identity.
   * @param options The optional parameters
   * @param callback The callback
   */
  createOrUpdateIdentity(id: string, device: Models.Device, options: Models.DevicesCreateOrUpdateIdentityOptionalParams, callback: msRest.ServiceCallback<Models.Device>): void;
  createOrUpdateIdentity(id: string, device: Models.Device, options?: Models.DevicesCreateOrUpdateIdentityOptionalParams | msRest.ServiceCallback<Models.Device>, callback?: msRest.ServiceCallback<Models.Device>): Promise<Models.DevicesCreateOrUpdateIdentityResponse> {
    return this.client.sendOperationRequest(
      {
        id,
        device,
        options
      },
      createOrUpdateIdentityOperationSpec,
      callback) as Promise<Models.DevicesCreateOrUpdateIdentityResponse>;
  }

  /**
   * Deletes the identity of a device from the identity registry of the IoT Hub.
   * @param id The unique identifier of the device.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteIdentity(id: string, options?: Models.DevicesDeleteIdentityOptionalParams): Promise<msRest.RestResponse>;
  /**
   * @param id The unique identifier of the device.
   * @param callback The callback
   */
  deleteIdentity(id: string, callback: msRest.ServiceCallback<void>): void;
  /**
   * @param id The unique identifier of the device.
   * @param options The optional parameters
   * @param callback The callback
   */
  deleteIdentity(id: string, options: Models.DevicesDeleteIdentityOptionalParams, callback: msRest.ServiceCallback<void>): void;
  deleteIdentity(id: string, options?: Models.DevicesDeleteIdentityOptionalParams | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> {
    return this.client.sendOperationRequest(
      {
        id,
        options
      },
      deleteIdentityOperationSpec,
      callback);
  }

  /**
   * Gets the device twin. See https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-device-twins
   * for more information.
   * @param id The unique identifier of the device.
   * @param [options] The optional parameters
   * @returns Promise<Models.DevicesGetTwinResponse>
   */
  getTwin(id: string, options?: msRest.RequestOptionsBase): Promise<Models.DevicesGetTwinResponse>;
  /**
   * @param id The unique identifier of the device.
   * @param callback The callback
   */
  getTwin(id: string, callback: msRest.ServiceCallback<Models.Twin>): void;
  /**
   * @param id The unique identifier of the device.
   * @param options The optional parameters
   * @param callback The callback
   */
  getTwin(id: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Twin>): void;
  getTwin(id: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Twin>, callback?: msRest.ServiceCallback<Models.Twin>): Promise<Models.DevicesGetTwinResponse> {
    return this.client.sendOperationRequest(
      {
        id,
        options
      },
      getTwinOperationSpec,
      callback) as Promise<Models.DevicesGetTwinResponse>;
  }

  /**
   * Replaces the tags and desired properties of a device twin. See
   * https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-device-twins for more information.
   * @param id The unique identifier of the device.
   * @param deviceTwinInfo The device twin info that will replace the existing info.
   * @param [options] The optional parameters
   * @returns Promise<Models.DevicesReplaceTwinResponse>
   */
  replaceTwin(id: string, deviceTwinInfo: Models.Twin, options?: Models.DevicesReplaceTwinOptionalParams): Promise<Models.DevicesReplaceTwinResponse>;
  /**
   * @param id The unique identifier of the device.
   * @param deviceTwinInfo The device twin info that will replace the existing info.
   * @param callback The callback
   */
  replaceTwin(id: string, deviceTwinInfo: Models.Twin, callback: msRest.ServiceCallback<Models.Twin>): void;
  /**
   * @param id The unique identifier of the device.
   * @param deviceTwinInfo The device twin info that will replace the existing info.
   * @param options The optional parameters
   * @param callback The callback
   */
  replaceTwin(id: string, deviceTwinInfo: Models.Twin, options: Models.DevicesReplaceTwinOptionalParams, callback: msRest.ServiceCallback<Models.Twin>): void;
  replaceTwin(id: string, deviceTwinInfo: Models.Twin, options?: Models.DevicesReplaceTwinOptionalParams | msRest.ServiceCallback<Models.Twin>, callback?: msRest.ServiceCallback<Models.Twin>): Promise<Models.DevicesReplaceTwinResponse> {
    return this.client.sendOperationRequest(
      {
        id,
        deviceTwinInfo,
        options
      },
      replaceTwinOperationSpec,
      callback) as Promise<Models.DevicesReplaceTwinResponse>;
  }

  /**
   * Updates the tags and desired properties of a device twin. See
   * https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-device-twins for more information.
   * @param id The unique identifier of the device.
   * @param deviceTwinInfo The device twin info containing the tags and desired properties to be
   * updated.
   * @param [options] The optional parameters
   * @returns Promise<Models.DevicesUpdateTwinResponse>
   */
  updateTwin(id: string, deviceTwinInfo: Models.Twin, options?: Models.DevicesUpdateTwinOptionalParams): Promise<Models.DevicesUpdateTwinResponse>;
  /**
   * @param id The unique identifier of the device.
   * @param deviceTwinInfo The device twin info containing the tags and desired properties to be
   * updated.
   * @param callback The callback
   */
  updateTwin(id: string, deviceTwinInfo: Models.Twin, callback: msRest.ServiceCallback<Models.Twin>): void;
  /**
   * @param id The unique identifier of the device.
   * @param deviceTwinInfo The device twin info containing the tags and desired properties to be
   * updated.
   * @param options The optional parameters
   * @param callback The callback
   */
  updateTwin(id: string, deviceTwinInfo: Models.Twin, options: Models.DevicesUpdateTwinOptionalParams, callback: msRest.ServiceCallback<Models.Twin>): void;
  updateTwin(id: string, deviceTwinInfo: Models.Twin, options?: Models.DevicesUpdateTwinOptionalParams | msRest.ServiceCallback<Models.Twin>, callback?: msRest.ServiceCallback<Models.Twin>): Promise<Models.DevicesUpdateTwinResponse> {
    return this.client.sendOperationRequest(
      {
        id,
        deviceTwinInfo,
        options
      },
      updateTwinOperationSpec,
      callback) as Promise<Models.DevicesUpdateTwinResponse>;
  }

  /**
   * Invokes a direct method on a device. See
   * https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-direct-methods for more information.
   * @param deviceId The unique identifier of the device.
   * @param directMethodRequest The parameters to execute a direct method on the device.
   * @param [options] The optional parameters
   * @returns Promise<Models.DevicesInvokeMethodResponse>
   */
  invokeMethod(deviceId: string, directMethodRequest: Models.CloudToDeviceMethod, options?: msRest.RequestOptionsBase): Promise<Models.DevicesInvokeMethodResponse>;
  /**
   * @param deviceId The unique identifier of the device.
   * @param directMethodRequest The parameters to execute a direct method on the device.
   * @param callback The callback
   */
  invokeMethod(deviceId: string, directMethodRequest: Models.CloudToDeviceMethod, callback: msRest.ServiceCallback<Models.CloudToDeviceMethodResult>): void;
  /**
   * @param deviceId The unique identifier of the device.
   * @param directMethodRequest The parameters to execute a direct method on the device.
   * @param options The optional parameters
   * @param callback The callback
   */
  invokeMethod(deviceId: string, directMethodRequest: Models.CloudToDeviceMethod, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CloudToDeviceMethodResult>): void;
  invokeMethod(deviceId: string, directMethodRequest: Models.CloudToDeviceMethod, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CloudToDeviceMethodResult>, callback?: msRest.ServiceCallback<Models.CloudToDeviceMethodResult>): Promise<Models.DevicesInvokeMethodResponse> {
    return this.client.sendOperationRequest(
      {
        deviceId,
        directMethodRequest,
        options
      },
      invokeMethodOperationSpec,
      callback) as Promise<Models.DevicesInvokeMethodResponse>;
  }
}
// Operation Specifications
// One msRest.OperationSpec per REST operation on the Devices class above.
// All specs share a single serializer built from the generated mappers.
const serializer = new msRest.Serializer(Mappers);

// GET /devices — list device identities (optionally limited by 'top').
// The response body is a raw JSON array, so an inline Sequence mapper is used
// instead of a named composite mapper.
const getDevicesOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "devices",
  queryParameters: [
    Parameters.top,
    Parameters.apiVersion
  ],
  responses: {
    200: {
      bodyMapper: {
        serializedName: "parsedResponse",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "Device"
            }
          }
        }
      }
    },
    default: {}
  },
  serializer
};

// GET /devices/{id} — fetch one device identity.
const getIdentityOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "devices/{id}",
  urlParameters: [
    Parameters.id
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Device
    },
    default: {}
  },
  serializer
};

// PUT /devices/{id} — create or update a device identity.
// If-Match enables optimistic concurrency on updates.
const createOrUpdateIdentityOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "devices/{id}",
  urlParameters: [
    Parameters.id
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.ifMatch
  ],
  requestBody: {
    parameterPath: "device",
    mapper: {
      ...Mappers.Device,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Device
    },
    default: {}
  },
  serializer
};

// DELETE /devices/{id} — remove a device identity (204 No Content on success).
const deleteIdentityOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "devices/{id}",
  urlParameters: [
    Parameters.id
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.ifMatch
  ],
  responses: {
    204: {},
    default: {}
  },
  serializer
};

// GET /twins/{id} — fetch the device twin.
const getTwinOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "twins/{id}",
  urlParameters: [
    Parameters.id
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Twin
    },
    default: {}
  },
  serializer
};

// PUT /twins/{id} — fully replace the twin's tags and desired properties.
const replaceTwinOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "twins/{id}",
  urlParameters: [
    Parameters.id
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.ifMatch
  ],
  requestBody: {
    parameterPath: "deviceTwinInfo",
    mapper: {
      ...Mappers.Twin,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Twin
    },
    default: {}
  },
  serializer
};

// PATCH /twins/{id} — partially update the twin's tags and desired properties.
const updateTwinOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "twins/{id}",
  urlParameters: [
    Parameters.id
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.ifMatch
  ],
  requestBody: {
    parameterPath: "deviceTwinInfo",
    mapper: {
      ...Mappers.Twin,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Twin
    },
    default: {}
  },
  serializer
};

// POST /twins/{deviceId}/methods — invoke a direct method on the device.
const invokeMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "twins/{deviceId}/methods",
  urlParameters: [
    Parameters.deviceId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  requestBody: {
    parameterPath: "directMethodRequest",
    mapper: {
      ...Mappers.CloudToDeviceMethod,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.CloudToDeviceMethodResult
    },
    default: {}
  },
  serializer
};
import express from "express";
import {IpFilter, IpDeniedError} from "express-ipfilter";
// must load before node-red
const runtime = require("@node-red/runtime");
const installer = require("@node-red/registry/lib/installer");
const Node = require("@node-red/runtime/lib/nodes/Node")
import newExec from "./node-red-runtime-exec";
import RED from "node-red";
import http from "http";
import { ipcMain, app, ipcRenderer } from "electron";
import path from "path";
import log from "./log";
import { AppStatus } from "./main";
import fs from "fs";
const CustomStorage = require("./custom-storage");
const registry = require("@node-red/registry");
import _ from "lodash";
import bcryptjs from "bcryptjs";
import basicAuth from "basic-auth";
import merge from "deepmerge";
// IP addresses allowed to reach the admin (editor) endpoint. Localhost is
// always allowed; additional addresses come from the NRD_IP_ALLOWS env var as
// a comma-separated list.
const IP_ALLOWS = ["127.0.0.1"];
if (process.env.NRD_IP_ALLOWS) {
  IP_ALLOWS.push(...process.env.NRD_IP_ALLOWS.split(/,/))
}
// Online help page linked from the editor's menu.
const HELP_WEB_URL = "https://sakazuki.github.io/node-red-desktop/";
// npm executable name differs on Windows.
export const NPM_COMMAND = process.platform === 'win32' ? 'npm.cmd' : 'npm';
// Core Node-RED node files excluded from the palette by default.
export const DEFAULT_NODES_EXCLUDES = [
  "10-mqtt.js",
  "16-range.js",
  "31-tcpin.js",
  "32-udp.js",
  "89-trigger.js"
];
export class NodeREDApp {
private app: express.Express;
private server: http.Server;
private settings: any;
private adminPath: string;
private uiPath: string;
private listenIp: string;
public listenPort: number;
private status: AppStatus;
constructor(status: AppStatus) {
this.status = status;
this.app = express();
this.adminPath = "/admin";
this.uiPath = "/";
this.settings = this.setupSettings();
this.server = this.setupServer();
this.listenIp = process.env.NRD_LISTEN_IP || process.env.LISTEN_IP || "127.0.0.1";
this.listenPort = this.defineListenPort();
this.patchInstaller();
this.patchRuntimeExec();
this.setupRED();
}
private defineListenPort(): number {
return parseInt(process.env.NRD_LISTEN_PORT || process.env.LISTEN_PORT || this.status.listenPort || String(Math.random() * 16383 + 49152))
}
public windowTitle() {
const filePath = path.parse(this.status.currentFile);
return `${filePath.base} - ${app.name}`;
}
private loadUserSettings(){
const SETTINGS_FILE = "settings.js"
if (!this.status.userDir) return {}
if (!fs.existsSync(path.join(this.status.userDir, SETTINGS_FILE))) return {}
try {
return require(path.join(this.status.userDir, SETTINGS_FILE))
} catch(err) {
log.error(err)
return {}
}
}
private setupSettings() {
const _this = this;
const userSettings = this.loadUserSettings();
const config = {
verbose: true,
httpAdminRoot: this.adminPath,
httpNodeRoot: this.uiPath,
userDir: this.status.userDir,
flowFile: this.status.currentFile,
storageModule: CustomStorage,
credentialSecret: this.status.credentialSecret,
httpNodeCors: {
origin: "*",
methods: "GET,PUT,POST,DELETE"
},
httpNodeAUth: undefined,
functionGlobalContext: {
get NGROK_URL(): string { return _this.status.ngrokUrl }
},
functionExternalModules: true,
editorTheme: {
page: {
title: app.name,
favicon: path.join(__dirname, "..", "images", "favicon.ico"),
scripts: path.join(__dirname, "..", "renderer/renderer.js"),
css: path.join(__dirname, "..", "renderer/desktop.css")
},
header: {
title: app.name
},
palette: {
editable: true
},
menu: {
"menu-item-help": {
label: app.name,
url: HELP_WEB_URL
}
},
login: {
image: path.join(__dirname, "images", "node-red-256.png")
},
projects: {
enabled: this.status.projectsEnabled || false
}
},
nodesExcludes: this.status.nodesExcludes || [],
logging: {
electron: {
level: "debug",
metrics: true,
handler(){
const electronLogLevel = function(noderedLevel: number): string {
const levelMap: any = {
10: "error",
20: "error",
30: "warn",
40: "info",
50: "debug",
60: "verbose",
98: "info",
99: "info"
};
return levelMap[noderedLevel];
};
return function(msg: {level: number, msg?: {stack?: object}, type?: string}) {
var m = electronLogLevel(msg.level);
if(m && msg.msg) (log as any)[m](msg.msg);
}
}
}
}
};
// @ts-ignore
if (this.status.projectsEnabled) delete config.storageModule;
if (this.status.httpNodeAuth.user.length > 0 && this.status.httpNodeAuth.pass.length) {
//@ts-ignore
config.httpNodeAuth = {
user: this.status.httpNodeAuth.user,
pass: bcryptjs.hashSync(this.status.httpNodeAuth.pass, 8)
}
}
return merge(userSettings, config);
}
private setupServer() {
this.app.use(this.adminPath, IpFilter(IP_ALLOWS, {
mode: "allow",
logLevel: "deny",
detectIp(req: express.Request) {
return req.headers["x-forwarded-for"] || IP_ALLOWS[0]
}
}));
this.app.use((err: any, req: express.Request, res: express.Response, _next: express.NextFunction) => {
if(err instanceof IpDeniedError){
res.status(401);
} else {
res.status(err.status || 500);
}
res.send({
error: err.message
});
})
return http.createServer(this.app);
}
public getAdminUrl() {
return `http://${this.listenIp}:${this.listenPort}${this.adminPath}`
}
public getHttpUrl() {
return `http://${this.listenIp}:${this.listenPort}${this.uiPath}`
}
// based on the code in node-red/red.js
private basicAuthMiddleware(user: string, pass: string) {
let localCachedPassword: string;
const checkPassword = function(p: string) {
return bcryptjs.compareSync(p,pass);
}
const checkPasswordAndCache = function(p: string) {
if (localCachedPassword === p) {
return true;
}
var result = checkPassword(p);
if (result) {
localCachedPassword = p;
}
return result;
}
return function(req: express.Request, res: express.Response, next: express.NextFunction) {
if (req.method === 'OPTIONS') {
return next();
}
var requestUser = basicAuth(req);
if (!requestUser || requestUser.name !== user || !checkPasswordAndCache(requestUser.pass)) {
res.set('WWW-Authenticate', 'Basic realm="Authorization Required"');
return res.sendStatus(401);
}
next();
}
}
private setupDebugOut() {
Node.prototype._send = Node.prototype.send
const me = this
Node.prototype.send = function(msg: any) {
Node.prototype._send.call(this, msg)
if (!me.status.debugOut) return
const _data = {
id: this.id,
z: this.z,
name: this.name,
topic: msg.topic,
msg: msg,
_path: msg._path
}
const data = RED.runtime.util.encodeObject(_data);
RED.runtime.events.emit("comms", {
topic: "debug",
data: data,
retain: false
})
}
}
private setupRED() {
log.debug(">>> settings", this.settings);
RED.init(this.server, this.settings);
this.setupDebugOut()
this.app.use(this.settings.httpAdminRoot, RED.httpAdmin);
if (this.settings.httpNodeAuth) {
this.app.use(
this.settings.httpNodeRoot,
this.basicAuthMiddleware(
this.settings.httpNodeAuth.user,
this.settings.httpNodeAuth.pass
)
);
}
this.app.use(this.settings.httpNodeRoot, RED.httpNode);
}
private patchInstaller() {
installer._checkPrereq = installer.checkPrereq;
installer.checkPrereq = () => {
return new Promise<void>(resolve => {
resolve();
})
}
}
private patchRuntimeExec() {
newExec.init(RED.runtime._, this.status);
runtime._.nodes.installerEnabled = () => { return true };
}
get exec() {
return newExec;
}
public async startRED() {
this.server.close();
try {
await RED.start();
this.server.listen(this.listenPort, this.listenIp, () => {
ipcMain.emit("browser:go", this.getAdminUrl());
});
} catch (err) {
log.error(err);
}
}
public async setFlowFileAndRestart(file: string) {
if (!fs.existsSync(file)) {
log.error(`File does not exist ${file}`);
return;
}
await RED.nodes.stopFlows();
ipcMain.emit("browser:loading");
this.setFlowFile(file);
await RED.nodes.loadFlows(true);
ipcMain.emit("browser:go", this.getAdminUrl());
}
public setFlowFile(file: string) {
this.status.currentFile = file;
this.settings = this.setupSettings();
this.settings.storageModule.init(this.settings, RED.runtime._);
ipcMain.emit("browser:update-title");
}
private loadPackageInfo(file: string): any {
const data = fs.readFileSync(file);
return JSON.parse(data.toString());
}
private async addModule(pkgname: string) {
try {
const info: {nodes: any} = await registry.addModule(pkgname);
RED.runtime.events.emit("runtime-event", {
id: "node/added",
payload: info.nodes,
retain: false
});
} catch (err: any) {
if (err.code === "module_already_loaded") {
this.error(err, `${pkgname} already loaded`);
return;
}
if (err.code !== "MODULE_NOT_FOUND") throw err;
this.success(`${pkgname} installed`);
}
}
private success(message: string, timeout = 3000) {
ipcMain.emit("dialog:show", "success", message, timeout);
}
private error(err: any, message: string) {
log.info(err, message);
ipcMain.emit("dialog:show", "error", JSON.stringify([message, err]));
}
public async execNpmLink(dir: string) {
try {
const pkginfo = this.loadPackageInfo(path.join(dir, "package.json"));
if (!pkginfo.hasOwnProperty("node-red")) throw new Error("This module does not have a node-red property");
// const res = await this.exec.run(NPM_COMMAND, ["link", dir], {cwd: this.status.userDir}, true);
// if (res.code !== 0) throw res;
const regist = await this.exec.run(NPM_COMMAND, ["link"], {cwd: dir}, true);
if (regist.code !== 0) throw regist;
const install = await this.exec.run(NPM_COMMAND, ["link", pkginfo.name], {cwd: this.status.userDir}, true);
if (install.code !== 0) throw install;
this.addModule(pkginfo.name);
} catch(err) {
this.error(err, "fail to add a node. check detail in log.");
}
}
public async execNpmInstall(args: string) {
try {
const before = this.loadPackageInfo(path.join(this.status.userDir, "package.json"));
const res = await this.exec.run(NPM_COMMAND, ["install", args], {cwd: this.status.userDir}, true);
if (res.code !== 0) throw res;
const after = this.loadPackageInfo(path.join(this.status.userDir, "package.json"));
const newPkgs = _.difference(Object.keys(after.dependencies), Object.keys(before.dependencies));
log.info("Installed packages", newPkgs)
for (const pkgname of newPkgs) {
const pkginfo = this.loadPackageInfo(path.join(this.status.userDir, "node_modules", pkgname, "package.json"));
log.debug(pkginfo);
if (pkginfo.hasOwnProperty("node-red")) this.addModule(pkgname);
}
} catch(err) {
this.error(err, "fail to npm install. check detail in log.");
};
}
public getNode(id: string) {
return RED.nodes.getNode(id);
}
public info() {
return `Node-RED version: ${RED.version()}
Node.js version: ${process.version}
Electron version: ${process.versions.electron}`;
}
} | the_stack |
import { createComponent } from "../test-utils";
import {
mockCreateDistributionWithTags,
mockUpdateDistribution,
mockCreateDistributionWithTagsPromise,
mockGetDistributionConfigPromise,
mockUpdateDistributionPromise,
mockListTagsForResource,
mockListTagsForResourcePromise,
mockUntagResource,
mockTagResource
} from "../__mocks__/aws-sdk.mock";
jest.mock("aws-sdk", () => require("../__mocks__/aws-sdk.mock"));
describe("General options propagation", () => {
let component;
// sample origins
const origins = ["https://exampleorigin.com"];
beforeEach(async () => {
mockCreateDistributionWithTagsPromise.mockResolvedValueOnce({
Distribution: {
Id: "distribution123",
ARN: "distributionArn"
}
});
mockGetDistributionConfigPromise.mockResolvedValueOnce({
ETag: "etag",
DistributionConfig: {
Origins: {
Items: []
}
}
});
mockUpdateDistributionPromise.mockResolvedValueOnce({
Distribution: {
Id: "distribution123",
ARN: "distributionArn"
}
});
component = await createComponent();
});
it("create distribution with comment and update it", async () => {
await component.default({
comment: "test comment",
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
Comment: "test comment"
})
})
})
);
await component.default({
comment: "updated comment",
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
Comment: "updated comment"
})
})
);
});
it("create disabled distribution and update it", async () => {
await component.default({
enabled: false,
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
Enabled: false
})
})
})
);
await component.default({
enabled: true,
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
Enabled: true
})
})
);
});
it("create distribution with aliases and update it", async () => {
await component.default({
aliases: ["foo.example.com"],
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
Aliases: {
Items: ["foo.example.com"],
Quantity: 1
}
})
})
})
);
await component.default({
aliases: ["bar.example.com"],
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
Aliases: {
Items: ["bar.example.com"],
Quantity: 1
}
})
})
);
});
it("update distribution with undefined aliases does not override existing aliases", async () => {
// Create distribution
await component.default({ enabled: true, origins });
// Update distribution
await component.default({
enabled: false,
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.not.objectContaining({
Aliases: expect.anything()
})
})
);
});
it("create distribution with priceClass and update it", async () => {
await component.default({
priceClass: "PriceClass_All",
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
PriceClass: "PriceClass_All"
})
})
})
);
await component.default({
priceClass: "PriceClass_100",
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
PriceClass: "PriceClass_100"
})
})
);
});
it("create distribution with web ACL id and update it", async () => {
// Create
await component.default({
webACLId:
"arn:aws:wafv2:us-east-1:123456789012:global/webacl/ExampleWebACL/473e64fd-f30b-4765-81a0-62ad96dd167a",
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
WebACLId:
"arn:aws:wafv2:us-east-1:123456789012:global/webacl/ExampleWebACL/473e64fd-f30b-4765-81a0-62ad96dd167a"
})
})
})
);
// Update
await component.default({
webACLId:
"arn:aws:wafv2:us-east-1:123456789012:global/webacl/UpdatedWebACL/473e64fd-f30b-4765-81a0-62ad96dd167a",
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
WebACLId:
"arn:aws:wafv2:us-east-1:123456789012:global/webacl/UpdatedWebACL/473e64fd-f30b-4765-81a0-62ad96dd167a"
})
})
);
});
it("create distribution with web ACL id and delete it", async () => {
// Create
await component.default({
webACLId:
"arn:aws:wafv2:us-east-1:123456789012:global/webacl/ExampleWebACL/473e64fd-f30b-4765-81a0-62ad96dd167a",
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
WebACLId:
"arn:aws:wafv2:us-east-1:123456789012:global/webacl/ExampleWebACL/473e64fd-f30b-4765-81a0-62ad96dd167a"
})
})
})
);
// Delete
// Per AWS, providing an empty ACLId will remove the WAF association: https://docs.aws.amazon.com/waf/latest/APIReference/API_DisassociateWebACL.html
await component.default({
webACLId: "",
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
WebACLId: ""
})
})
);
});
it("create distribution with restrictions and updates it", async () => {
// Create
await component.default({
restrictions: {
geoRestriction: {
restrictionType: "blacklist",
items: ["AA"]
}
},
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
Restrictions: {
GeoRestriction: {
RestrictionType: "blacklist",
Quantity: 1,
Items: ["AA"]
}
}
})
})
})
);
// Update
await component.default({
restrictions: {
geoRestriction: {
restrictionType: "blacklist",
items: ["ZZ"]
}
},
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
Restrictions: {
GeoRestriction: {
RestrictionType: "blacklist",
Quantity: 1,
Items: ["ZZ"]
}
}
})
})
);
});
it("create distribution with restrictions and deletes it", async () => {
// Create
await component.default({
restrictions: {
geoRestriction: {
restrictionType: "blacklist",
items: ["AA"]
}
},
origins
});
expect(mockCreateDistributionWithTags).toBeCalledWith(
expect.objectContaining({
DistributionConfigWithTags: expect.objectContaining({
DistributionConfig: expect.objectContaining({
Restrictions: {
GeoRestriction: {
RestrictionType: "blacklist",
Quantity: 1,
Items: ["AA"]
}
}
})
})
})
);
// Delete
await component.default({
restrictions: {
geoRestriction: {
restrictionType: "none"
}
},
origins
});
expect(mockUpdateDistribution).toBeCalledWith(
expect.objectContaining({
DistributionConfig: expect.objectContaining({
Restrictions: {
GeoRestriction: {
RestrictionType: "none",
Quantity: 0
}
}
})
})
);
// Restriction items not needed when deleting it
expect.objectContaining({
DistributionConfig: expect.not.objectContaining({
Restrictions: {
GeoRestriction: {
Items: expect.anything()
}
}
})
});
});
// Deploying with an ACM certificate ARN should create the distribution with
// that certificate, and deploying again with a new ARN should update it.
it("create distribution with certificate arn and updates it", async () => {
  const initialArn =
    "arn:aws:acm:us-east-1:123456789012:certificate/12345678-1234-1234-1234-123456789012";
  const updatedArn =
    "arn:aws:acm:us-east-1:123456789012:certificate/updated";

  // Builds the exact ViewerCertificate shape CloudFront should receive.
  const viewerCertificateFor = (arn) => ({
    ViewerCertificate: {
      CloudFrontDefaultCertificate: false,
      ACMCertificateArn: arn,
      SSLSupportMethod: "sni-only",
      MinimumProtocolVersion: "TLSv1.2_2019"
    }
  });

  // First deploy creates the distribution.
  await component.default({
    certificate: {
      cloudFrontDefaultCertificate: false,
      acmCertificateArn: initialArn
    },
    origins
  });
  expect(mockCreateDistributionWithTags).toBeCalledWith(
    expect.objectContaining({
      DistributionConfigWithTags: expect.objectContaining({
        DistributionConfig: expect.objectContaining(
          viewerCertificateFor(initialArn)
        )
      })
    })
  );

  // Second deploy with a different ARN updates the existing distribution.
  await component.default({
    certificate: {
      cloudFrontDefaultCertificate: false,
      acmCertificateArn: updatedArn
    },
    origins
  });
  expect(mockUpdateDistribution).toBeCalledWith(
    expect.objectContaining({
      DistributionConfig: expect.objectContaining(
        viewerCertificateFor(updatedArn)
      )
    })
  );
});
// With cloudFrontDefaultCertificate: true the distribution should use the
// default CloudFront certificate (no ACM/IAM certificate fields present).
it("create distribution with default certificate", async () => {
  await component.default({
    certificate: { cloudFrontDefaultCertificate: true },
    origins
  });

  const expectedViewerCertificate = {
    CloudFrontDefaultCertificate: true,
    SSLSupportMethod: "sni-only",
    MinimumProtocolVersion: "TLSv1.2_2019"
  };

  expect(mockCreateDistributionWithTags).toBeCalledWith(
    expect.objectContaining({
      DistributionConfigWithTags: expect.objectContaining({
        DistributionConfig: expect.objectContaining({
          ViewerCertificate: expectedViewerCertificate
        })
      })
    })
  );
});
// An IAM server certificate id should map to ViewerCertificate.IAMCertificateId.
it("create distribution with IAM certificate", async () => {
  await component.default({
    certificate: {
      cloudFrontDefaultCertificate: false,
      iamCertificateId: "12345"
    },
    origins
  });

  const expectedViewerCertificate = {
    CloudFrontDefaultCertificate: false,
    IAMCertificateId: "12345",
    SSLSupportMethod: "sni-only",
    MinimumProtocolVersion: "TLSv1.2_2019"
  };

  expect(mockCreateDistributionWithTags).toBeCalledWith(
    expect.objectContaining({
      DistributionConfigWithTags: expect.objectContaining({
        DistributionConfig: expect.objectContaining({
          ViewerCertificate: expectedViewerCertificate
        })
      })
    })
  );
});
// Tags supplied at creation time should be converted from the {key: value}
// input shape into CloudFront's Tags.Items [{Key, Value}] shape.
it("creates distribution with tags", async () => {
  await component.default({
    tags: { tag1: "val1", tag2: "val2" },
    origins
  });

  const expectedItems = [
    { Key: "tag1", Value: "val1" },
    { Key: "tag2", Value: "val2" }
  ];

  expect(mockCreateDistributionWithTags).toBeCalledWith(
    expect.objectContaining({
      DistributionConfigWithTags: expect.objectContaining({
        Tags: { Items: expectedItems }
      })
    })
  );
});
// Updating a distribution's tags should read the existing tags, remove them,
// and apply the new set via TagResource.
it("updates distribution with tags", async () => {
  // Pretend the existing distribution already carries one tag.
  // FIX: CloudFront tag items are {Key, Value}; the original mock used a
  // non-existent "Tag" property for the value. Harmless to the assertions
  // below (only Key is consumed), but it misrepresents the API shape.
  mockListTagsForResourcePromise.mockResolvedValueOnce({
    Tags: {
      Items: [{ Key: "existingTag", Value: "existingValue" }]
    }
  });
  // Create distribution
  await component.default({
    origins
  });
  expect(mockCreateDistributionWithTags).toBeCalled();
  // Update distribution with a new set of tags
  await component.default({
    tags: {
      tag1: "val1",
      tag2: "val2"
    },
    origins
  });
  expect(mockUpdateDistribution).toBeCalled();
  // Existing tags are listed first...
  expect(mockListTagsForResource).toBeCalledWith({
    Resource: "distributionArn"
  });
  // ...then removed by key...
  expect(mockUntagResource).toBeCalledWith({
    Resource: "distributionArn",
    TagKeys: {
      Items: ["existingTag"]
    }
  });
  // ...and finally replaced with the requested tags.
  expect(mockTagResource).toBeCalledWith({
    Resource: "distributionArn",
    Tags: {
      Items: [
        { Key: "tag1", Value: "val1" },
        { Key: "tag2", Value: "val2" }
      ]
    }
  });
});
}); | the_stack |
import { Store as RelayModernStore, RecordSource, Environment as RelayModernEnvironment } from '../src';
import { Network as RelayNetwork, Observable as RelayObservable, createOperationDescriptor, createReaderSelector } from 'relay-runtime';
import { createPersistedStorage } from './Utils';
const { generateAndCompile } = require('./TestCompiler');
/**
 * Factory mirroring the relay-runtime RecordSource entry point, but backed
 * by a fresh persisted storage instance so each test starts from an empty,
 * isolated cache.
 */
const RelayRecordSource = {
  create(data?: any) {
    const initialState = { ...data };
    return new RecordSource({ storage: createPersistedStorage(), initialState });
  },
};
jest.useFakeTimers();
// Promise-based delay helper: resolves after `ms` milliseconds.
// NOTE(review): not referenced by the tests visible in this file.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
describe(`Relay Offline`, () => {
let callbacks;
let commentID;
let CommentFragment;
let CommentQuery;
let complete;
let CreateCommentMutation;
let CreateCommentWithSpreadMutation;
let environment;
let error;
let fetch;
let operation;
let queryOperation;
let source;
let store;
let subject;
let variables;
let queryVariables;
// Per-test fixture: compiles the GraphQL documents used by the suite and
// builds a fresh Relay environment (network, store, offline queue), each
// backed by a newly created persisted storage, so no state leaks between tests.
beforeEach(async () => {
  jest.resetModules();
  commentID = 'comment-id';
  // Compile the mutations/fragment/query once per test and destructure them
  // into the suite-level variables.
  ({ CreateCommentMutation, CreateCommentWithSpreadMutation, CommentFragment, CommentQuery } = generateAndCompile(`
    mutation CreateCommentMutation($input: CommentCreateInput!) {
      commentCreate(input: $input) {
        comment {
          id
          body {
            text
          }
        }
      }
    }
    fragment CommentFragment on Comment {
      id
      body {
        text
      }
    }
    mutation CreateCommentWithSpreadMutation($input: CommentCreateInput!) {
      commentCreate(input: $input) {
        comment {
          ...CommentFragment
        }
      }
    }
    query CommentQuery($id: ID!) {
      node(id: $id) {
        id
        ...CommentFragment
      }
    }
  `));
  variables = {
    input: {
      clientMutationId: '0',
      feedbackId: '1',
    },
  };
  queryVariables = {
    id: commentID,
  };
  operation = createOperationDescriptor(CreateCommentMutation, variables);
  queryOperation = createOperationDescriptor(CommentQuery, queryVariables);
  // The fetch mock never resolves on its own: it captures the observer sink
  // in `subject` so each test can push payloads/errors into the "network"
  // at the exact moment it wants.
  fetch = jest.fn((_query, _variables, _cacheConfig) =>
    RelayObservable.create((sink) => {
      subject = sink;
    }),
  );
  source = RelayRecordSource.create();
  store = new RelayModernStore(source, { storage: createPersistedStorage() });
  environment = new RelayModernEnvironment(
    {
      network: RelayNetwork.create(fetch),
      store,
    },
    { storage: createPersistedStorage() },
  );
  // Spy callbacks handed to executeMutation subscribers.
  complete = jest.fn();
  error = jest.fn();
  callbacks = { complete, error };
});
describe('hydrate', () => {
  it('online', async () => {
    // navigator.onLine is not mocked here, so after hydration the
    // environment should report being online.
    await environment.hydrate();
    const online = environment.isOnline();
    expect(online).toBeTruthy();
  });
});
describe('offline options', () => {
let onlineGetter;
describe('offline-options start, finish, publish, onPublish', () => {
let execute;
let start;
let finish;
let onPublish;
// Fresh spies for the offline-store lifecycle hooks. Judging by their
// signatures: `start` receives the queued mutations when a flush begins,
// `finish` runs when it ends, and `onPublish` fires as a record is queued.
beforeEach(async () => {
  start = jest.fn((_mutations) => Promise.resolve(_mutations));
  finish = jest.fn((_mutations, _error) => Promise.resolve(undefined));
  onPublish = jest.fn((offlineRecord) => Promise.resolve(offlineRecord));
});
it('start/finish called when online', async () => {
  // Register offline options before hydration; nothing runs yet.
  const offlineOptions: any = { start, execute, finish };
  environment.setOfflineOptions(offlineOptions);
  expect(start).not.toHaveBeenCalled();
  expect(finish).not.toHaveBeenCalled();
  // Hydrating while online processes the offline queue, so the
  // start/finish lifecycle hooks both fire exactly once.
  await environment.hydrate();
  expect(start).toHaveBeenCalledTimes(1);
  expect(finish).toHaveBeenCalledTimes(1);
});
it('start/finish not called when offline', async () => {
  const offlineOptions: any = { start, execute, finish };
  environment.setOfflineOptions(offlineOptions);
  // Force navigator.onLine to report offline before hydrating.
  onlineGetter = jest.spyOn(window.navigator, 'onLine', 'get');
  onlineGetter.mockReturnValue(false);
  expect(start).not.toHaveBeenCalled();
  expect(finish).not.toHaveBeenCalled();
  // Offline hydration must not attempt to flush the queue.
  await environment.hydrate();
  expect(start).not.toHaveBeenCalled();
  expect(finish).not.toHaveBeenCalled();
});
// Queues a mutation while offline and verifies it is published into the
// offline store (onPublish fired, one record queued) instead of being sent
// over the network.
it('publish', async () => {
  const offlineOptions = {
    start,
    execute,
    onPublish,
  } as any;
  environment.setOfflineOptions(offlineOptions);
  await environment.hydrate();
  expect(onPublish).toHaveBeenCalledTimes(0);
  // Go offline *after* hydration so the mutation cannot reach the network.
  onlineGetter = jest.spyOn(window.navigator, 'onLine', 'get');
  onlineGetter.mockReturnValue(false);
  environment.executeMutation({ operation }).subscribe(callbacks);
  expect(complete).not.toBeCalled();
  expect(error).not.toBeCalled();
  jest.runAllTimers();
  /*const request = {
  payload: '/api/vi/test',
  };
  await environment.publish({
  request,
  });*/
  // The mutation completes locally even though nothing hit the network.
  expect(complete).toBeCalled();
  expect(error).not.toBeCalled();
  expect(onPublish).toHaveBeenCalledTimes(1);
  // Exactly one record is parked in the offline queue, wrapping this operation.
  expect(environment.getStoreOffline().getListMutation().length).toEqual(1);
  expect(environment.getStoreOffline().getListMutation()[0].request.payload.operation).toEqual(operation);
});
});
describe('offline-options onComplete, onExecute, onDiscard', () => {
let onExecute;
let onComplete;
let onDiscard;
let executeReject;
let start;
let onPublish;
// Spies for the execute-phase hooks. `executeReject` simulates a failing
// execution by rejecting with the offline record; NOTE(review): it is
// defined but unused by the tests visible in this file.
beforeEach(async () => {
  start = jest.fn((_mutations) => Promise.resolve(_mutations));
  onExecute = jest.fn((mutation) => Promise.resolve(mutation));
  executeReject = jest.fn((_offlineRecord) => Promise.reject(_offlineRecord));
  onComplete = jest.fn((_options) => Promise.resolve(true));
  onDiscard = jest.fn((_options) => Promise.resolve(true));
  onPublish = jest.fn((offlineRecord) => Promise.resolve(offlineRecord));
});
// Full offline round-trip, success path: queue a mutation while offline,
// come back online (dispatching the 'online' event triggers the flush),
// feed a successful network payload through `subject`, and verify the
// execute/complete hooks fired and the queue drained.
it('onExecute onComplete', async () => {
  const offlineOptions = {
    start,
    onExecute,
    onComplete,
    onPublish,
  } as any;
  environment.setOfflineOptions(offlineOptions);
  onlineGetter = jest.spyOn(window.navigator, 'onLine', 'get');
  onlineGetter.mockReturnValue(false);
  await environment.hydrate();
  expect(environment.isOnline()).not.toBeTruthy();
  environment.executeMutation({ operation }).subscribe(callbacks);
  jest.runAllTimers();
  // While offline, the mutation is only published into the queue.
  expect(onPublish).toHaveBeenCalledTimes(1);
  expect(environment.getStoreOffline().getListMutation().length).toEqual(1);
  expect(environment.getStoreOffline().getListMutation()[0].request.payload.operation).toEqual(operation);
  expect(onExecute).toHaveBeenCalledTimes(0);
  expect(onComplete).toHaveBeenCalledTimes(0);
  // Simulate the browser regaining connectivity.
  onlineGetter.mockReturnValue(true);
  window.dispatchEvent(new Event('online'));
  expect(environment.isOnline()).toBeTruthy();
  jest.runAllTimers();
  // Resolve the in-flight network request captured by the fetch mock.
  subject.next({
    data: {
      commentCreate: {
        comment: {
          id: commentID,
          body: {
            text: 'Gave Relay',
          },
        },
      },
    },
  });
  subject.complete();
  jest.runAllTimers();
  // Flush ran: start → onExecute → onComplete, and the queue is empty.
  expect(start).toHaveBeenCalledTimes(1);
  expect(onExecute).toHaveBeenCalledTimes(1);
  expect(onComplete).toHaveBeenCalledTimes(1);
  expect(environment.getStoreOffline().getListMutation().length).toEqual(0);
});
// Full offline round-trip, failure path: the replayed request errors, so
// onDiscard fires (onComplete does not) and the record is still removed
// from the queue because onDiscard resolves true.
it('onExecute onDiscard', async () => {
  const offlineOptions = {
    start,
    onExecute,
    onDiscard,
    onPublish,
  } as any;
  environment.setOfflineOptions(offlineOptions);
  onlineGetter = jest.spyOn(window.navigator, 'onLine', 'get');
  onlineGetter.mockReturnValue(false);
  await environment.hydrate();
  expect(environment.isOnline()).not.toBeTruthy();
  environment.executeMutation({ operation }).subscribe(callbacks);
  jest.runAllTimers();
  // Offline: mutation only queued, nothing executed yet.
  expect(onPublish).toHaveBeenCalledTimes(1);
  expect(environment.getStoreOffline().getListMutation().length).toEqual(1);
  expect(environment.getStoreOffline().getListMutation()[0].request.payload.operation).toEqual(operation);
  expect(onExecute).toHaveBeenCalledTimes(0);
  expect(onComplete).toHaveBeenCalledTimes(0);
  // Back online: the queue flush kicks off the network request...
  onlineGetter.mockReturnValue(true);
  window.dispatchEvent(new Event('online'));
  expect(environment.isOnline()).toBeTruthy();
  jest.runAllTimers();
  // ...which we fail deliberately.
  subject.error(new Error('wtf'));
  jest.runAllTimers();
  expect(start).toHaveBeenCalledTimes(1);
  expect(onComplete).toHaveBeenCalledTimes(0);
  expect(onDiscard).toHaveBeenCalledTimes(1);
  expect(environment.getStoreOffline().getListMutation().length).toEqual(0);
});
});
describe('update store', () => {
// Fixture for the store-update tests: every test in this group runs with
// the environment offline so mutations only touch the local store.
beforeEach(async () => {
  jest.resetModules();
  onlineGetter = jest.spyOn(window.navigator, 'onLine', 'get');
  onlineGetter.mockReturnValue(false);
  // FIX: hydrate() returns a promise; the original fired it without
  // awaiting, so tests could start before hydration finished. Every other
  // suite in this file awaits hydration.
  await environment.hydrate();
});
// While offline, executeMutation should apply the optimistic response
// immediately: the fragment subscriber fires once with the optimistic data
// and the mutation completes locally (no network involved).
it('commits optimistic response with fragment spread', () => {
  operation = createOperationDescriptor(CreateCommentWithSpreadMutation, variables);
  // Subscribe to the comment fragment so store writes can be observed.
  const selector = createReaderSelector(CommentFragment, commentID, {}, queryOperation.request);
  const snapshot = environment.lookup(selector);
  const callback = jest.fn();
  environment.subscribe(snapshot, callback);
  environment
    .executeMutation({
      operation,
      optimisticResponse: {
        commentCreate: {
          comment: {
            id: commentID,
            body: {
              text: 'Give Relay',
            },
          },
        },
      },
    })
    .subscribe(callbacks);
  jest.runAllTimers();
  expect(complete).toBeCalled();
  expect(error).not.toBeCalled();
  // Exactly one store notification, carrying the optimistic data.
  expect(callback.mock.calls.length).toBe(1);
  expect(callback.mock.calls[0][0].data).toEqual({
    id: commentID,
    body: {
      text: 'Give Relay',
    },
  });
});
// Same as above but the optimistic data is produced imperatively by an
// optimisticUpdater that creates the Comment/Text records by hand.
it('commits optimistic updater response with fragment spread', () => {
  operation = createOperationDescriptor(CreateCommentWithSpreadMutation, variables);
  const selector = createReaderSelector(CommentFragment, commentID, {}, queryOperation.request);
  const snapshot = environment.lookup(selector);
  const callback = jest.fn();
  environment.subscribe(snapshot, callback);
  const callOptimisticUpdate = jest.fn();
  environment
    .executeMutation({
      operation,
      optimisticUpdater: (_store) => {
        callOptimisticUpdate();
        // Manually build the optimistic records the fragment reads.
        const comment = _store.create(commentID, 'Comment');
        comment.setValue(commentID, 'id');
        const body = _store.create(commentID + '.text', 'Text');
        comment.setLinkedRecord(body, 'body');
        body.setValue('Give Relay', 'text');
      },
    })
    .subscribe(callbacks);
  jest.runAllTimers();
  expect(complete).toBeCalled();
  expect(error).not.toBeCalled();
  // NOTE(review): the updater runs twice here — presumably once when the
  // optimistic update is first applied and once when the offline queue
  // replays/restores it; confirm against the environment implementation.
  expect(callOptimisticUpdate.mock.calls.length).toBe(2);
  expect(callback.mock.calls.length).toBe(1);
  expect(callback.mock.calls[0][0].data).toEqual({
    id: commentID,
    body: {
      text: 'Give Relay',
    },
  });
});
// Combines an optimistic response with a regular updater: the subscriber
// first sees the optimistic text, then the updater's uppercased rewrite.
it('commits optimistic response + updater response with fragment spread', () => {
  operation = createOperationDescriptor(CreateCommentWithSpreadMutation, variables);
  const selector = createReaderSelector(CommentFragment, commentID, {}, queryOperation.request);
  const snapshot = environment.lookup(selector);
  const callback = jest.fn();
  environment.subscribe(snapshot, callback);
  environment
    .executeMutation({
      operation,
      optimisticResponse: {
        commentCreate: {
          comment: {
            id: commentID,
            body: {
              text: 'Give Relay',
            },
          },
        },
      },
      updater: (_store) => {
        // Defensive reads: fail loudly if the optimistic write is missing.
        const comment = _store.get(commentID);
        if (!comment) {
          throw new Error('Expected comment to be in the store');
        }
        const body = comment.getLinkedRecord('body');
        if (!body) {
          throw new Error('Expected comment to have a body');
        }
        const bodyValue: string | null = body.getValue('text');
        if (bodyValue == null) {
          throw new Error('Expected comment body to have text');
        }
        body.setValue(bodyValue.toUpperCase(), 'text');
      },
    })
    .subscribe(callbacks);
  jest.runAllTimers();
  expect(complete).toBeCalled();
  expect(error).not.toBeCalled();
  // Two notifications: optimistic data first, then the updater's result.
  expect(callback.mock.calls.length).toBe(2);
  expect(callback.mock.calls[0][0].data).toEqual({
    id: commentID,
    body: {
      text: 'Give Relay',
    },
  });
  expect(callback.mock.calls[1][0].data).toEqual({
    id: commentID,
    body: {
      text: 'GIVE RELAY',
    },
  });
});
});
});
}); | the_stack |
* @title: Animation
* @description:
* This sample shows how to load multiple animations and build an animation controller hierarchy.
* An animation transition controller is used to blend between two animations.
* You can enable drawing of debug information to see the animation bounding box, the skeleton,
* and the mesh wireframe.
* You can also click on the rendering windows to move and rotate the camera around.
*/
/*{{ javascript("jslib/aabbtree.js") }}*/
/*{{ javascript("jslib/camera.js") }}*/
/*{{ javascript("jslib/geometry.js") }}*/
/*{{ javascript("jslib/material.js") }}*/
/*{{ javascript("jslib/light.js") }}*/
/*{{ javascript("jslib/scenenode.js") }}*/
/*{{ javascript("jslib/scene.js") }}*/
/*{{ javascript("jslib/vmath.js") }}*/
/*{{ javascript("jslib/effectmanager.js") }}*/
/*{{ javascript("jslib/shadermanager.js") }}*/
/*{{ javascript("jslib/texturemanager.js") }}*/
/*{{ javascript("jslib/animationmanager.js") }}*/
/*{{ javascript("jslib/renderingcommon.js") }}*/
/*{{ javascript("jslib/defaultrendering.js") }}*/
/*{{ javascript("jslib/observer.js") }}*/
/*{{ javascript("jslib/requesthandler.js") }}*/
/*{{ javascript("jslib/resourceloader.js") }}*/
/*{{ javascript("jslib/animation.js") }}*/
/*{{ javascript("jslib/scenedebugging.js") }}*/
/*{{ javascript("jslib/utilities.js") }}*/
/*{{ javascript("jslib/vertexbuffermanager.js") }}*/
/*{{ javascript("jslib/indexbuffermanager.js") }}*/
/*{{ javascript("jslib/services/turbulenzservices.js") }}*/
/*{{ javascript("jslib/services/turbulenzbridge.js") }}*/
/*{{ javascript("jslib/services/gamesession.js") }}*/
/*{{ javascript("jslib/services/mappingtable.js") }}*/
/*{{ javascript("scripts/sceneloader.js") }}*/
/*{{ javascript("scripts/motion.js") }}*/
/*{{ javascript("scripts/htmlcontrols.js") }}*/
/*global TurbulenzEngine: true */
/*global TurbulenzServices: false */
/*global RequestHandler: false */
/*global TextureManager: false */
/*global ShaderManager: false */
/*global EffectManager: false */
/*global Scene: false */
/*global SceneLoader: false */
/*global ResourceLoader: false */
/*global Camera: false */
/*global HTMLControls: false */
/*global AnimationManager: false */
/*global CameraController: false */
/*global DefaultRendering: false */
/*global SkinnedNode: false */
/*global VMath: false */
/*global GPUSkinController: false */
/*global InterpolatorController: false */
/*global ReferenceController: false */
/*global NodeTransformController: false */
/*global TransitionController: false */
// We put some custom data onto Scene
// Scene subclass used by this sample so the list of skinned nodes (built in
// initAnimations below) can be hung off the scene object with a typed field.
class CustomScene extends Scene
{
    // Populated by initAnimations with one SkinnedNode per skeleton-bearing root node.
    skinnedNodes: SkinnedNode[];
};
TurbulenzEngine.onload = function onloadFn()
{
var errorCallback = function errorCallback(msg)
{
window.alert(msg);
};
TurbulenzEngine.onerror = errorCallback;
var graphicsDeviceParameters = { };
var graphicsDevice = TurbulenzEngine.createGraphicsDevice(graphicsDeviceParameters);
if (!graphicsDevice.shadingLanguageVersion)
{
errorCallback("No shading language support detected.\nPlease check your graphics drivers are up to date.");
graphicsDevice = null;
return;
}
// Clear the background color of the engine window
var clearColor = [0.5, 0.5, 0.5, 1.0];
if (graphicsDevice.beginFrame())
{
graphicsDevice.clear(clearColor);
graphicsDevice.endFrame();
}
var mathDeviceParameters = { };
var mathDevice = TurbulenzEngine.createMathDevice(mathDeviceParameters);
var inputDeviceParameters = { };
var inputDevice = TurbulenzEngine.createInputDevice(inputDeviceParameters);
var requestHandlerParameters = { };
var requestHandler = RequestHandler.create(requestHandlerParameters);
var textureManager = TextureManager.create(graphicsDevice, requestHandler, null, errorCallback);
var shaderManager = ShaderManager.create(graphicsDevice, requestHandler, null, errorCallback);
var effectManager = EffectManager.create();
var animationManager = AnimationManager.create(errorCallback);
var mappingTable;
var resourceLoader = ResourceLoader.create();
// Setup world space
var worldUp = mathDevice.v3Build(0.0, 1.0, 0.0);
// Bounds are red, interpolatorColor is updated per animation
var boundsColor = [1, 0, 0];
var interpolatorColor = [0, 0, 0];
// Setup a camera to view a close-up object
var camera = Camera.create(mathDevice);
camera.nearPlane = 0.05;
camera.updateViewMatrix();
var animMinExtent, animMaxExtent;
var cameraController = CameraController.create(graphicsDevice, inputDevice, camera);
var maxSpeed = cameraController.maxSpeed;
var cameraDistanceFactor = 1.0;
var cameraDir = [1, 1, 1]; // The +/- direction the camera is moved from the scene in each axis
// Settings for the animation
var settings = {
animScale: 1,
defaultRate: 1,
drawDebug: false,
drawInterpolators: false,
drawWireframe: false,
loopAnimation: true,
blendAnimation: false,
transitionLength: 1
};
// The default animation to start with
var defaultAnimIndex = 0;
// The current playing animation
var curAnimIndex = 0;
// The controller references by index
var controllerMap = [];
// This is our base asset that includes a character and animations
var assetToLoad = "models/Seymour.dae";
// The list of animations to load pre-scene load (by reference)
// This is only for animations that are not included in the default scene
// e.g. "animations/default_walking.anim"
var addAnimations = ["models/Seymour_anim2_rot90_anim_only.dae"];
// The list of animations to be removed from the scene data pre-load
// This is for undesired animations that are packed in the scene
// All these animations are not required for this sample
var removeAnimations = ["default_astroboy_w_skel02_gog_polySurface5",
"default_astroboy_w_skel02_polySurface5",
"default_astroboy_w_skel02c_gog_polySurface5",
"default_astroboy_w_skel02c_polySurface5",
"default_gog_polySurface5",
"default_polySurface5"];
// The controller to blend the transitions between animations that don't have a matching key frame
var transitionController = null;
var transitionStartColor = [0, 0, 0];
var transitionEndColor = [0, 0, 0];
// Reference controller for the whole animation
var currentReferenceController = null;
var currentNodeController = null;
var animationsLoaded;
// When the JSON is loaded, add a prefix to uniquely identify that set of animation data
// When an extra animation JSON file arrives, register its data under a
// unique "AnimExtra<N>-" prefix and decrement the outstanding-load counter.
var animationsLoadedCallback = function animationsLoadedCallbackFn(jsonData) {
    var prefixIndex = addAnimations.length - animationsLoaded;
    animationManager.loadData(jsonData, "AnimExtra" + prefixIndex + "-");
    animationsLoaded -= 1;
};
// Queues loads (by reference) for each extra animation file; the
// resourceLoader fetches them via the request handler and appends the
// results through animationsLoadedCallback.
var addAnimationsToScene = function addAnimationsToSceneFn()
{
    animationsLoaded = addAnimations.length;
    addAnimations.forEach(function (path) {
        resourceLoader.load(mappingTable.getURL(path), {
            append : true,
            onload : animationsLoadedCallback,
            requestHandler: requestHandler
        });
    });
};
// Strips the unwanted animations (listed in removeAnimations) from the raw
// scene data before it is handed to the scene loader.
var removeAnimationsFromScene = function removeAnimationsFromSceneFn(sceneData)
{
    var anims = sceneData.animations;
    if (!anims)
    {
        return;
    }
    for (var index = 0; index < removeAnimations.length; index += 1)
    {
        var name = removeAnimations[index];
        // Only delete entries that are actually present (truthy).
        if (anims[name])
        {
            delete anims[name];
        }
    }
};
// Sets the next animation to play
var nextAnimation = function nextAnimationFn(nextAnimIndex)
{
var controllers, interpStart, interpEnd, startColor, endColor, startSkinnedNode, endSkinnedNode, startNodes, endNodes;
controllers = controllerMap[curAnimIndex];
if (controllers)
{
interpStart = controllers.interpController;
startColor = controllers.color;
startSkinnedNode = controllers.skinnedNode;
startNodes = controllers.nodeCount;
}
controllers = controllerMap[nextAnimIndex];
if (controllers)
{
interpEnd = controllers.interpController;
endColor = controllers.color;
endSkinnedNode = controllers.skinnedNode;
endNodes = controllers.nodeCount;
}
if (interpEnd)
{
interpEnd.setTime(0);
// Set active on start skinned node to false
if (startSkinnedNode && (startSkinnedNode !== endSkinnedNode))
{
startSkinnedNode.active = false;
startSkinnedNode.setInputController(interpStart);
}
// Use the transition controller instead on this node
// Don't blend if the interpControllers are the same
// Don't blend if the number of nodes is different
if ((settings.blendAnimation || !interpStart) &&
(interpStart !== interpEnd) &&
(startNodes === endNodes))
{
if (!transitionController)
{
transitionController = TransitionController.create(interpStart, interpEnd, settings.transitionLength);
transitionController.onFinishedTransitionCallback = function onFinishedTransitionCallbackFn(/* transition */)
{
var controllers = controllerMap[curAnimIndex];
if (controllers)
{
var skinnedNode = controllers.skinnedNode;
currentReferenceController.setReferenceController(controllers.interpController);
currentReferenceController.setTime(0);
interpolatorColor = controllers.color;
if (skinnedNode)
{
skinnedNode.setInputController(currentReferenceController);
skinnedNode.active = true;
}
}
// Callback return value of 'false' tells the controller not to continue operating
// This is because we have decided to set a different controller to be used instead
return false;
};
transitionController.onUpdateCallback = function onUpdateCallbackFn(transition)
{
var delta = (transition.transitionTime / transition.transitionLength);
interpolatorColor = VMath.v3Add(VMath.v3ScalarMul(transitionStartColor, 1 - delta), VMath.v3ScalarMul(transitionEndColor, delta));
};
}
else
{
transitionController.setEndController(interpEnd);
transitionController.setStartController(interpStart);
transitionController.setTransitionLength(settings.transitionLength);
}
currentReferenceController.setReferenceController(transitionController);
transitionStartColor = startColor;
transitionEndColor = endColor;
}
else
{
// Set the node controller to be the reference controller
currentReferenceController.setReferenceController(interpEnd);
interpolatorColor = endColor;
}
// Reset the animation about to play
currentReferenceController.setTime(0);
if (endSkinnedNode)
{
endSkinnedNode.setInputController(currentReferenceController);
endSkinnedNode.active = true;
}
curAnimIndex = nextAnimIndex;
}
};
// Update the settings for an existing animation controller
var updateAnimations = function updateAnimationsFn(/* scene */)
{
var i, controllers, interp;
var length = controllerMap.length;
for (i = 0; i < length; i += 1)
{
controllers = controllerMap[i];
if (controllers)
{
interp = controllers.interpController;
if (interp)
{
interp.setAnimation(interp.currentAnim, settings.loopAnimation);
interp.setRate(settings.defaultRate);
}
}
}
};
// Computes animMinExtent/animMaxExtent as the union of the given
// animation's per-frame bounding boxes; falls back to the scene extents
// when there is no controller entry for animationIndex.
function calcAnimationExtents(scene, animationIndex)
{
    var controllers = controllerMap[animationIndex];
    if (controllers)
    {
        var big = Number.MAX_VALUE;
        var min0 = big, min1 = big, min2 = big;
        var max0 = -big, max1 = -big, max2 = -big;
        var bounds = controllers.animation.bounds;
        var numFrames = bounds.length;
        for (var frame = 0; frame < numFrames; frame += 1)
        {
            var center = bounds[frame].center;
            var halfExtent = bounds[frame].halfExtent;
            // Expand the running AABB by this frame's box (center ± halfExtent).
            min0 = Math.min(min0, center[0] - halfExtent[0]);
            min1 = Math.min(min1, center[1] - halfExtent[1]);
            min2 = Math.min(min2, center[2] - halfExtent[2]);
            max0 = Math.max(max0, center[0] + halfExtent[0]);
            max1 = Math.max(max1, center[1] + halfExtent[1]);
            max2 = Math.max(max2, center[2] + halfExtent[2]);
        }
        animMinExtent = mathDevice.v3Build(min0, min1, min2);
        animMaxExtent = mathDevice.v3Build(max0, max1, max2);
    }
    else
    {
        // Use the scene extents
        var sceneExtents = scene.getExtents();
        animMinExtent = mathDevice.v3Build(sceneExtents[0], sceneExtents[1], sceneExtents[2]);
        animMaxExtent = mathDevice.v3Build(sceneExtents[3], sceneExtents[4], sceneExtents[5]);
    }
}
// Calculates a position for the camera to lookAt
var resetCamera = function resetCameraFn(camera, scene)
{
calcAnimationExtents(scene, curAnimIndex);
// Update the camera to scale to the size of the scene
var center = mathDevice.v3ScalarMul(mathDevice.v3Add(animMaxExtent, animMinExtent), 0.5);
var extent = mathDevice.v3Sub(center, animMinExtent);
camera.lookAt(center,
worldUp,
mathDevice.v3Build(center[0] + extent[0] * cameraDistanceFactor * cameraDir[0] * 2,
center[1] + extent[1] * cameraDistanceFactor * cameraDir[1],
center[2] + extent[2] * cameraDistanceFactor * cameraDir[2] * 2));
camera.updateViewMatrix();
// Calculates the appropriate nearPlane for the animation extents
var len = VMath.v3Length(extent);
if (len < 4.0)
{
camera.nearPlane = len * 0.1;
}
else
{
camera.nearPlane = 1.0;
}
camera.farPlane = Math.ceil(len) * 100.0;
camera.updateProjectionMatrix();
// Calculates the speed to move around the animation
maxSpeed = (len < 100 ? (len * 2) : (len * 0.5));
};
// Create object using scene loader
var scene = <CustomScene>(Scene.create(mathDevice));
var sceneLoader = SceneLoader.create();
var renderer;
// Creates the renderer and kicks off loading of the main scene asset,
// injecting/removing animations via the pre-load hook.
var loadAssets = function loadAssets()
{
    // Renderer for the scene (requires shader assets).
    renderer = DefaultRendering.create(graphicsDevice,
                                       mathDevice,
                                       shaderManager,
                                       effectManager);
    renderer.setGlobalLightPosition(mathDevice.v3Build(0.5, 100.0, 0.5));
    renderer.setAmbientColor(mathDevice.v3Build(0.3, 0.3, 0.4));
    renderer.setDefaultTexture(textureManager.get("default"));
    sceneLoader.setPathRemapping(mappingTable.urlMapping, mappingTable.assetPrefix);
    sceneLoader.load({
        scene : scene,
        assetPath : assetToLoad,
        graphicsDevice : graphicsDevice,
        mathDevice : mathDevice,
        textureManager : textureManager,
        effectManager : effectManager,
        shaderManager : shaderManager,
        animationManager : animationManager,
        requestHandler: requestHandler,
        // Keep vertex data so debug wireframe drawing has access to it.
        keepVertexData : true,
        preSceneLoadFn : function (sceneData)
        {
            // Apply the modifications to the data from assetPath
            addAnimationsToScene();
            removeAnimationsFromScene(sceneData);
        },
        keepLights : true,
        append : true
    });
};
// Once the asset mapping table arrives, point every manager's path
// remapping at it and kick off asset loading.
var mappingTableReceived = function mappingTableReceivedFn(mappingTable)
{
    var urlMapping = mappingTable.urlMapping;
    var assetPrefix = mappingTable.assetPrefix;
    textureManager.setPathRemapping(urlMapping, assetPrefix);
    shaderManager.setPathRemapping(urlMapping, assetPrefix);
    sceneLoader.setPathRemapping(urlMapping, assetPrefix);
    loadAssets();
};
// With a game session established, request the mapping table; defaults
// point at the staticmax cache with a "missing/" fallback prefix.
var gameSessionCreated = function gameSessionCreatedFn(gameSession)
{
    mappingTable = TurbulenzServices.createMappingTable(
        requestHandler,
        gameSession,
        mappingTableReceived,
        {
            mappingTablePrefix: "staticmax/",
            assetPrefix: "missing/",
            mappingTableURL: "mapping_table.json"
        });
};
var gameSession = TurbulenzServices.createGameSession(requestHandler, gameSessionCreated);
// Controls
var htmlControls = HTMLControls.create();
htmlControls.addButtonControl({
id: "button01",
value: "Next",
fn: function ()
{
nextAnimation((curAnimIndex + 1) % controllerMap.length);
}
});
htmlControls.addButtonControl({
id: "button02",
value: "Previous",
fn: function ()
{
var index = (curAnimIndex - 1);
index = (index === -1) ? (controllerMap.length - 1) : index;
nextAnimation(index);
}
});
htmlControls.addButtonControl({
id: "button03",
value: "Reset Camera",
fn: function ()
{
resetCamera(camera, scene);
}
});
htmlControls.addCheckboxControl({
id: "checkbox01",
value: "loopAnimation",
isSelected: settings.loopAnimation,
fn: function ()
{
settings.loopAnimation = !settings.loopAnimation;
updateAnimations();
return settings.loopAnimation;
}
});
htmlControls.addCheckboxControl({
id: "checkbox02",
value: "blendAnimation",
isSelected: settings.blendAnimation,
fn: function ()
{
settings.blendAnimation = !settings.blendAnimation;
return settings.blendAnimation;
}
});
htmlControls.addCheckboxControl({
id: "checkbox03",
value: "drawDebug",
isSelected: settings.drawDebug,
fn: function ()
{
settings.drawDebug = !settings.drawDebug;
settings.drawWireframe = settings.drawDebug;
settings.drawInterpolators = settings.drawDebug;
return settings.drawDebug;
}
});
htmlControls.register();
// Provides a distinct array of colors to use for each controller
// Does not use red, black or white as they are reserved
var colorArray = [
[0.5, 1, 0.5],
[0.5, 1, 1],
[1, 1, 0.5],
[1, 0.5, 1],
[0.5, 0.5, 1]
];
var lastColor = (colorArray.length - 1);
// Returns the next debug color from colorArray, cycling round-robin.
var getControllerColor = function getControllerColorFn()
{
    lastColor += 1;
    lastColor %= colorArray.length;
    return colorArray[lastColor];
};
// Initialise all animations with InterpolatorControllers set to start time
var initAnimations = function initAnimationsFn(scene: CustomScene)
{
var a, n, anim, interp, tempNode, skinnedNode, node, hierarchy;
var nodeHasSkeleton = animationManager.nodeHasSkeleton;
var sceneNodes = scene.rootNodes;
var numNodes = sceneNodes.length;
scene.skinnedNodes = [];
// For each node find which ones have skeletons
for (n = 0; n < numNodes; n += 1)
{
node = sceneNodes[n];
var skeleton = nodeHasSkeleton(node);
if (skeleton && skeleton.numNodes)
{
skinnedNode = SkinnedNode.create(graphicsDevice, mathDevice,
node, skeleton);
scene.skinnedNodes.push(skinnedNode);
}
}
numNodes = scene.skinnedNodes.length;
// For each animation, create an interpolation controller
var animations = animationManager.getAll();
for (a in animations)
{
if (animations.hasOwnProperty(a))
{
anim = animations[a];
hierarchy = anim.hierarchy;
// Create an interpolator controller for each animation
interp = InterpolatorController.create(hierarchy);
interp.setAnimation(anim, settings.loopAnimation);
interp.setTime(0);
interp.setRate(settings.defaultRate);
skinnedNode = null;
if (numNodes !== 0)
{
for (n = 0; n < numNodes; n += 1)
{
tempNode = scene.skinnedNodes[n];
if (tempNode && (anim.numNodes === tempNode.skinController.skeleton.numNodes))
{
skinnedNode = tempNode;
}
}
}
// Controller map used for every interp controller
controllerMap.push({
interpController : interp,
animation : anim,
nodeCount : hierarchy.numNodes,
skinnedNode : skinnedNode,
color : getControllerColor()
});
}
}
var index = (controllerMap.length - 1);
if (index === -1)
{
// Couldn't find any animations on the scene
return false;
}
var controllers = controllerMap[defaultAnimIndex];
if (!controllers)
{
// Pick the last added as a default
controllers = controllerMap[index];
}
else
{
index = defaultAnimIndex;
}
var defaultInterp = controllers.interpController;
currentReferenceController = ReferenceController.create(defaultInterp);
hierarchy = currentReferenceController.getHierarchy();
// Assumes hierarchy is the same for all anims
// If different hierarchies are used, create a new controller
currentNodeController = NodeTransformController.create(hierarchy, scene);
currentNodeController.setInputController(currentReferenceController);
currentNodeController.active = true;
// Set the initial animation
nextAnimation(index);
return true;
};
// Callback to draw extra debug information.
// Visualises the interpolator hierarchy, but only while the node
// controller is active and the debug setting is switched on.
function drawDebugCB()
{
    if (!currentNodeController.active || !settings.drawInterpolators)
    {
        return;
    }
    var controller = currentNodeController.inputController;
    var debugHierarchy = controller.getHierarchy();
    scene.drawAnimationHierarchy(graphicsDevice, shaderManager, camera,
                                 debugHierarchy,
                                 debugHierarchy.numNodes,
                                 controller,
                                 null,
                                 interpolatorColor,
                                 boundsColor);
}
// Initialize the previous frame time
var previousFrameTime = TurbulenzEngine.time;
// Per-frame callback: advances the animation, updates input/camera and
// skinned nodes, then renders the scene.
var renderFrame = function renderFrameFn()
{
    var skinnedNodes, numSkins, skinnedNode, skin;
    var currentTime = TurbulenzEngine.time;
    var deltaTime = (currentTime - previousFrameTime);
    // Clamp the time step so a long stall (tab switch, debugger pause)
    // does not make the animation leap forward.
    if (deltaTime > 0.1)
    {
        deltaTime = 0.1;
    }
    cameraController.maxSpeed = (maxSpeed * deltaTime);
    // Update input
    inputDevice.update();
    cameraController.update();
    // Keep the camera projection in sync with the current viewport size.
    var deviceWidth = graphicsDevice.width;
    var deviceHeight = graphicsDevice.height;
    var aspectRatio = (deviceWidth / deviceHeight);
    if (aspectRatio !== camera.aspectRatio)
    {
        camera.aspectRatio = aspectRatio;
        camera.updateProjectionMatrix();
    }
    camera.updateViewProjectionMatrix();
    // Update the current animation by using the node controller
    // The node controller input is the referenceNodeController
    currentNodeController.addTime(deltaTime * settings.animScale);
    skinnedNodes = scene.skinnedNodes;
    numSkins = skinnedNodes.length;
    // Fix: initialise to null so the post-loop check below cannot read
    // 'active' off an undefined variable when there are no skinned nodes.
    skinnedNode = null;
    for (skin = 0; skin < numSkins; skin += 1)
    {
        skinnedNode = skinnedNodes[skin];
        if (skinnedNode.active)
        {
            // The skinned node will perform the update
            skinnedNode.update();
        }
    }
    scene.update();
    renderer.update(graphicsDevice, camera, scene, currentTime);
    if (graphicsDevice.beginFrame())
    {
        // Only draw the skin if active. The null guard fixes a crash when
        // scene.skinnedNodes is empty (skinnedNode was undefined here).
        // NOTE(review): this gates rendering on the LAST skinned node's
        // active flag, as the original did — confirm that is intended.
        if (skinnedNode && skinnedNode.active)
        {
            renderer.setWireframe(settings.drawWireframe);
            if (renderer.updateBuffers(graphicsDevice, deviceWidth, deviceHeight))
            {
                renderer.draw(graphicsDevice, clearColor, null, null, drawDebugCB);
            }
        }
        graphicsDevice.endFrame();
    }
    previousFrameTime = currentTime;
};
var intervalID;
// Polls until the scene and all animations have finished loading, then
// performs one-time setup and switches over to the 60Hz render loop.
var loadingLoop = function loadingLoopFn()
{
    if (!sceneLoader.complete() || animationsLoaded !== 0)
    {
        // Still waiting on assets
        return;
    }
    TurbulenzEngine.clearInterval(intervalID);
    // Init the animations from the scene
    initAnimations(scene);
    // Initial reset of the camera
    resetCamera(camera, scene);
    // Scene loading is complete, now update the loaded shaders
    renderer.updateShader(shaderManager);
    intervalID = TurbulenzEngine.setInterval(renderFrame, 1000 / 60);
};
// Check for load completion ten times a second
intervalID = TurbulenzEngine.setInterval(loadingLoop, 1000 / 10);
// Create a scene destroy callback to run when the window is closed.
// Tears down every engine object and drops all references so the plugin
// can release its resources. NOTE(review): the order (renderer before the
// texture/shader managers, flush before nulling the devices) appears
// deliberate — preserve it when editing.
TurbulenzEngine.onunload = function destroyScene()
{
    // Stop whichever loop (loading or render) is currently scheduled.
    TurbulenzEngine.clearInterval(intervalID);
    if (gameSession)
    {
        gameSession.destroy();
        gameSession = null;
    }
    if (scene)
    {
        scene.destroy();
        scene = null;
    }
    requestHandler = null;
    sceneLoader = null;
    htmlControls = null;
    transitionController = null;
    transitionStartColor = [];
    transitionEndColor = [];
    currentReferenceController = null;
    currentNodeController = null;
    if (renderer)
    {
        renderer.destroy();
        renderer = null;
    }
    addAnimations = [];
    removeAnimations = [];
    settings = null;
    cameraController = null;
    camera = null;
    cameraDir = [];
    mappingTable = null;
    animMinExtent = [];
    animMaxExtent = [];
    boundsColor = [];
    interpolatorColor = [];
    clearColor = [];
    worldUp = null;
    if (textureManager)
    {
        textureManager.destroy();
        textureManager = null;
    }
    if (shaderManager)
    {
        shaderManager.destroy();
        shaderManager = null;
    }
    effectManager = null;
    // Flush pending engine work before releasing the devices.
    TurbulenzEngine.flush();
    graphicsDevice = null;
    mathDevice = null;
    inputDevice = null;
};
}; | the_stack |
import {
IPCMessageReader, IPCMessageWriter,
createConnection,
InitializeResult,
Diagnostic, DiagnosticSeverity, Range,
CompletionItem,
TextDocument, TextDocuments, TextDocumentChangeEvent, TextDocumentPositionParams,
DocumentSymbolParams, DocumentFormattingParams, DocumentRangeFormattingParams,
TextEdit,
SymbolInformation, WorkspaceSymbolParams, InitializeParams
} from 'vscode-languageserver';
import { getCursorWordBoundry } from './utils';
import * as Analysis from './analysis';
import { CompletionService } from './services/completionService';
import { buildDocumentSymbols } from './services/documentSymbolService';
import { buildWorkspaceSymbols } from './services/workspaceSymbolService';
import { buildLintingErrors } from './services/lintingService';
import { buildDocumentFormatEdits, buildDocumentRangeFormatEdits } from './services/formatService';
import { readFiles, FileNamedCallback } from 'node-dir';
import Uri from 'vscode-uri';
import * as luaparse from 'luaparse';
import { basename } from 'path';
/** User settings controlling the Lua document formatter. */
export interface FormatOptions {
    // Master switch; when false the formatting providers return no edits.
    enabled: boolean;
    // Spaces per indent level. May arrive as null, meaning "use the
    // editor's own setting"; otherwise validated with a default of 4
    // (see onDidChangeConfiguration).
    indentCount: number;
    // Indent with tabs instead of spaces.
    useTabs: boolean;
    // Maximum line width before wrapping; validated default is 120.
    lineWidth: number;
    // Prefer single-quoted strings; validated default is false.
    singleQuote: boolean;
    // Break multiple assignments onto separate lines; validated default
    // is false.
    linebreakMultipleAssignments: boolean;
}
/** User settings controlling luacheck-based linting. */
export interface LintingOptions {
    // Master switch; when false parseAndLintDocument skips luacheck.
    enabled: boolean;
    // Path to a .luacheckrc configuration file — presumably passed through
    // to luacheck by the linting service; confirm in lintingService.
    luaCheckConfig: string;
    // Extra command-line arguments forwarded to luacheck.
    luaCheckArgs: string[];
}
/** Top-level extension settings received via onDidChangeConfiguration. */
export interface Settings {
    // Path to the luacheck executable.
    luacheckPath: string;
    // When true and luacheck reports errors, they replace luaparse errors
    // instead of being merged with them.
    preferLuaCheckErrors: boolean;
    // Target Lua version; validated to one of '5.1', '5.2', '5.3'
    // (falls back to '5.1').
    targetVersion: string;
    format: FormatOptions;
    linting: LintingOptions;
}
/**
 * Wires the language-server protocol connection to the individual Lua
 * services: diagnostics (luaparse + luacheck), completion, document and
 * workspace symbols, and formatting. One instance lives for the lifetime
 * of the server process (hot-reload patches its prototype in place).
 */
class ServiceDispatcher {
    private connection = createConnection(
        new IPCMessageReader(process),
        new IPCMessageWriter(process)
    );

    /** Workspace root sent by the client in the initialize request. */
    private rootUri: string | null = null;
    // Starts empty; populated by the first onDidChangeConfiguration call.
    private settings: Settings = {} as any;
    private documents: TextDocuments = new TextDocuments();
    /** Latest successful analysis of each document, keyed by document URI. */
    private perDocumentAnalysis = new Map<string, Analysis.Analysis>();
    /** Characters that trigger completion and imply a table member access. */
    private readonly triggerCharacters = ['.', ':'];

    public constructor() {
        this.documents.onDidChangeContent(change => this.onDidChangeContent(change));
        this.documents.onDidClose(change => this.onDidClose(change));

        this.connection.onInitialize(handler => this.onInitialize(handler));
        this.connection.onCompletion(pos => this.onCompletion(pos));
        this.connection.onDocumentSymbol(handler => this.onDocumentSymbol(handler));
        this.connection.onWorkspaceSymbol(handler => this.onWorkspaceSymbol(handler));
        this.connection.onDidChangeConfiguration(change => this.onDidChangeConfiguration(change));
        this.connection.onDocumentFormatting((params) => this.onDocumentFormatting(params));
        this.connection.onDocumentRangeFormatting((params) => this.onDocumentRangeFormatting(params));

        this.documents.listen(this.connection);
        this.connection.listen();
    }

    /** Advertises the server's capabilities and records the workspace root. */
    private onInitialize(initializeParams: InitializeParams): InitializeResult {
        this.rootUri = initializeParams.rootUri;

        return {
            capabilities: {
                // Use full sync mode for now.
                // TODO: Add support for Incremental changes. Full syncs will not scale very well.
                textDocumentSync: this.documents.syncKind,
                documentSymbolProvider: true,
                workspaceSymbolProvider: true,
                completionProvider: {
                    resolveProvider: false,
                    triggerCharacters: this.triggerCharacters
                },
                documentFormattingProvider: true,
                documentRangeFormattingProvider: true
            }
        };
    }

    /** Returns outline symbols for a single document from its cached analysis. */
    private onDocumentSymbol(handler: DocumentSymbolParams): SymbolInformation[] {
        const uri = handler.textDocument.uri;
        // Fix: perDocumentAnalysis is a Map, so entries must be read with
        // .get() — the previous bracket indexing stored/read plain properties
        // on the Map object and bypassed the Map API entirely.
        const analysis = this.perDocumentAnalysis.get(uri);
        if (!analysis) {
            // No successful parse for this document yet.
            return [];
        }

        return buildDocumentSymbols(uri, analysis);
    }

    /** Scans every .lua file under the workspace root for symbols matching the query. */
    private onWorkspaceSymbol(handler: WorkspaceSymbolParams) {
        if (!this.rootUri) {
            return [];
        }

        const query = handler.query.toLowerCase();

        return new Promise<SymbolInformation[]>((resolve, reject) => {
            const symbols: SymbolInformation[] = [];

            const callback: FileNamedCallback = (err, content, filename, next) => {
                if (err) {
                    return;
                }

                try {
                    const analysis = new Analysis.Analysis();
                    analysis.end(content.toString());
                    analysis.buildGlobalSymbols();

                    symbols.push(...buildWorkspaceSymbols(filename, query, analysis));
                } catch (e) {
                    // Deliberately best-effort: files that fail to parse are
                    // simply skipped.
                }

                next();
            };

            const uri = Uri.parse(this.rootUri!);
            // Fix: escape the dot — /.lua$/ matched ANY character before
            // "lua" (e.g. "foolua"), not just the ".lua" extension.
            readFiles(uri.fsPath, { match: /\.lua$/ }, callback, (err) => {
                if (err) {
                    reject(err);
                    return;
                }

                resolve(symbols);
            });
        });
    }

    /**
     * Builds completions at the cursor. The surrounding document is rewritten
     * into syntactically valid Lua (dummy call + scope marker) so it can be
     * parsed even while the user is mid-edit.
     */
    private onCompletion(textDocumentPosition: TextDocumentPositionParams): CompletionItem[] {
        const uri = textDocumentPosition.textDocument.uri;
        const document = this.documents.get(uri);
        if (!document) {
            return [];
        }

        const documentText = document.getText();

        const { prefixStartPosition, suffixEndPosition } = getCursorWordBoundry(documentText,
            textDocumentPosition.position);

        const startOffset = document.offsetAt(prefixStartPosition);
        const endOffset = document.offsetAt(suffixEndPosition);

        const analysis = new Analysis.Analysis();
        // Write everything up to the beginning of the potentially invalid text
        analysis.write(documentText.substring(0, startOffset));

        // Is the completion for a table?
        let isTableScoped = false;

        const charAt = documentText.charAt(startOffset - 1);
        // If the completion is prefixed by a trigger character, insert a dummy function call to keep the Lua
        // syntactically valid and parsable.
        if (this.triggerCharacters.indexOf(charAt) >= 0) {
            analysis.write('__completion_helper__()');
            isTableScoped = true;
        }

        // Insert a scope marker to help us find which scope we're in
        analysis.write('__scope_marker__()');

        // And everything after
        try {
            analysis.end(documentText.substring(endOffset));
            analysis.buildScopedSymbols(isTableScoped);
        } catch (err) {
            if (!(err instanceof SyntaxError)) { throw err; }
            // Suppress the failure due to syntax errors
            return [];
        }

        const suggestionService = new CompletionService(analysis);

        const word = documentText.substring(startOffset, endOffset);
        return suggestionService.buildCompletions(word.toLowerCase());
    }

    /** Re-parses and re-lints a document on every content change. */
    private onDidChangeContent(change: TextDocumentChangeEvent) {
        this.parseAndLintDocument(change.document).then(diagnostics => {
            this.connection.sendDiagnostics({
                uri: change.document.uri,
                diagnostics
            });
        });
    }

    /** Clears diagnostics when a document is closed. */
    private onDidClose(change: TextDocumentChangeEvent) {
        this.connection.sendDiagnostics({
            uri: change.document.uri,
            diagnostics: []
        });
    }

    /** Validates and applies new settings, re-linting open documents if needed. */
    private onDidChangeConfiguration(change: any) {
        const oldVersion = this.settings ? this.settings.targetVersion : null;
        const oldLinterSettings = this.settings ? this.settings.linting : null;

        this.settings = change.settings.lua as Settings;

        // Because the JSON we get in `change` can be anything, we need to make sure that we've actually been passed
        // a valid type, and not something else, like a string.
        const validateSetting = <T>(v: any, defaultVal: T) => {
            if (typeof (v) === typeof (defaultVal)) { return v; }
            return defaultVal;
        };

        this.settings.preferLuaCheckErrors = validateSetting<boolean>(this.settings.preferLuaCheckErrors, false);

        // indentCount defaults to `null`, which means we should use the editor settings. Anything else shall override
        // what the editor tells us.
        if (this.settings.format.indentCount !== null) {
            this.settings.format.indentCount = validateSetting<number>(this.settings.format.indentCount, 4);
        }
        this.settings.format.lineWidth = validateSetting<number>(this.settings.format.lineWidth, 120);
        this.settings.format.singleQuote = validateSetting<boolean>(this.settings.format.singleQuote, false);
        this.settings.format.linebreakMultipleAssignments = validateSetting<boolean>(
            this.settings.format.linebreakMultipleAssignments, false);

        // Validate the version. onDidChangeConfiguration seems to be called for every keystroke the user enters,
        // so its possible that the version string will be malformed.
        if (!['5.1', '5.2', '5.3'].includes(this.settings.targetVersion)) {
            this.settings.targetVersion = '5.1';
        }

        // Update luaparse to reflect the user's choice in Lua version. This is much easier than
        // remembering to pass it in every time we may use it.
        luaparse.defaultOptions.luaVersion = this.settings.targetVersion;

        let relintAllDocuments = false;
        // If the version has changed, we should most definitely re-lint all documents
        if (oldVersion !== null && oldVersion !== this.settings.targetVersion) {
            relintAllDocuments = true;
        }
        // If any linter settings have changed, we should be nice and re-lint.
        if (oldLinterSettings !== null && oldLinterSettings !== this.settings.linting) {
            relintAllDocuments = true;
        }

        if (relintAllDocuments) {
            this.documents.all().forEach((doc) => {
                this.parseAndLintDocument(doc).then(diagnostics => {
                    this.connection.sendDiagnostics({
                        uri: doc.uri,
                        diagnostics
                    });
                });
            });
        }
    }

    /** Formats the whole document; returns no edits when formatting is disabled. */
    private onDocumentFormatting(params: DocumentFormattingParams): TextEdit[] {
        if (!this.settings.format.enabled) {
            return [];
        }

        const uri = params.textDocument.uri;
        const document = this.documents.get(uri);
        if (!document) {
            return [];
        }

        return buildDocumentFormatEdits(uri, document, this.settings.format, params.options);
    }

    /** Formats a sub-range of the document; returns no edits when disabled. */
    private onDocumentRangeFormatting(params: DocumentRangeFormattingParams): TextEdit[] {
        if (!this.settings.format.enabled) {
            return [];
        }

        const uri = params.textDocument.uri;
        const document = this.documents.get(uri);
        if (!document) {
            return [];
        }

        return buildDocumentRangeFormatEdits(uri, document, params.range, this.settings.format, params.options);
    }

    /**
     * Parses the document with luaparse (caching the analysis for the symbol
     * providers) and, when enabled, merges in luacheck diagnostics.
     */
    private async parseAndLintDocument(document: TextDocument) {
        const documentUri = document.uri;
        const documentText = document.getText();
        const parsedUri = Uri.parse(documentUri);

        // Don't lint the diff view. Fixes #22.
        if (parsedUri.scheme === 'showModifications') {
            return [];
        }

        // Run the document through luaparse and output any errors it finds
        const parseDocument = (): Promise<Diagnostic[]> => {
            return new Promise((resolve) => {
                try {
                    const analysis = new Analysis.Analysis();
                    analysis.end(documentText);
                    analysis.buildGlobalSymbols();
                    // Fix: store through the Map API (was bracket indexing).
                    // Caching only on success also avoids leaving a
                    // half-built analysis behind after a syntax error.
                    this.perDocumentAnalysis.set(documentUri, analysis);

                    return resolve([]);
                } catch (err) {
                    if (!(err instanceof SyntaxError)) { throw err; }
                    const e = err as any;

                    const lines = documentText.split(/\r?\n/g);
                    const line = lines[e.line - 1];

                    const range = Range.create(e.line - 1, e.column,
                        e.line - 1, line.length);

                    // Strip out the row and column from the message.
                    // Fix: guard against a non-matching message — match()
                    // returns null and indexing it threw a TypeError.
                    const messageMatch = e.message.match(/\[\d+:\d+\] (.*)/);
                    const message = messageMatch ? messageMatch[1] : e.message;

                    const diagnostic: Diagnostic = {
                        range,
                        message,
                        severity: DiagnosticSeverity.Error,
                        source: 'luaparse'
                    };

                    return resolve([diagnostic]);
                }
            });
        };

        let errors = await parseDocument();

        if (!this.settings.linting.enabled) {
            return [];
        }

        // Don't lint .luacheckrc files. Just return any parsing errors encountered.
        if (basename(parsedUri.fsPath) === '.luacheckrc') {
            return errors;
        }

        try {
            // TODO: Clean up the dependency on this.settings.. should probably have a SettingsManager type class.
            const lintingErrors = buildLintingErrors(this.settings, documentUri, documentText);

            // If luacheck errors are preferred and luacheck has provided us with some, usurp any luaparse errors.
            if (this.settings.preferLuaCheckErrors && lintingErrors.length > 0) {
                errors = lintingErrors;
            } else {
                // Otherwise, join the two lists together.
                errors = errors.concat(lintingErrors);
            }
        } catch (e) {
            // Best-effort: if luacheck itself fails, fall back to the
            // luaparse diagnostics collected above.
        }

        return errors;
    }
}
// Single dispatcher instance for this server process.
let serviceDispatcher: ServiceDispatcher | null = null;

// Hot-module-reload support (presumably webpack HMR — confirm against the
// build config): keep the live dispatcher, and its open connection, across
// code reloads.
if (module.hot) {
    module.hot.accept();

    // Before a reload, stash the current instance.
    module.hot.store(stash => {
        stash.serviceDispatcher = serviceDispatcher;
    });

    // After a reload, restore the stashed instance and copy the freshly
    // loaded method implementations onto its prototype so code changes
    // take effect without recreating the connection.
    module.hot.restore(stash => {
        if (stash.serviceDispatcher) {
            serviceDispatcher = stash.serviceDispatcher;

            const oldProto = Object.getPrototypeOf(serviceDispatcher);
            const newProto = ServiceDispatcher.prototype;
            for (const p of Object.getOwnPropertyNames(newProto)) {
                oldProto[p] = newProto[p];
            }
        }
    });
}

// First load (or no HMR): create the dispatcher, which starts listening.
if (serviceDispatcher === null) {
    serviceDispatcher = new ServiceDispatcher();
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.