repo_name
stringlengths
5
122
path
stringlengths
3
232
text
stringlengths
6
1.05M
sarahM0/cts
src/webgpu/api/operation/rendering/blending.spec.ts
export const description = `
Test blending results.

TODO:
- Test result for all combinations of args (make sure each case is distinguishable from others
- Test underflow/overflow has consistent behavior
- ?
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { assert, unreachable } from '../../../../common/util/util.js';
import { GPUTest } from '../../../gpu_test.js';
import { float32ToFloat16Bits } from '../../../util/conversion.js';

export const g = makeTestGroup(GPUTest);

// All GPUBlendFactor values exercised by the GPUBlendComponent test below.
const kBlendFactors: GPUBlendFactor[] = [
  'zero',
  'one',
  'src',
  'one-minus-src',
  'src-alpha',
  'one-minus-src-alpha',
  'dst',
  'one-minus-dst',
  'dst-alpha',
  'one-minus-dst-alpha',
  'src-alpha-saturated',
  'constant',
  'one-minus-constant',
];

const kBlendOperations: GPUBlendOperation[] = [
  'add', //
  'subtract',
  'reverse-subtract',
  'min',
  'max',
];

// Applies `f` to each of the four channels of `col`, passing the channel key as
// the second argument, and returns the resulting color.
function mapColor(
  col: GPUColorDict,
  f: (v: number, k: keyof GPUColorDict) => number
): GPUColorDict {
  return {
    r: f(col.r, 'r'),
    g: f(col.g, 'g'),
    b: f(col.b, 'b'),
    a: f(col.a, 'a'),
  };
}

// Reference implementation of the blend-factor computation: returns the
// per-channel factor for `factor` given the source/destination colors and,
// for the 'constant' factors, the blend constant (asserted to be provided).
function computeBlendFactor(
  src: GPUColorDict,
  dst: GPUColorDict,
  blendColor: GPUColorDict | undefined,
  factor: GPUBlendFactor
): GPUColorDict {
  switch (factor) {
    case 'zero':
      return { r: 0, g: 0, b: 0, a: 0 };
    case 'one':
      return { r: 1, g: 1, b: 1, a: 1 };
    case 'src':
      return { ...src };
    case 'one-minus-src':
      return mapColor(src, v => 1 - v);
    case 'src-alpha':
      return mapColor(src, () => src.a);
    case 'one-minus-src-alpha':
      return mapColor(src, () => 1 - src.a);
    case 'dst':
      return { ...dst };
    case 'one-minus-dst':
      return mapColor(dst, v => 1 - v);
    case 'dst-alpha':
      return mapColor(dst, () => dst.a);
    case 'one-minus-dst-alpha':
      return mapColor(dst, () => 1 - dst.a);
    case 'src-alpha-saturated': {
      const f = Math.min(src.a, 1 - dst.a);
      return { r: f, g: f, b: f, a: 1 };
    }
    case 'constant':
      assert(blendColor !== undefined);
      return { ...blendColor };
    case 'one-minus-constant':
      assert(blendColor !== undefined);
      return mapColor(blendColor, v => 1 - v);
    default:
      unreachable();
  }
}

// Reference implementation of the blend equation. Note that for 'min'/'max'
// the factors are ignored and src/dst are compared directly.
function computeBlendOperation(
  src: GPUColorDict,
  srcFactor: GPUColorDict,
  dst: GPUColorDict,
  dstFactor: GPUColorDict,
  operation: GPUBlendOperation
) {
  switch (operation) {
    case 'add':
      return mapColor(src, (_, k) => srcFactor[k] * src[k] + dstFactor[k] * dst[k]);
    case 'max':
      return mapColor(src, (_, k) => Math.max(src[k], dst[k]));
    case 'min':
      return mapColor(src, (_, k) => Math.min(src[k], dst[k]));
    case 'reverse-subtract':
      return mapColor(src, (_, k) => dstFactor[k] * dst[k] - srcFactor[k] * src[k]);
    case 'subtract':
      return mapColor(src, (_, k) => srcFactor[k] * src[k] - dstFactor[k] * dst[k]);
  }
}

g.test('GPUBlendComponent')
  .desc(
    `Test all combinations of parameters for GPUBlendComponent.

  Tests that parameters are correctly passed to the backend API and blend computations
  are done correctly by blending a single pixel. The test uses rgba16float as the format
  to avoid checking clamping behavior (tested in api,operation,rendering,blending:clamp,*).

  Params:
    - component= {color, alpha} - whether to test blending the color or the alpha component.
    - srcFactor= {...all GPUBlendFactors}
    - dstFactor= {...all GPUBlendFactors}
    - operation= {...all GPUBlendOperations}`
  )
  .params(u =>
    u //
      .combine('component', ['color', 'alpha'] as const)
      .combine('srcFactor', kBlendFactors)
      .combine('dstFactor', kBlendFactors)
      .combine('operation', kBlendOperations)
      .beginSubcases()
      .combine('srcColor', [{ r: 0.11, g: 0.61, b: 0.81, a: 0.44 }])
      .combine('dstColor', [
        { r: 0.51, g: 0.22, b: 0.71, a: 0.33 },
        { r: 0.09, g: 0.73, b: 0.93, a: 0.81 },
      ])
      // Only add a blend constant subcase when one of the factors reads it.
      .expand('blendConstant', p => {
        const needsBlendConstant =
          p.srcFactor === 'one-minus-constant' ||
          p.srcFactor === 'constant' ||
          p.dstFactor === 'one-minus-constant' ||
          p.dstFactor === 'constant';
        return needsBlendConstant ? [{ r: 0.91, g: 0.82, b: 0.73, a: 0.64 }] : [undefined];
      })
  )
  .fn(t => {
    const textureFormat: GPUTextureFormat = 'rgba16float';
    const srcColor = t.params.srcColor;
    const dstColor = t.params.dstColor;
    const blendConstant = t.params.blendConstant;

    // Compute the expected result with the reference implementation above.
    const srcFactor = computeBlendFactor(srcColor, dstColor, blendConstant, t.params.srcFactor);
    const dstFactor = computeBlendFactor(srcColor, dstColor, blendConstant, t.params.dstFactor);

    const expectedColor = computeBlendOperation(
      srcColor,
      srcFactor,
      dstColor,
      dstFactor,
      t.params.operation
    );

    // Only one component (color or alpha) uses the tested blend settings; the
    // other is left at its default, which passes the source value through.
    switch (t.params.component) {
      case 'color':
        expectedColor.a = srcColor.a;
        break;
      case 'alpha':
        expectedColor.r = srcColor.r;
        expectedColor.g = srcColor.g;
        expectedColor.b = srcColor.b;
        break;
    }

    const pipeline = t.device.createRenderPipeline({
      fragment: {
        targets: [
          {
            format: textureFormat,
            blend: {
              // Set both color/alpha to defaults...
              color: {},
              alpha: {},
              // ... but then override the component we're testing.
              [t.params.component]: {
                srcFactor: t.params.srcFactor,
                dstFactor: t.params.dstFactor,
                operation: t.params.operation,
              },
            },
          },
        ],
        module: t.device.createShaderModule({
          code: `
[[block]] struct Uniform {
  color: vec4<f32>;
};
[[group(0), binding(0)]] var<uniform> u : Uniform;

[[stage(fragment)]] fn main() -> [[location(0)]] vec4<f32> {
  return u.color;
}
`,
        }),
        entryPoint: 'main',
      },
      vertex: {
        module: t.device.createShaderModule({
          code: `
[[stage(vertex)]] fn main() -> [[builtin(position)]] vec4<f32> {
  return vec4<f32>(0.0, 0.0, 0.0, 1.0);
}
`,
        }),
        entryPoint: 'main',
      },
      primitive: {
        topology: 'point-list',
      },
    });

    // Single-pixel render target; dstColor is loaded as the existing contents.
    const renderTarget = t.device.createTexture({
      usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC,
      size: [1, 1, 1],
      format: textureFormat,
    });

    const commandEncoder = t.device.createCommandEncoder();
    const renderPass = commandEncoder.beginRenderPass({
      colorAttachments: [
        {
          view: renderTarget.createView(),
          loadValue: dstColor,
          storeOp: 'store',
        },
      ],
    });
    renderPass.setPipeline(pipeline);
    if (blendConstant) {
      renderPass.setBlendConstant(blendConstant);
    }
    renderPass.setBindGroup(
      0,
      t.device.createBindGroup({
        layout: pipeline.getBindGroupLayout(0),
        entries: [
          {
            binding: 0,
            resource: {
              // The fragment shader outputs srcColor via this uniform.
              buffer: t.makeBufferWithContents(
                new Float32Array([srcColor.r, srcColor.g, srcColor.b, srcColor.a]),
                GPUBufferUsage.UNIFORM
              ),
            },
          },
        ],
      })
    );
    renderPass.draw(1);
    renderPass.endPass();
    t.device.queue.submit([commandEncoder.finish()]);

    // Accept results within +/- tolerance of the reference computation.
    const tolerance = 0.003;
    const expectedLow = mapColor(expectedColor, v => v - tolerance);
    const expectedHigh = mapColor(expectedColor, v => v + tolerance);

    t.expectSinglePixelBetweenTwoValuesFloat16In2DTexture(
      renderTarget,
      textureFormat,
      { x: 0, y: 0 },
      {
        exp: [
          // Use Uint16Array to store Float16 value bits
          new Uint16Array(
            [expectedLow.r, expectedLow.g, expectedLow.b, expectedLow.a].map(float32ToFloat16Bits)
          ),
          new Uint16Array(
            [expectedHigh.r, expectedHigh.g, expectedHigh.b, expectedHigh.a].map(
              float32ToFloat16Bits
            )
          ),
        ],
      }
    );
  });

g.test('formats')
  .desc(
    `Test blending results works for all formats that support it, and that blending is not applied
  for formats that do not. Blending should be done in linear space for srgb formats.`
  )
  .unimplemented();

g.test('clamp,blend_factor')
  .desc('For fixed-point formats, test that the blend factor is clamped in the blend equation.')
  .unimplemented();

g.test('clamp,blend_color')
  .desc('For fixed-point formats, test that the blend color is clamped in the blend equation.')
  .unimplemented();

g.test('clamp,blend_result')
  .desc('For fixed-point formats, test that the blend result is clamped in the blend equation.')
  .unimplemented();
sarahM0/cts
src/webgpu/shader/execution/builtin/logical_built_in_functions.spec.ts
export const description = `WGSL execution test. Section: Logical built-in functions`; import { makeTestGroup } from '../../../../common/framework/test_group.js'; import { GPUTest } from '../../../gpu_test.js'; export const g = makeTestGroup(GPUTest); g.test('logical_builtin_functions,scalar_select') .uniqueId('50b1f627c11098a1') .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#logical-builtin-functions') .desc( ` scalar select: T is a scalar or a vector select(f:T,t:T,cond: bool): T Returns t when cond is true, and f otherwise. (OpSelect) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md ` ) .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3'])) .unimplemented(); g.test('logical_builtin_functions,vector_select') .uniqueId('8b7bb7f58ee1e479') .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#logical-builtin-functions') .desc( ` vector select: T is a scalar select(f: vecN<T>,t: vecN<T>,cond: vecN<bool>) Component-wise selection. Result component i is evaluated as select(f[i],t[i],cond[i]). (OpSelect) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md ` ) .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3'])) .unimplemented();
sarahM0/cts
src/webgpu/util/memory.ts
/** * Helper to exhaust VRAM until there is less than 64 MB of capacity. Returns * an opaque closure which can be called to free the allocated resources later. */ export async function exhaustVramUntilUnder64MB(device: GPUDevice) { const allocateUntilOom = async (device: GPUDevice, size: number) => { const buffers = []; for (;;) { device.pushErrorScope('out-of-memory'); const buffer = device.createBuffer({ size, usage: GPUBufferUsage.STORAGE }); if (await device.popErrorScope()) { return buffers; } buffers.push(buffer); } }; const kLargeChunkSize = 512 * 1024 * 1024; const kSmallChunkSize = 64 * 1024 * 1024; const buffers = await allocateUntilOom(device, kLargeChunkSize); buffers.push(...(await allocateUntilOom(device, kSmallChunkSize))); return () => { buffers.forEach(buffer => buffer.destroy()); }; }
sarahM0/cts
src/common/tools/crawl.ts
<filename>src/common/tools/crawl.ts // Node can look at the filesystem, but JS in the browser can't. // This crawls the file tree under src/suites/${suite} to generate a (non-hierarchical) static // listing file that can then be used in the browser to load the modules containing the tests. import * as fs from 'fs'; import * as path from 'path'; import { SpecFile } from '../internal/file_loader.js'; import { validQueryPart } from '../internal/query/validQueryPart.js'; import { TestSuiteListingEntry, TestSuiteListing } from '../internal/test_suite_listing.js'; import { assert, unreachable } from '../util/util.js'; const specFileSuffix = __filename.endsWith('.ts') ? '.spec.ts' : '.spec.js'; async function crawlFilesRecursively(dir: string): Promise<string[]> { const subpathInfo = await Promise.all( (await fs.promises.readdir(dir)).map(async d => { const p = path.join(dir, d); const stats = await fs.promises.stat(p); return { path: p, isDirectory: stats.isDirectory(), isFile: stats.isFile(), }; }) ); const files = subpathInfo .filter( i => i.isFile && (i.path.endsWith(specFileSuffix) || i.path.endsWith(`${path.sep}README.txt`) || i.path === 'README.txt') ) .map(i => i.path); return files.concat( await subpathInfo .filter(i => i.isDirectory) .map(i => crawlFilesRecursively(i.path)) .reduce(async (a, b) => (await a).concat(await b), Promise.resolve([])) ); } export async function crawl( suiteDir: string, validate: boolean = true ): Promise<TestSuiteListingEntry[]> { if (!fs.existsSync(suiteDir)) { console.error(`Could not find ${suiteDir}`); process.exit(1); } // Crawl files and convert paths to be POSIX-style, relative to suiteDir. const filesToEnumerate = (await crawlFilesRecursively(suiteDir)) .map(f => path.relative(suiteDir, f).replace(/\\/g, '/')) .sort(); const entries: TestSuiteListingEntry[] = []; for (const file of filesToEnumerate) { // |file| is the suite-relative file path. 
if (file.endsWith(specFileSuffix)) { const filepathWithoutExtension = file.substring(0, file.length - specFileSuffix.length); const suite = path.basename(suiteDir); if (validate) { const filename = `../../${suite}/${filepathWithoutExtension}.spec.js`; assert(!process.env.STANDALONE_DEV_SERVER); const mod = (await import(filename)) as SpecFile; assert(mod.description !== undefined, 'Test spec file missing description: ' + filename); assert(mod.g !== undefined, 'Test spec file missing TestGroup definition: ' + filename); mod.g.validate(); } const pathSegments = filepathWithoutExtension.split('/'); for (const p of pathSegments) { assert(validQueryPart.test(p), `Invalid directory name ${p}; must match ${validQueryPart}`); } entries.push({ file: pathSegments }); } else if (path.basename(file) === 'README.txt') { const dirname = path.dirname(file); const readme = fs.readFileSync(path.join(suiteDir, file), 'utf8').trim(); const pathSegments = dirname !== '.' ? dirname.split('/') : []; entries.push({ file: pathSegments, readme }); } else { unreachable(`Matched an unrecognized filename ${file}`); } } return entries; } export function makeListing(filename: string): Promise<TestSuiteListing> { // Don't validate. This path is only used for the dev server and running tests with Node. // Validation is done for listing generation and presubmit. return crawl(path.dirname(filename), false); }
sarahM0/cts
src/stress/queries/occlusion.spec.ts
export const description = ` Stress tests for occlusion queries. `; import { makeTestGroup } from '../../common/framework/test_group.js'; import { GPUTest } from '../../webgpu/gpu_test.js'; export const g = makeTestGroup(GPUTest); g.test('many').desc(`Tests a huge number of occlusion queries in a render pass.`).unimplemented();
sarahM0/cts
src/unittests/query_string.spec.ts
export const description = `
Unit tests for TestQuery strings.
`;

import { makeTestGroup } from '../common/framework/test_group.js';
import { compareQueries, Ordering } from '../common/internal/query/compare.js';
import {
  TestQuery,
  TestQuerySingleCase,
  TestQueryMultiCase,
  TestQueryMultiTest,
  TestQueryMultiFile,
  relativeQueryString,
} from '../common/internal/query/query.js';

import { UnitTest } from './unit_test.js';

class T extends UnitTest {
  // Checks that `q.toString()` produces exactly `exp`.
  expectQueryString(q: TestQuery, exp: string): void {
    const s = q.toString();
    this.expect(s === exp, `got ${s} expected ${exp}`);
  }

  // Checks that relativeQueryString(parent, child) produces exactly `exp`, and
  // that swapping the arguments throws (unless parent and child are equal).
  expectRelativeQueryString(parent: TestQuery, child: TestQuery, exp: string): void {
    const s = relativeQueryString(parent, child);
    this.expect(s === exp, `got ${s} expected ${exp}`);

    if (compareQueries(parent, child) !== Ordering.Equal) {
      // Test in reverse
      this.shouldThrow('Error', () => {
        relativeQueryString(child, parent);
      });
    }
  }
}

export const g = makeTestGroup(T);

// Note: underscore-prefixed params (_pri1, _pri2) are private and must be
// omitted from the stringified query.
g.test('stringifyQuery,single_case').fn(t => {
  t.expectQueryString(
    new TestQuerySingleCase('a', ['b_1', '2_c'], ['d_3', '4_e'], {
      f: 'g',
      _pri1: 0,
      x: 3,
      _pri2: 1,
    }),
    'a:b_1,2_c:d_3,4_e:f="g";x=3'
  );
});

g.test('stringifyQuery,single_case,json').fn(t => {
  t.expectQueryString(
    new TestQuerySingleCase('a', ['b_1', '2_c'], ['d_3', '4_e'], {
      f: 'g',
      x: { p: 2, q: 'Q' },
    }),
    'a:b_1,2_c:d_3,4_e:f="g";x={"p":2,"q":"Q"}'
  );
});

g.test('stringifyQuery,multi_case').fn(t => {
  t.expectQueryString(
    new TestQueryMultiCase('a', ['b_1', '2_c'], ['d_3', '4_e'], {
      f: 'g',
      _pri1: 0,
      a: 3,
      _pri2: 1,
    }),
    'a:b_1,2_c:d_3,4_e:f="g";a=3;*'
  );

  t.expectQueryString(
    new TestQueryMultiCase('a', ['b_1', '2_c'], ['d_3', '4_e'], {}),
    'a:b_1,2_c:d_3,4_e:*'
  );
});

g.test('stringifyQuery,multi_test').fn(t => {
  t.expectQueryString(
    new TestQueryMultiTest('a', ['b_1', '2_c'], ['d_3', '4_e']),
    'a:b_1,2_c:d_3,4_e,*'
  );

  t.expectQueryString(
    new TestQueryMultiTest('a', ['b_1', '2_c'], []), //
    'a:b_1,2_c:*'
  );
});

g.test('stringifyQuery,multi_file').fn(t => {
  t.expectQueryString(
    new TestQueryMultiFile('a', ['b_1', '2_c']), //
    'a:b_1,2_c,*'
  );

  t.expectQueryString(
    new TestQueryMultiFile('a', []), //
    'a:*'
  );
});

g.test('relativeQueryString,equal_or_child').fn(t => {
  // Depth difference = 0
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', []), //
    new TestQueryMultiFile('a', []), //
    ''
  );
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', ['b', 'c']), //
    new TestQueryMultiFile('a', ['b', 'c']), //
    ''
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    ''
  );
  t.expectRelativeQueryString(
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0 }), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0 }), //
    ''
  );
  t.expectRelativeQueryString(
    new TestQuerySingleCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    new TestQuerySingleCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    ''
  );

  // Depth difference = 1
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', []), //
    new TestQueryMultiFile('a', ['b']), //
    ':b,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', ['b']), //
    new TestQueryMultiFile('a', ['b', 'c']), //
    ',c,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', ['b', 'c']), //
    new TestQueryMultiTest('a', ['b', 'c'], []), //
    ':*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], []), //
    new TestQueryMultiTest('a', ['b', 'c'], ['d']), //
    ':d,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], ['d']), //
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    ',e,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], {}), //
    ':*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], {}), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0 }), //
    ':f=0;*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0 }), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    ';g=1;*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    new TestQuerySingleCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    ''
  );

  // Depth difference = 2
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', []), //
    new TestQueryMultiFile('a', ['b', 'c']), //
    ':b,c,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', ['b', 'c']), //
    new TestQueryMultiTest('a', ['b', 'c'], ['d']), //
    ':d,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], ['d']), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], {}), //
    ',e:*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], {}), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    ':f=0;g=1;*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    new TestQuerySingleCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1, h: 2 }), //
    ';h=2'
  );

  // Depth difference = 2
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', ['b']), //
    new TestQueryMultiTest('a', ['b', 'c'], []), //
    ',c:*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], []), //
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    ':d,e,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0 }), //
    ':f=0;*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0 }), //
    new TestQuerySingleCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    ';g=1'
  );

  // Depth difference = 4
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', []), //
    new TestQueryMultiTest('a', ['b', 'c'], ['d']), //
    ':b,c:d,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], ['d']), //
    new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    ',e:f=0;g=1;*'
  );

  // Depth difference = 4
  t.expectRelativeQueryString(
    new TestQueryMultiFile('a', ['b']), //
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    ',c:d,e,*'
  );
  t.expectRelativeQueryString(
    new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']), //
    new TestQuerySingleCase('a', ['b', 'c'], ['d', 'e'], { f: 0, g: 1 }), //
    ':f=0;g=1'
  );
});

// Queries that diverge (different file/test/case segments) have no relative
// string in either direction; relativeQueryString must throw.
g.test('relativeQueryString,unrelated').fn(t => {
  t.shouldThrow('Error', () => {
    relativeQueryString(
      new TestQueryMultiFile('a', ['b', 'x']), //
      new TestQueryMultiFile('a', ['b', 'c']) //
    );
  });
  t.shouldThrow('Error', () => {
    relativeQueryString(
      new TestQueryMultiTest('a', ['b', 'c'], ['d', 'x']), //
      new TestQueryMultiTest('a', ['b', 'c'], ['d', 'e']) //
    );
  });
  t.shouldThrow('Error', () => {
    relativeQueryString(
      new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 0 }), //
      new TestQueryMultiCase('a', ['b', 'c'], ['d', 'e'], { f: 1 }) //
    );
  });
});
sarahM0/cts
src/common/internal/test_suite_listing.ts
// A listing of all specs within a single suite. This is the (awaited) type of // `groups` in '{cts,unittests}/listing.ts' and `listing` in the auto-generated // 'out/{cts,unittests}/listing.js' files (see tools/gen_listings). export type TestSuiteListing = TestSuiteListingEntry[]; export type TestSuiteListingEntry = TestSuiteListingEntrySpec | TestSuiteListingEntryReadme; interface TestSuiteListingEntrySpec { readonly file: string[]; } interface TestSuiteListingEntryReadme { readonly file: string[]; readonly readme: string; }
sarahM0/cts
src/webgpu/api/operation/uncapturederror.spec.ts
export const description = ` Tests for GPUDevice.onuncapturederror. `; import { Fixture } from '../../../common/framework/fixture.js'; import { makeTestGroup } from '../../../common/framework/test_group.js'; export const g = makeTestGroup(Fixture); g.test('constructor') .desc( `GPUUncapturedErrorEvent constructor options (also tests constructing GPUOutOfMemoryError/GPUValidationError)` ) .unimplemented(); g.test('iff_uncaptured') .desc( `{validation, out-of-memory} error should fire uncapturederror iff not captured by a scope.` ) .unimplemented(); g.test('only_original_device_is_event_target') .desc( `Original GPUDevice objects are EventTargets and have onuncapturederror, but deserialized GPUDevices do not.` ) .unimplemented(); g.test('uncapturederror_from_non_originating_thread') .desc( `Uncaptured errors on any thread should always propagate to the original GPUDevice object (since deserialized ones don't have EventTarget/onuncapturederror).` ) .unimplemented();
sarahM0/cts
src/webgpu/api/operation/vertex_state/correctness.spec.ts
export const description = ` TODO: Test more corner case values for Float16 / Float32 (INF, NaN, +-0, ...) and reduce the float tolerance. `; import { makeTestGroup } from '../../../../common/framework/test_group.js'; import { assert, memcpy, unreachable } from '../../../../common/util/util.js'; import { kMaxVertexAttributes, kMaxVertexBufferArrayStride, kMaxVertexBuffers, kPerStageBindingLimits, kVertexFormatInfo, kVertexFormats, } from '../../../capability_info.js'; import { GPUTest } from '../../../gpu_test.js'; import { float32ToFloat16Bits, normalizedIntegerAsFloat } from '../../../util/conversion.js'; import { align, clamp } from '../../../util/math.js'; // These types mirror the structure of GPUVertexBufferLayout but allow defining the extra // dictionary members at the GPUVertexBufferLayout and GPUVertexAttribute level. The are used // like so: // // VertexState<{arrayStride: number}, {format: VertexFormat}> // VertexBuffer<{arrayStride: number}, {format: VertexFormat}> // VertexAttrib<{format: VertexFormat}> type VertexAttrib<A> = A & { shaderLocation: number }; type VertexBuffer<V, A> = V & { slot: number; attributes: VertexAttrib<A>[]; }; type VertexState<V, A> = VertexBuffer<V, A>[]; type VertexLayoutState<V, A> = VertexState< { stepMode: GPUVertexStepMode; arrayStride: number } & V, { format: GPUVertexFormat; offset: number } & A >; function mapBufferAttribs<V, A1, A2>( buffer: VertexBuffer<V, A1>, f: (v: V, a: VertexAttrib<A1>) => A2 ): VertexBuffer<V, A2> { const newAttributes: VertexAttrib<A2>[] = []; for (const a of buffer.attributes) { newAttributes.push({ shaderLocation: a.shaderLocation, ...f(buffer, a), }); } return { ...buffer, attributes: newAttributes }; } function mapStateAttribs<V, A1, A2>( buffers: VertexState<V, A1>, f: (v: V, a: VertexAttrib<A1>) => A2 ): VertexState<V, A2> { return buffers.map(b => mapBufferAttribs(b, f)); } type TestData = { shaderBaseType: string; floatTolerance?: number; // The number of vertex components in the 
vertexData (expectedData might contain more because // it is padded to 4 components). testComponentCount: number; // The data that will be in the uniform buffer and used to check the vertex inputs. expectedData: ArrayBuffer; // The data that will be in the vertex buffer. vertexData: ArrayBuffer; }; class VertexStateTest extends GPUTest { // Generate for VS + FS (entrypoints vsMain / fsMain) that for each attribute will check that its // value corresponds to what's expected (as provided by a uniform buffer per attribute) and then // renders each vertex at position (vertexIndex, instanceindex) with either 1 (success) or // a negative number corresponding to the check number (in case you need to debug a failure). makeTestWGSL( buffers: VertexState< { stepMode: GPUVertexStepMode }, { format: GPUVertexFormat; shaderBaseType: string; shaderComponentCount?: number; floatTolerance?: number; } >, vertexCount: number, instanceCount: number ): string { // In the base WebGPU spec maxVertexAttributes is larger than maxUniformBufferPerStage. We'll // use a combination of uniform and storage buffers to cover all possible attributes. This // happens to work because maxUniformBuffer + maxStorageBuffer = 12 + 8 = 20 which is larger // than maxVertexAttributes = 16. // However this might not work in the future for implementations that allow even more vertex // attributes so there will need to be larger changes when that happens. const maxUniformBuffers = kPerStageBindingLimits['uniformBuf'].max; assert(maxUniformBuffers + kPerStageBindingLimits['storageBuf'].max >= kMaxVertexAttributes); let vsInputs = ''; let vsChecks = ''; let vsBindings = ''; for (const b of buffers) { for (const a of b.attributes) { const format = kVertexFormatInfo[a.format]; const shaderComponentCount = a.shaderComponentCount ?? 
format.componentCount; const i = a.shaderLocation; // shaderType is either a scalar type like f32 or a vecN<scalarType> let shaderType = a.shaderBaseType; if (shaderComponentCount !== 1) { shaderType = `vec${shaderComponentCount}<${shaderType}>`; } let maxCount = `${vertexCount}`; let indexBuiltin = `input.vertexIndex`; if (b.stepMode === 'instance') { maxCount = `${instanceCount}`; indexBuiltin = `input.instanceIndex`; } // Start using storage buffers when we run out of uniform buffers. let storageType = 'uniform'; if (i >= maxUniformBuffers) { storageType = 'storage, read'; } vsInputs += ` [[location(${i})]] attrib${i} : ${shaderType};\n`; vsBindings += `[[block]] struct S${i} { data : array<vec4<${a.shaderBaseType}>, ${maxCount}>; };\n`; vsBindings += `[[group(0), binding(${i})]] var<${storageType}> providedData${i} : S${i};\n`; // Generate the all the checks for the attributes. for (let component = 0; component < shaderComponentCount; component++) { // Components are filled with (0, 0, 0, 1) if they aren't provided data from the pipeline. if (component >= format.componentCount) { const expected = component === 3 ? '1' : '0'; vsChecks += ` check(input.attrib${i}[${component}] == ${a.shaderBaseType}(${expected}));\n`; continue; } // Check each component individually, with special handling of tolerance for floats. const attribComponent = shaderComponentCount === 1 ? `input.attrib${i}` : `input.attrib${i}[${component}]`; const providedData = `providedData${i}.data[${indexBuiltin}][${component}]`; if (format.type === 'uint' || format.type === 'sint') { vsChecks += ` check(${attribComponent} == ${providedData});\n`; } else { vsChecks += ` check(floatsSimilar(${attribComponent}, ${providedData}, f32(${ a.floatTolerance ?? 
0 })));\n`; } } } } return ` struct Inputs { ${vsInputs} [[builtin(vertex_index)]] vertexIndex: u32; [[builtin(instance_index)]] instanceIndex: u32; }; ${vsBindings} var<private> vsResult : i32 = 1; var<private> checkIndex : i32 = 0; fn check(success : bool) { if (!success) { vsResult = -checkIndex; } checkIndex = checkIndex + 1; } fn floatsSimilar(a : f32, b : f32, tolerance : f32) -> bool { if (isNan(a) && isNan(b)) { return true; } if (isInf(a) && isInf(b) && sign(a) == sign(b)) { return true; } if (isInf(a) || isInf(b)) { return false; } // TODO do we check for + and - 0? return abs(a - b) < tolerance; } fn doTest(input : Inputs) { ${vsChecks} } struct VSOutputs { [[location(0), interpolate(flat)]] result : i32; [[builtin(position)]] position : vec4<f32>; }; [[stage(vertex)]] fn vsMain(input : Inputs) -> VSOutputs { doTest(input); // Place that point at pixel (vertexIndex, instanceIndex) in a framebuffer of size // (vertexCount , instanceCount). var output : VSOutputs; output.position = vec4<f32>( ((f32(input.vertexIndex) + 0.5) / ${vertexCount}.0 * 2.0) - 1.0, ((f32(input.instanceIndex) + 0.5) / ${instanceCount}.0 * 2.0) - 1.0, 0.0, 1.0 ); output.result = vsResult; return output; } [[stage(fragment)]] fn fsMain([[location(0), interpolate(flat)]] result : i32) -> [[location(0)]] i32 { return result; } `; } makeTestPipeline( buffers: VertexState< { stepMode: GPUVertexStepMode; arrayStride: number }, { offset: number; format: GPUVertexFormat; shaderBaseType: string; shaderComponentCount?: number; floatTolerance?: number; } >, vertexCount: number, instanceCount: number ): GPURenderPipeline { const module = this.device.createShaderModule({ code: this.makeTestWGSL(buffers, vertexCount, instanceCount), }); const bufferLayouts: GPUVertexBufferLayout[] = []; for (const b of buffers) { bufferLayouts[b.slot] = b; } return this.device.createRenderPipeline({ vertex: { module, entryPoint: 'vsMain', buffers: bufferLayouts, }, primitive: { topology: 'point-list', }, fragment: 
{ module, entryPoint: 'fsMain', targets: [ { format: 'r32sint', }, ], }, }); } // Runs the render pass drawing points in a vertexCount*instanceCount rectangle, then check each // of produced a value of 1 which means that the tests in the shader passed. submitRenderPass( pipeline: GPURenderPipeline, buffers: VertexState<{ buffer: GPUBuffer; vbOffset?: number }, {}>, expectedData: GPUBindGroup, vertexCount: number, instanceCount: number ) { const testTexture = this.device.createTexture({ format: 'r32sint', size: [vertexCount, instanceCount], usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC, }); const encoder = this.device.createCommandEncoder(); const pass = encoder.beginRenderPass({ colorAttachments: [ { view: testTexture.createView(), loadValue: [0, 0, 0, 0], storeOp: 'store', }, ], }); pass.setPipeline(pipeline); pass.setBindGroup(0, expectedData); for (const buffer of buffers) { pass.setVertexBuffer(buffer.slot, buffer.buffer, buffer.vbOffset ?? 0); } pass.draw(vertexCount, instanceCount); pass.endPass(); this.device.queue.submit([encoder.finish()]); this.expectSingleColor(testTexture, 'r32sint', { size: [vertexCount, instanceCount, 1], exp: { R: 1 }, }); } // Generate TestData for the format with interesting test values. // TODO cache the result on the fixture? // Note that the test data always starts with an interesting value, so that using the first // test value in a test is still meaningful. generateTestData(format: GPUVertexFormat): TestData { const formatInfo = kVertexFormatInfo[format]; const bitSize = formatInfo.bytesPerComponent * 8; switch (formatInfo.type) { case 'float': { const data = [42.42, 0.0, 1.0, -1.0, 1000, -18.7, 25.17]; const expectedData = new Float32Array(data).buffer; const vertexData = bitSize === 32 ? expectedData : bitSize === 16 ? 
new Uint16Array(data.map(float32ToFloat16Bits)).buffer : unreachable(); return { shaderBaseType: 'f32', testComponentCount: data.length, expectedData, vertexData, floatTolerance: 0.05, }; } case 'sint': { /* prettier-ignore */ const data = [ 42, 0, 1, 2, 3, 4, 5, -1, -2, -3, -4, -5, Math.pow(2, bitSize - 2), Math.pow(2, bitSize - 1) - 1, // max value -Math.pow(2, bitSize - 2), -Math.pow(2, bitSize - 1), // min value ]; const expectedData = new Int32Array(data).buffer; const vertexData = bitSize === 32 ? expectedData : bitSize === 16 ? new Int16Array(data).buffer : new Int8Array(data).buffer; return { shaderBaseType: 'i32', testComponentCount: data.length, expectedData, vertexData, }; } case 'uint': { /* prettier-ignore */ const data = [ 42, 0, 1, 2, 3, 4, 5, Math.pow(2, bitSize - 1), Math.pow(2, bitSize) - 1, // max value ]; const expectedData = new Uint32Array(data).buffer; const vertexData = bitSize === 32 ? expectedData : bitSize === 16 ? new Uint16Array(data).buffer : new Uint8Array(data).buffer; return { shaderBaseType: 'u32', testComponentCount: data.length, expectedData, vertexData, }; } case 'snorm': { /* prettier-ignore */ const data = [ 42, 0, 1, 2, 3, 4, 5, -1, -2, -3, -4, -5, Math.pow(2,bitSize - 2), Math.pow(2,bitSize - 1) - 1, // max value -Math.pow(2,bitSize - 2), -Math.pow(2,bitSize - 1), // min value ]; const vertexData = bitSize === 16 ? new Int16Array(data).buffer : bitSize === 8 ? new Int8Array(data).buffer : unreachable(); return { shaderBaseType: 'f32', testComponentCount: data.length, expectedData: new Float32Array(data.map(v => normalizedIntegerAsFloat(v, bitSize, true))) .buffer, vertexData, floatTolerance: 0.1 * normalizedIntegerAsFloat(1, bitSize, true), }; } case 'unorm': { /* prettier-ignore */ const data = [ 42, 0, 1, 2, 3, 4, 5, Math.pow(2, bitSize - 1), Math.pow(2, bitSize) - 1, // max value ]; const vertexData = bitSize === 16 ? new Uint16Array(data).buffer : bitSize === 8 ? 
new Uint8Array(data).buffer : unreachable(); return { shaderBaseType: 'f32', testComponentCount: data.length, expectedData: new Float32Array(data.map(v => normalizedIntegerAsFloat(v, bitSize, false))) .buffer, vertexData: vertexData!, floatTolerance: 0.1 * normalizedIntegerAsFloat(1, bitSize, false), }; } } } // The TestData generated for a format might not contain enough data for all the vertices we are // going to draw, so we expand them by adding additional copies of the vertexData as needed. // expectedData is a bit different because it also needs to be unpacked to have `componentCount` // components every 4 components (because the shader uses vec4 for the expected data). expandTestData(data: TestData, maxCount: number, componentCount: number): TestData { const vertexComponentSize = data.vertexData.byteLength / data.testComponentCount; const expectedComponentSize = data.expectedData.byteLength / data.testComponentCount; const expandedVertexData = new Uint8Array(maxCount * componentCount * vertexComponentSize); const expandedExpectedData = new Uint8Array(4 * maxCount * expectedComponentSize); for (let index = 0; index < maxCount; index++) { for (let component = 0; component < componentCount; component++) { // If only we had some builtin JS memcpy function between ArrayBuffers... 
const targetVertexOffset = (index * componentCount + component) * vertexComponentSize; const sourceVertexOffset = targetVertexOffset % data.vertexData.byteLength; memcpy( { src: data.vertexData, start: sourceVertexOffset, length: vertexComponentSize }, { dst: expandedVertexData, start: targetVertexOffset } ); const targetExpectedOffset = (index * 4 + component) * expectedComponentSize; const sourceExpectedOffset = ((index * componentCount + component) * expectedComponentSize) % data.expectedData.byteLength; memcpy( { src: data.expectedData, start: sourceExpectedOffset, length: expectedComponentSize }, { dst: expandedExpectedData, start: targetExpectedOffset } ); } } return { shaderBaseType: data.shaderBaseType, testComponentCount: maxCount * componentCount, floatTolerance: data.floatTolerance, expectedData: expandedExpectedData.buffer, vertexData: expandedVertexData.buffer, }; } // Copies `size` bytes from `source` to `target` starting at `offset` each `targetStride`. // (the data in `source` is assumed packed) interleaveVertexDataInto( target: ArrayBuffer, src: ArrayBuffer, { targetStride, offset, size }: { targetStride: number; offset: number; size: number } ) { const dst = new Uint8Array(target); for ( let srcStart = 0, dstStart = offset; srcStart < src.byteLength; srcStart += size, dstStart += targetStride ) { memcpy({ src, start: srcStart, length: size }, { dst, start: dstStart }); } } createTestAndPipelineData<V, A>( state: VertexLayoutState<V, A>, vertexCount: number, instanceCount: number ): VertexLayoutState<V, A & TestData> { // Gather the test data and some additional test state for attribs. return mapStateAttribs(state, (buffer, attrib) => { const maxCount = buffer.stepMode === 'instance' ? 
instanceCount : vertexCount; const formatInfo = kVertexFormatInfo[attrib.format]; let testData = this.generateTestData(attrib.format); testData = this.expandTestData(testData, maxCount, formatInfo.componentCount); return { ...testData, ...attrib, }; }); } createExpectedBG(state: VertexState<{}, TestData>, pipeline: GPURenderPipeline): GPUBindGroup { // Create the bindgroups from that test data const bgEntries: GPUBindGroupEntry[] = []; for (const buffer of state) { for (const attrib of buffer.attributes) { const expectedDataBuffer = this.makeBufferWithContents( new Uint8Array(attrib.expectedData), GPUBufferUsage.UNIFORM | GPUBufferUsage.STORAGE ); bgEntries.push({ binding: attrib.shaderLocation, resource: { buffer: expectedDataBuffer }, }); } } return this.device.createBindGroup({ layout: pipeline.getBindGroupLayout(0), entries: bgEntries, }); } createVertexBuffers( state: VertexLayoutState<{ vbOffset?: number }, TestData>, vertexCount: number, instanceCount: number ): VertexState<{ buffer: GPUBuffer; vbOffset?: number }, {}> { // Create the vertex buffers const vertexBuffers: VertexState<{ buffer: GPUBuffer; vbOffset?: number }, {}> = []; for (const buffer of state) { const maxCount = buffer.stepMode === 'instance' ? instanceCount : vertexCount; // Fill the vertex data with garbage so that we don't get `0` (which could be a test value) // if the vertex shader loads the vertex data incorrectly. const vertexData = new ArrayBuffer( align(buffer.arrayStride * maxCount + (buffer.vbOffset ?? 0), 4) ); new Uint8Array(vertexData).fill(0xc4); for (const attrib of buffer.attributes) { const formatInfo = kVertexFormatInfo[attrib.format]; this.interleaveVertexDataInto(vertexData, attrib.vertexData, { targetStride: buffer.arrayStride, offset: (buffer.vbOffset ?? 
0) + attrib.offset, size: formatInfo.componentCount * formatInfo.bytesPerComponent, }); } vertexBuffers.push({ slot: buffer.slot, buffer: this.makeBufferWithContents(new Uint8Array(vertexData), GPUBufferUsage.VERTEX), vbOffset: buffer.vbOffset, attributes: [], }); } return vertexBuffers; } runTest( buffers: VertexLayoutState<{ vbOffset?: number }, { shaderComponentCount?: number }>, // Default to using 20 vertices and 20 instances so that we cover each of the test data at least // once (at the time of writing the largest testData has 16 values). vertexCount: number = 20, instanceCount: number = 20 ) { const testData = this.createTestAndPipelineData(buffers, vertexCount, instanceCount); const pipeline = this.makeTestPipeline(testData, vertexCount, instanceCount); const expectedDataBG = this.createExpectedBG(testData, pipeline); const vertexBuffers = this.createVertexBuffers(testData, vertexCount, instanceCount); this.submitRenderPass(pipeline, vertexBuffers, expectedDataBG, vertexCount, instanceCount); } } export const g = makeTestGroup(VertexStateTest); g.test('vertex_format_to_shader_format_conversion') .desc( `Test that the raw data passed in vertex buffers is correctly converted to the input type in the shader. 
Test for: - all formats - 1 to 4 components in the shader's input type (unused components are filled with 0 and except the 4th with 1) - various locations - various slots` ) .params(u => u // .combine('format', kVertexFormats) .combine('shaderComponentCount', [1, 2, 3, 4]) .beginSubcases() .combine('slot', [0, 1, kMaxVertexBuffers - 1]) .combine('shaderLocation', [0, 1, kMaxVertexAttributes - 1]) ) .fn(t => { const { format, shaderComponentCount, slot, shaderLocation } = t.params; t.runTest([ { slot, arrayStride: 16, stepMode: 'vertex', attributes: [ { shaderLocation, format, offset: 0, shaderComponentCount, }, ], }, ]); }); g.test('setVertexBuffer_offset_and_attribute_offset') .desc( `Test that the vertex buffer offset and attribute offset in the vertex state are applied correctly. Test for: - all formats - various setVertexBuffer offsets - various attribute offsets in a fixed arrayStride` ) .params(u => u // .combine('format', kVertexFormats) .beginSubcases() .combine('vbOffset', [0, 4, 400, 1004]) .combine('arrayStride', [128]) .expand('offset', p => { const formatInfo = kVertexFormatInfo[p.format]; const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount; return new Set([ 0, 4, 8, formatSize, formatSize * 2, p.arrayStride / 2, p.arrayStride - formatSize - 4, p.arrayStride - formatSize - 8, p.arrayStride - formatSize - formatSize, p.arrayStride - formatSize - formatSize * 2, p.arrayStride - formatSize, ]); }) ) .fn(t => { const { format, vbOffset, arrayStride, offset } = t.params; t.runTest([ { slot: 0, arrayStride, stepMode: 'vertex', vbOffset, attributes: [ { shaderLocation: 0, format, offset, }, ], }, ]); }); g.test('non_zero_array_stride_and_attribute_offset') .desc( `Test that the array stride and attribute offset in the vertex state are applied correctly. 
Test for: - all formats - various array strides - various attribute offsets in a fixed arrayStride` ) .params(u => u // .combine('format', kVertexFormats) .beginSubcases() .expand('arrayStride', p => { const formatInfo = kVertexFormatInfo[p.format]; const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount; return [align(formatSize, 4), align(formatSize, 4) + 4, kMaxVertexBufferArrayStride]; }) .expand('offset', p => { const formatInfo = kVertexFormatInfo[p.format]; const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount; return new Set( [ 0, formatSize, 4, p.arrayStride / 2, p.arrayStride - formatSize * 2, p.arrayStride - formatSize - 4, p.arrayStride - formatSize, ].map(offset => clamp(offset, { min: 0, max: p.arrayStride - formatSize })) ); }) ) .fn(t => { const { format, arrayStride, offset } = t.params; t.runTest([ { slot: 0, arrayStride, stepMode: 'vertex', attributes: [ { shaderLocation: 0, format, offset, }, ], }, ]); }); g.test('buffers_with_varying_step_mode') .desc( `Test buffers with varying step modes in the same vertex state. - Various combination of step modes` ) .paramsSubcasesOnly(u => u // .combine('stepModes', [ ['instance'], ['vertex', 'vertex', 'instance'], ['instance', 'vertex', 'instance'], ['vertex', 'instance', 'vertex', 'vertex'], ]) ) .fn(t => { const { stepModes } = t.params; const state = (stepModes as GPUVertexStepMode[]).map((stepMode, i) => ({ slot: i, arrayStride: 4, stepMode, attributes: [ { shaderLocation: i, format: 'float32' as const, offset: 0, }, ], })); t.runTest(state); }); g.test('vertex_buffer_used_multiple_times_overlapped') .desc( `Test using the same vertex buffer in for multiple "vertex buffers", with data from each buffer overlapping. - For each vertex format. 
- For various numbers of vertex buffers [2, 3, max]` ) .params(u => u // .combine('format', kVertexFormats) .beginSubcases() .combine('vbCount', [2, 3, kMaxVertexBuffers]) .combine('additionalVBOffset', [0, 4, 120]) ) .fn(t => { const { format, vbCount, additionalVBOffset } = t.params; const kVertexCount = 20; const kInstanceCount = 1; const formatInfo = kVertexFormatInfo[format]; const formatByteSize = formatInfo.bytesPerComponent * formatInfo.componentCount; // We need to align so the offset for non-0 setVertexBuffer don't fail validation. const alignedFormatByteSize = align(formatByteSize, 4); // In this test we want to test using the same vertex buffer for multiple different attributes. // For example if vbCount is 3, we will create a vertex buffer containing the following data: // a0, a1, a2, a3, ..., a<baseDataVertexCount> // We also create the expected data for the vertex fetching from that buffer so we can modify it // below. const baseDataVertexCount = kVertexCount + vbCount - 1; const baseData = t.createTestAndPipelineData( [ { slot: 0, arrayStride: alignedFormatByteSize, stepMode: 'vertex', vbOffset: additionalVBOffset, attributes: [{ shaderLocation: 0, format, offset: 0 }], }, ], baseDataVertexCount, kInstanceCount ); const vertexBuffer = t.createVertexBuffers(baseData, baseDataVertexCount, kInstanceCount)[0] .buffer; // We are going to bind the vertex buffer multiple times, each time at a different offset that's // a multiple of the data size. So what should be fetched by the vertex shader is: // - attrib0: a0, a1, ..., a19 // - attrib1: a1, a2, ..., a20 // - attrib2: a2, a3, ..., a21 // etc. // We re-create the test data by: // 1) creating multiple "vertex buffers" that all point at the GPUBuffer above but at // different offsets. // 2) selecting what parts of the expectedData each attribute will see in the expectedData for // the full vertex buffer. 
const baseTestData = baseData[0].attributes[0]; assert(baseTestData.testComponentCount === formatInfo.componentCount * baseDataVertexCount); const expectedDataBytesPerVertex = baseTestData.expectedData.byteLength / baseDataVertexCount; const testData: VertexLayoutState<{}, TestData> = []; const vertexBuffers: VertexState<{ buffer: GPUBuffer; vbOffset: number }, {}> = []; for (let i = 0; i < vbCount; i++) { vertexBuffers.push({ buffer: vertexBuffer, slot: i, vbOffset: additionalVBOffset + i * alignedFormatByteSize, attributes: [], }); testData.push({ slot: i, arrayStride: alignedFormatByteSize, stepMode: 'vertex', attributes: [ { shaderLocation: i, format, offset: 0, shaderBaseType: baseTestData.shaderBaseType, floatTolerance: baseTestData.floatTolerance, // Select vertices [i, i + kVertexCount] testComponentCount: kVertexCount * formatInfo.componentCount, expectedData: baseTestData.expectedData.slice( expectedDataBytesPerVertex * i, expectedDataBytesPerVertex * (kVertexCount + i) ), vertexData: new ArrayBuffer(0), }, ], }); } // Run the test with the modified test data. const pipeline = t.makeTestPipeline(testData, kVertexCount, kInstanceCount); const expectedDataBG = t.createExpectedBG(testData, pipeline); t.submitRenderPass(pipeline, vertexBuffers, expectedDataBG, kVertexCount, kInstanceCount); }); g.test('vertex_buffer_used_multiple_times_interleaved') .desc( `Test using the same vertex buffer in for multiple "vertex buffers", with data from each buffer interleaved. - For each vertex format. 
- For various numbers of vertex buffers [2, 3, max]` ) .params(u => u // .combine('format', kVertexFormats) .beginSubcases() .combine('vbCount', [2, 3, kMaxVertexBuffers]) .combine('additionalVBOffset', [0, 4, 120]) ) .fn(t => { const { format, vbCount, additionalVBOffset } = t.params; const kVertexCount = 20; const kInstanceCount = 1; const formatInfo = kVertexFormatInfo[format]; const formatByteSize = formatInfo.bytesPerComponent * formatInfo.componentCount; // We need to align so the offset for non-0 setVertexBuffer don't fail validation. const alignedFormatByteSize = align(formatByteSize, 4); // Create data for a single vertex buffer with many attributes, that will be split between // many vertexbuffers set at different offsets. // In this test we want to test using the same vertex buffer for multiple different attributes. // For example if vbCount is 3, we will create a vertex buffer containing the following data: // a0, a0, a0, a1, a1, a1, ... // To do that we create a single vertex buffer with `vbCount` attributes that all have the same // format. const attribs: GPUVertexAttribute[] = []; for (let i = 0; i < vbCount; i++) { attribs.push({ format, offset: i * alignedFormatByteSize, shaderLocation: i }); } const baseData = t.createTestAndPipelineData( [ { slot: 0, arrayStride: alignedFormatByteSize * vbCount, stepMode: 'vertex', vbOffset: additionalVBOffset, attributes: attribs, }, ], // Request one vertex more than what we need so we have an extra full stride. Otherwise WebGPU // validation of vertex being in bounds will fail for all vertex buffers at an offset that's // not 0 (since their last stride will go beyond the data for vertex kVertexCount -1). kVertexCount + 1, kInstanceCount ); const vertexBuffer = t.createVertexBuffers(baseData, kVertexCount + 1, kInstanceCount)[0] .buffer; // Then we recreate test data by: // 1) creating multiple "vertex buffers" that all point at the GPUBuffer above but at // different offsets. 
// 2) have multiple vertex buffer, each with one attributes that will expect a0, a1, ... const testData: VertexLayoutState<{}, TestData> = []; const vertexBuffers: VertexState<{ buffer: GPUBuffer; vbOffset: number }, {}> = []; for (let i = 0; i < vbCount; i++) { vertexBuffers.push({ slot: i, buffer: vertexBuffer, vbOffset: additionalVBOffset + i * alignedFormatByteSize, attributes: [], }); testData.push({ ...baseData[0], slot: i, attributes: [{ ...baseData[0].attributes[i], offset: 0 }], }); } // Run the test with the modified test data. const pipeline = t.makeTestPipeline(testData, kVertexCount, kInstanceCount); const expectedDataBG = t.createExpectedBG(testData, pipeline); t.submitRenderPass(pipeline, vertexBuffers, expectedDataBG, kVertexCount, kInstanceCount); }); g.test('max_buffers_and_attribs') .desc( `Test a vertex state that loads as many attributes and buffers as possible. - For each format. ` ) .params(u => u.combine('format', kVertexFormats)) .fn(t => { const { format } = t.params; const attributesPerBuffer = Math.ceil(kMaxVertexAttributes / kMaxVertexBuffers); let attributesEmitted = 0; const state: VertexLayoutState<{}, {}> = []; for (let i = 0; i < kMaxVertexBuffers; i++) { const attributes: GPUVertexAttribute[] = []; for (let j = 0; j < attributesPerBuffer && attributesEmitted < kMaxVertexAttributes; j++) { attributes.push({ format, offset: 0, shaderLocation: attributesEmitted }); attributesEmitted++; } state.push({ slot: i, stepMode: 'vertex', arrayStride: 32, attributes, }); } t.runTest(state); }); g.test('array_stride_zero') .desc( `Test that arrayStride 0 correctly uses the same data for all vertex/instances, while another test vertex buffer with arrayStrude != 0 gets different data. 
- Test for all formats - Test for both step modes` ) .params(u => u // .combine('format', kVertexFormats) .beginSubcases() .combine('stepMode', ['vertex', 'instance'] as const) .expand('offset', p => { const formatInfo = kVertexFormatInfo[p.format]; const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount; return new Set([ 0, 4, 8, formatSize, formatSize * 2, kMaxVertexBufferArrayStride / 2, kMaxVertexBufferArrayStride - formatSize - 4, kMaxVertexBufferArrayStride - formatSize - 8, kMaxVertexBufferArrayStride - formatSize, kMaxVertexBufferArrayStride - formatSize * 2, ]); }) ) .fn(t => { const { format, stepMode, offset } = t.params; const kCount = 10; // Create the stride 0 part of the test, first by faking a single vertex being drawn and // then expanding the data to cover kCount vertex / instances const stride0TestData = t.createTestAndPipelineData( [ { slot: 0, arrayStride: 2048, stepMode, vbOffset: offset, // used to push data in the vertex buffer attributes: [{ format, offset: 0, shaderLocation: 0 }], }, ], 1, 1 )[0]; const stride0VertexBuffer = t.createVertexBuffers([stride0TestData], kCount, kCount)[0]; // Expand the stride0 test data to have kCount values for expectedData. const originalData = stride0TestData.attributes[0].expectedData; const expandedData = new ArrayBuffer(kCount * originalData.byteLength); for (let i = 0; i < kCount; i++) { new Uint8Array(expandedData, originalData.byteLength * i).set(new Uint8Array(originalData)); } // Fixup stride0TestData to use arrayStride 0. 
stride0TestData.attributes[0].offset = offset; stride0TestData.attributes[0].expectedData = expandedData; stride0TestData.attributes[0].testComponentCount *= kCount; stride0TestData.arrayStride = 0; stride0VertexBuffer.vbOffset = 0; // Create the part of the state that will be varying for each vertex / instance const varyingTestData = t.createTestAndPipelineData( [ { slot: 1, arrayStride: 32, stepMode, attributes: [{ format, offset: 0, shaderLocation: 1 }], }, ], kCount, kCount )[0]; const varyingVertexBuffer = t.createVertexBuffers([varyingTestData], kCount, kCount)[0]; // Run the test with the merged test state. const state = [stride0TestData, varyingTestData]; const vertexBuffers = [stride0VertexBuffer, varyingVertexBuffer]; const pipeline = t.makeTestPipeline(state, kCount, kCount); const expectedDataBG = t.createExpectedBG(state, pipeline); t.submitRenderPass(pipeline, vertexBuffers, expectedDataBG, kCount, kCount); }); g.test('discontiguous_location_and_attribs') .desc('Test that using far away slots / shaderLocations works as expected') .fn(t => { t.runTest([ { slot: kMaxVertexBuffers - 1, arrayStride: 4, stepMode: 'vertex', attributes: [ { format: 'uint8x2', offset: 2, shaderLocation: 0 }, { format: 'uint8x2', offset: 0, shaderLocation: 8 }, ], }, { slot: 1, arrayStride: 16, stepMode: 'instance', vbOffset: 1000, attributes: [{ format: 'uint32x4', offset: 0, shaderLocation: kMaxVertexAttributes - 1 }], }, ]); }); g.test('overlapping_attributes') .desc( `Test that overlapping attributes in the same vertex buffer works - Test for all formats` ) .params(u => u.combine('format', kVertexFormats)) .fn(t => { const { format } = t.params; const attributes: GPUVertexAttribute[] = []; for (let i = 0; i < kMaxVertexAttributes; i++) { attributes.push({ format, offset: 0, shaderLocation: i }); } t.runTest([ { slot: 0, stepMode: 'vertex', arrayStride: 32, attributes, }, ]); });
sarahM0/cts
src/common/runtime/helper/test_worker.ts
<reponame>sarahM0/cts<filename>src/common/runtime/helper/test_worker.ts<gh_stars>10-100 import { LogMessageWithStack } from '../../internal/logging/log_message.js'; import { TransferredTestCaseResult, LiveTestCaseResult } from '../../internal/logging/result.js'; import { TestCaseRecorder } from '../../internal/logging/test_case_recorder.js'; import { TestQueryWithExpectation } from '../../internal/query/query.js'; export class TestWorker { private readonly debug: boolean; private readonly worker: Worker; private readonly resolvers = new Map<string, (result: LiveTestCaseResult) => void>(); constructor(debug: boolean) { this.debug = debug; const selfPath = import.meta.url; const selfPathDir = selfPath.substring(0, selfPath.lastIndexOf('/')); const workerPath = selfPathDir + '/test_worker-worker.js'; this.worker = new Worker(workerPath, { type: 'module' }); this.worker.onmessage = ev => { const query: string = ev.data.query; const result: TransferredTestCaseResult = ev.data.result; if (result.logs) { for (const l of result.logs) { Object.setPrototypeOf(l, LogMessageWithStack.prototype); } } this.resolvers.get(query)!(result as LiveTestCaseResult); // TODO(kainino0x): update the Logger with this result (or don't have a logger and update the // entire results JSON somehow at some point). }; } async run( rec: TestCaseRecorder, query: string, expectations: TestQueryWithExpectation[] = [] ): Promise<void> { this.worker.postMessage({ query, expectations, debug: this.debug }); const workerResult = await new Promise<LiveTestCaseResult>(resolve => { this.resolvers.set(query, resolve); }); rec.injectResult(workerResult); } }
sarahM0/cts
src/webgpu/api/validation/initialization/requestDevice.spec.ts
export const description = ` Test validation conditions for requestDevice. `;

import { Fixture } from '../../../../common/framework/fixture.js';
import { makeTestGroup } from '../../../../common/framework/test_group.js';

// These tests exercise requestDevice() itself, so they use the plain Fixture
// (no pre-created GPU device). All cases below are placeholders (.unimplemented()).
export const g = makeTestGroup(Fixture);

g.test('features,nonexistent')
  .desc('requestDevice with a made-up feature name. Should resolve to null.')
  .unimplemented();

g.test('features,known_but_unavailable')
  .desc(
    `requestDevice with a valid feature that's unavailable on the adapter. Should resolve to null. (Skipped if such a feature can't be found. But most browsers should support both BC and ETC while most hardware should only support one.)`
  )
  .unimplemented();

g.test('limits')
  .desc(
    `For each limit, request with various values. Some should resolve to null. (TODO: which?) - value = { - less than default - default - default + 1 - best available - better than what's available - } `
  )
  .unimplemented();
sarahM0/cts
src/common/internal/file_loader.ts
<filename>src/common/internal/file_loader.ts
import { IterableTestGroup } from '../internal/test_group.js';
import { assert } from '../util/util.js';
import { parseQuery } from './query/parseQuery.js';
import { TestQuery } from './query/query.js';
import { TestSuiteListing } from './test_suite_listing.js';
import { loadTreeForQuery, TestTree, TestTreeLeaf } from './tree.js';

// A listing file, e.g. either of:
// - `src/webgpu/listing.ts` (which is dynamically computed, has a Promise<TestSuiteListing>)
// - `out/webgpu/listing.js` (which is pre-baked, has a TestSuiteListing)
interface ListingFile {
  listing: Promise<TestSuiteListing> | TestSuiteListing;
}

// A .spec.ts file, as imported.
export interface SpecFile {
  readonly description: string;
  readonly g: IterableTestGroup;
}

// Base class for DefaultTestFileLoader and FakeTestFileLoader.
export abstract class TestFileLoader {
  /** Returns the listing (set of spec files) for the named suite. */
  abstract listing(suite: string): Promise<TestSuiteListing>;
  /** Imports one module, given its path relative to the suites root. */
  protected abstract import(path: string): Promise<SpecFile>;

  /** Imports one .spec.js file, given its suite name and path segments within the suite. */
  importSpecFile(suite: string, path: string[]): Promise<SpecFile> {
    return this.import(`${suite}/${path.join('/')}.spec.js`);
  }

  /**
   * Loads the test tree for a query.
   * Each `subqueriesToExpand` entry must parse to a query of level >= 2
   * (i.e. not a whole-file query) — enforced by the assert below.
   */
  async loadTree(query: TestQuery, subqueriesToExpand: string[] = []): Promise<TestTree> {
    return loadTreeForQuery(
      this,
      query,
      subqueriesToExpand.map(s => {
        const q = parseQuery(s);
        assert(q.level >= 2, () => `subqueriesToExpand entries should not be multi-file:\n ${q}`);
        return q;
      })
    );
  }

  /** Loads the tree for `query` and flattens it to its leaves (individual runnable cases). */
  async loadCases(query: TestQuery): Promise<IterableIterator<TestTreeLeaf>> {
    const tree = await this.loadTree(query);
    return tree.iterateLeaves();
  }
}

// Loads suite listings and spec files via dynamic import(), relative to this module's
// location in the build output.
export class DefaultTestFileLoader extends TestFileLoader {
  async listing(suite: string): Promise<TestSuiteListing> {
    return ((await import(`../../${suite}/listing.js`)) as ListingFile).listing;
  }

  import(path: string): Promise<SpecFile> {
    return import(`../../${path}`);
  }
}
sarahM0/cts
src/stress/memory/oom.spec.ts
export const description = ` Stress tests covering robustness when available VRAM is exhausted. `;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';
import { exhaustVramUntilUnder64MB } from '../../webgpu/util/memory.js';

export const g = makeTestGroup(GPUTest);

// Only vram_oom has an implementation; the remaining cases are placeholders.
g.test('vram_oom')
  .desc(`Tests that we can allocate buffers until we run out of VRAM.`)
  .fn(async t => {
    // Helper keeps allocating on t.device until less than 64MB of VRAM remains.
    await exhaustVramUntilUnder64MB(t.device);
  });

g.test('get_mapped_range')
  .desc(
    `Tests getMappedRange on a mappedAtCreation GPUBuffer that failed allocation due to OOM. This should throw a RangeError, but below a certain threshold may just crash the page.`
  )
  .unimplemented();

g.test('map_after_vram_oom')
  .desc(
    `Allocates tons of buffers and textures with varying mapping states (unmappable, mappable, mapAtCreation, mapAtCreation-then-unmapped) until OOM; then attempts to mapAsync all the mappable objects.`
  )
  .unimplemented();

g.test('validation_vs_oom')
  .desc(
    `Tests that calls affected by both OOM and validation errors expose the validation error with precedence.`
  )
  .unimplemented();

g.test('recovery')
  .desc(
    `Tests that after going VRAM-OOM, destroying allocated resources eventually allows new resources to be allocated.`
  )
  .unimplemented();
sarahM0/cts
src/webgpu/api/validation/resource_usages/texture/in_render_misc.spec.ts
<reponame>sarahM0/cts
export const description = ` TODO: - 2 views: upon the same subresource, or different subresources of the same texture - texture usages in copies and in render pass - consecutively set bind groups on the same index (@Richard-Yunchao: Maybe I can combine this one with the above tests. The two bind groups can either have the same index or different indices.) - unused bind groups `;

import { makeTestGroup } from '../../../../../common/framework/test_group.js';
import { ValidationTest } from '../../validation_test.js';

// Placeholder file: the group is declared so the suite listing stays valid, but no
// cases have been written yet (see the TODO list in `description` above).
export const g = makeTestGroup(ValidationTest);
sarahM0/cts
src/webgpu/api/validation/layout_shader_compat.spec.ts
export const description = ` TODO: - interface matching between pipeline layout and shader - x= {compute, vertex, fragment, vertex+fragment}, visibilities - x= bind group index values, binding index values, multiple bindings - x= types of bindings - x= {equal, superset, subset} `;

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { ValidationTest } from './validation_test.js';

// Placeholder file: the group is declared so the suite listing stays valid, but the
// cases in the TODO list above have not been written yet.
export const g = makeTestGroup(ValidationTest);
sarahM0/cts
src/common/runtime/helper/test_worker-worker.ts
<gh_stars>10-100
import { setBaseResourcePath } from '../../framework/resources.js';
import { DefaultTestFileLoader } from '../../internal/file_loader.js';
import { Logger } from '../../internal/logging/logger.js';
import { parseQuery } from '../../internal/query/parseQuery.js';
import { TestQueryWithExpectation } from '../../internal/query/query.js';
import { assert } from '../../util/util.js';

// Worker-side entry point paired with TestWorker (test_worker.ts): receives one
// single-case query per message, runs it, and posts the result back to the page.

// Should be DedicatedWorkerGlobalScope, but importing lib "webworker" conflicts with lib "dom".
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
declare const self: any;

const loader = new DefaultTestFileLoader();
setBaseResourcePath('../../../resources');

self.onmessage = async (ev: MessageEvent) => {
  const query: string = ev.data.query;
  const expectations: TestQueryWithExpectation[] = ev.data.expectations;
  const debug: boolean = ev.data.debug;
  // Debug mode is global per worker; it applies to every subsequent case too.
  Logger.globalDebugMode = debug;

  const log = new Logger();

  // The page sends exactly one runnable case per message; anything else is a protocol error.
  const testcases = Array.from(await loader.loadCases(parseQuery(query)));
  assert(testcases.length === 1, 'worker query resulted in != 1 cases');

  const testcase = testcases[0];
  const [rec, result] = log.record(testcase.query.toString());
  await testcase.run(rec, expectations);

  // NOTE(review): if loadCases/run rejects, no message is posted and the page-side
  // promise for this query never settles — confirm whether that hang is acceptable.
  self.postMessage({ query, result });
};
sarahM0/cts
src/webgpu/web_platform/canvas/context_creation.spec.ts
<gh_stars>10-100
export const description = ` Tests for canvas context creation. Note there are no context creation attributes for WebGPU (as of this writing). Options are configured in configure() instead. `;

import { Fixture } from '../../../common/framework/fixture.js';
import { makeTestGroup } from '../../../common/framework/test_group.js';

export const g = makeTestGroup(Fixture);

g.test('return_type')
  .desc(
    `Test the return type of getContext for WebGPU. TODO: Test OffscreenCanvas made from transferControlToOffscreen.`
  )
  .params(u =>
    u //
      .combine('offscreen', [false, true])
      .beginSubcases()
      .combine('attributes', [undefined, {}])
  )
  .fn(async t => {
    let canvas: HTMLCanvasElement | OffscreenCanvas;
    if (t.params.offscreen) {
      if (typeof OffscreenCanvas === 'undefined') {
        // Skip if the current context doesn't have OffscreenCanvas (e.g. Node).
        t.skip('OffscreenCanvas is not available in this context');
      }
      canvas = new OffscreenCanvas(10, 10);
    } else {
      if (typeof document === 'undefined') {
        // Skip if there is no document (Workers, Node)
        t.skip('DOM is not available to create canvas element');
      }
      // NOTE(review): createElement's second argument is ElementCreationOptions
      // (normally `{ is }` for custom elements). Presumably `attributes` is passed
      // here to show it has no effect on context creation — confirm intent.
      canvas = document.createElement('canvas', t.params.attributes);
      canvas.width = 10;
      canvas.height = 10;
    }

    // getContext('webgpu') must return a GPUCanvasContext instance.
    const ctx = canvas.getContext('webgpu');
    t.expect(ctx instanceof GPUCanvasContext);
  });
sarahM0/cts
src/webgpu/api/operation/rendering/indirect_draw.spec.ts
<gh_stars>0
export const description = ` Tests for the indirect-specific aspects of drawIndirect/drawIndexedIndirect. `;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';

export const g = makeTestGroup(GPUTest);

// Expected RGBA8 bytes for a pixel covered by the green draw vs. one left untouched.
const filled = new Uint8Array([0, 255, 0, 255]);
const notFilled = new Uint8Array([0, 0, 0, 0]);

// Byte size of one drawIndirect args struct:
// { vertexCount, instanceCount, firstVertex, firstInstance }, each a u32.
const kDrawIndirectParametersSize = 4 * Uint32Array.BYTES_PER_ELEMENT;

g.test('basics,drawIndirect')
  .desc(
    `Test that the indirect draw parameters are tightly packed for drawIndirect. An indirectBuffer is created based on indirectOffset. The actual draw args being used indicated by the indirectOffset is going to draw a left bottom triangle. While the remaining indirectBuffer is populated with random numbers or draw args that draw right top triangle, both, or nothing which will fail the color check. The test will check render target to see if only the left bottom area is filled, meaning the expected draw args is uploaded correctly by the indirectBuffer and indirectOffset.
Params: - indirectOffset= {0, 4, k * sizeof(args struct), k * sizeof(args struct) + 4} `
  )
  .paramsSubcasesOnly(u =>
    u //
      .combine('indirectOffset', [
        0,
        Uint32Array.BYTES_PER_ELEMENT,
        1 * kDrawIndirectParametersSize,
        1 * kDrawIndirectParametersSize + Uint32Array.BYTES_PER_ELEMENT,
        3 * kDrawIndirectParametersSize,
        3 * kDrawIndirectParametersSize + Uint32Array.BYTES_PER_ELEMENT,
        99 * kDrawIndirectParametersSize,
        99 * kDrawIndirectParametersSize + Uint32Array.BYTES_PER_ELEMENT,
      ] as const)
  )
  .fn(t => {
    const { indirectOffset } = t.params;

    // Index (in u32 elements) of the expected args struct within the indirect buffer.
    const o = indirectOffset / Uint32Array.BYTES_PER_ELEMENT;
    const arraySize = o + 8;
    // Fill everything else with random values so a wrong offset is unlikely to
    // accidentally reproduce the expected draw.
    const indirectBuffer = [...Array(arraySize)].map(() => Math.floor(Math.random() * 100));

    // draw args that will draw the left bottom triangle (expected call)
    indirectBuffer[o] = 3; // vertexCount
    indirectBuffer[o + 1] = 1; // instanceCount
    indirectBuffer[o + 2] = 0; // firstVertex
    indirectBuffer[o + 3] = 0; // firstInstance

    // draw args that will draw both triangles
    indirectBuffer[o + 4] = 6; // vertexCount
    indirectBuffer[o + 5] = 1; // instanceCount
    indirectBuffer[o + 6] = 0; // firstVertex
    indirectBuffer[o + 7] = 0; // firstInstance

    if (o >= 4) {
      // draw args that will draw the right top triangle
      indirectBuffer[o - 4] = 3; // vertexCount
      indirectBuffer[o - 3] = 1; // instanceCount
      indirectBuffer[o - 2] = 3; // firstVertex
      indirectBuffer[o - 1] = 0; // firstInstance
    }

    if (o >= 8) {
      // draw args that will draw nothing
      indirectBuffer[0] = 0; // vertexCount
      indirectBuffer[1] = 0; // instanceCount
      indirectBuffer[2] = 0; // firstVertex
      indirectBuffer[3] = 0; // firstInstance
    }

    const kRenderTargetFormat = 'rgba8unorm';
    // Pass-through vertex shader; fragment shader outputs solid green (== `filled`).
    const pipeline = t.device.createRenderPipeline({
      vertex: {
        module: t.device.createShaderModule({
          code: `[[stage(vertex)]] fn main([[location(0)]] pos : vec2<f32>) -> [[builtin(position)]] vec4<f32> { return vec4<f32>(pos, 0.0, 1.0); }`,
        }),
        entryPoint: 'main',
        buffers: [
          {
            attributes: [
              {
                shaderLocation: 0,
                format: 'float32x2',
                offset: 0,
              },
            ],
            arrayStride: 2 * Float32Array.BYTES_PER_ELEMENT,
          },
        ],
      },
      fragment: {
        module: t.device.createShaderModule({
          code: `[[stage(fragment)]] fn main() -> [[location(0)]] vec4<f32> { return vec4<f32>(0.0, 1.0, 0.0, 1.0); }`,
        }),
        entryPoint: 'main',
        targets: [
          {
            format: kRenderTargetFormat,
          },
        ],
      },
    });

    const renderTarget = t.device.createTexture({
      size: [4, 4],
      usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC,
      format: kRenderTargetFormat,
    });

    const commandEncoder = t.device.createCommandEncoder();
    const renderPass = commandEncoder.beginRenderPass({
      colorAttachments: [
        {
          view: renderTarget.createView(),
          loadValue: [0, 0, 0, 0],
          storeOp: 'store',
        },
      ],
    });
    renderPass.setPipeline(pipeline);
    // Vertex buffer holds both triangles; which one(s) get drawn depends solely on
    // the indirect args selected by indirectOffset.
    renderPass.setVertexBuffer(
      0,
      t.makeBufferWithContents(
        /* prettier-ignore */ new Float32Array([
        // The bottom left triangle
        -1.0, 1.0, 1.0, -1.0, -1.0, -1.0,
        // The top right triangle
        -1.0, 1.0, 1.0, -1.0, 1.0, 1.0,
        ]),
        GPUBufferUsage.VERTEX
      ),
      0
    );
    renderPass.drawIndirect(
      t.makeBufferWithContents(new Uint32Array(indirectBuffer), GPUBufferUsage.INDIRECT),
      indirectOffset
    );
    renderPass.endPass();
    t.queue.submit([commandEncoder.finish()]);

    // The bottom left area is filled
    t.expectSinglePixelIn2DTexture(
      renderTarget,
      kRenderTargetFormat,
      { x: 0, y: 1 },
      { exp: filled }
    );
    // The top right area is not filled
    t.expectSinglePixelIn2DTexture(
      renderTarget,
      kRenderTargetFormat,
      { x: 1, y: 0 },
      { exp: notFilled }
    );
  });

g.test('basics,drawIndexedIndirect')
  .desc(
    `Test that the indirect draw parameters are tightly packed for drawIndexedIndirect. `
  )
  .unimplemented();
sarahM0/cts
src/webgpu/api/validation/encoding/cmds/render_pass.spec.ts
export const description = `
Validation tests for render pass encoding.
Does **not** test usage scopes (resource_usages/), GPUProgrammablePassEncoder (programmable_pass),
dynamic state (dynamic_render_state.spec.ts), or GPURenderEncoderBase (render.spec.ts).

TODO:
- executeBundles:
    - with {zero, one, multiple} bundles where {zero, one} of them are invalid objects
`;

import { makeTestGroup } from '../../../../../common/framework/test_group.js';

import { ValidationTest } from '../../validation_test.js';

// Test group for this file; no cases exist yet — see the TODO list in `description`.
export const g = makeTestGroup(ValidationTest);
sarahM0/cts
src/demo/file_depth_2/in_single_child_dir/r.spec.ts
export const description = 'Description for r.spec.ts';

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { UnitTest } from '../../../unittests/unit_test.js';

// Demo placeholder file (depth 2, single child dir) used to exercise the listing machinery.
export const g = makeTestGroup(UnitTest);
sarahM0/cts
src/common/util/navigator_gpu.ts
/// <reference types="@webgpu/types" /> import { assert } from './util.js'; /** * Finds and returns the `navigator.gpu` object (or equivalent, for non-browser implementations). * Throws an exception if not found. */ function defaultGPUProvider(): GPU { assert( typeof navigator !== 'undefined' && navigator.gpu !== undefined, 'No WebGPU implementation found' ); return navigator.gpu; } /** * GPUProvider is a function that creates and returns a new GPU instance. * May throw an exception if a GPU cannot be created. */ export type GPUProvider = () => GPU; let gpuProvider: GPUProvider = defaultGPUProvider; /** * Sets the function to create and return a new GPU instance. */ export function setGPUProvider(provider: GPUProvider) { assert(impl === undefined, 'setGPUProvider() should not be after getGPU()'); gpuProvider = provider; } let impl: GPU | undefined = undefined; /** * Finds and returns the `navigator.gpu` object (or equivalent, for non-browser implementations). * Throws an exception if not found. */ export function getGPU(): GPU { if (impl) { return impl; } impl = gpuProvider(); return impl; }
sarahM0/cts
src/common/util/data_tables.ts
import { ResolveType, ZipKeysWithValues } from './types.js';

export type valueof<K> = K[keyof K];

/** Returns the keys of `obj`, typed as the literal key union instead of plain `string[]`. */
export function keysOf<T extends string>(obj: { [k in T]: unknown }): readonly T[] {
  return (Object.keys(obj) as unknown[]) as T[];
}

/** Returns the keys of `obj` converted to numbers, for tables keyed by numeric values. */
export function numericKeysOf<T>(obj: object): readonly T[] {
  return (Object.keys(obj).map(n => Number(n)) as unknown[]) as T[];
}

/**
 * Creates an info lookup object from a more nicely-formatted table: each row of `table` is
 * zipped with `members` to form one entry, with holes filled from `defaults`.
 *
 * Note: Using `as const` on the arguments to this function is necessary to infer the correct type.
 */
export function makeTable<
  Members extends readonly string[],
  Defaults extends readonly unknown[],
  Table extends { readonly [k: string]: readonly unknown[] }
>(
  members: Members,
  defaults: Defaults,
  table: Table
): {
  readonly [k in keyof Table]: ResolveType<ZipKeysWithValues<Members, Table[k], Defaults>>;
} {
  const result: { [k: string]: { [m: string]: unknown } } = {};
  for (const [rowName, rowValues] of Object.entries<readonly unknown[]>(table)) {
    const entry: { [m: string]: unknown } = {};
    members.forEach((member, column) => {
      // Missing (undefined/null) cells fall back to the per-column default.
      entry[member] = rowValues[column] ?? defaults[column];
    });
    result[rowName] = entry;
  }
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  return result as any;
}
sarahM0/cts
src/stress/adapter/device_allocation.ts
export const description = `
Stress tests for GPUAdapter.requestDevice.
`;

import { Fixture } from '../../common/framework/fixture.js';
import { makeTestGroup } from '../../common/framework/test_group.js';

export const g = makeTestGroup(Fixture);

// All cases below are declared but unimplemented; see the TODO in the first case's desc.
g.test('coexisting')
  .desc(
    `Tests allocation of many coexisting GPUDevice objects.

TODO: These stress tests might not make sense. Allocating lots of GPUDevices is currently crashy
in Chrome, and there's not a great reason for applications to do it. UAs should probably limit the
number of simultaneously active devices, but this is effectively blocked on implementing
GPUDevice.destroy() to give applications the necessary controls.`
  )
  .unimplemented();

g.test('continuous,with_destroy')
  .desc(
    `Tests allocation and destruction of many GPUDevice objects over time. Objects are sequentially
requested and destroyed over a very large number of iterations.`
  )
  .unimplemented();

g.test('continuous,no_destroy')
  .desc(
    `Tests allocation and implicit GC of many GPUDevice objects over time. Objects are sequentially
requested and dropped for GC over a very large number of iterations.`
  )
  .unimplemented();
sarahM0/cts
src/stress/texture/large.spec.ts
export const description = `
Stress tests covering usage of very large textures.
`;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';

export const g = makeTestGroup(GPUTest);

// All cases below are declared but unimplemented placeholders.
g.test('loading,2d')
  .desc(
    `Tests execution of shaders loading values from very large (up to at least 8192x8192) 2D textures. The
texture size is selected according to the limit supported by the GPUDevice.`
  )
  .unimplemented();

g.test('loading,2d_array')
  .desc(
    `Tests execution of shaders loading values from very large (up to at least 8192x8192x2048) arrays of
2D textures. The texture and array size is selected according to limits supported by the GPUDevice.`
  )
  .unimplemented();

g.test('loading,3d')
  .desc(
    `Tests execution of shaders loading values from very large (up to at least 2048x2048x2048) textures.
The texture size is selected according to the limit supported by the GPUDevice.`
  )
  .unimplemented();

g.test('sampling,2d')
  .desc(
    `Tests execution of shaders sampling values from very large (up to at least 8192x8192) 2D textures.
The texture size is selected according to the limit supported by the GPUDevice.`
  )
  .unimplemented();

g.test('sampling,2d_array')
  .desc(
    `Tests execution of shaders sampling values from very large (up to at least 8192x8192x2048) arrays of
2D textures. The texture and array size is selected according to limits supported by the GPUDevice.`
  )
  .unimplemented();

g.test('sampling,3d')
  .desc(
    `Tests execution of shaders sampling values from very large (up to at least 2048x2048x2048) textures.
The texture size is selected according to the limit supported by the GPUDevice.`
  )
  .unimplemented();
sarahM0/cts
src/common/tools/gen_listings.ts
import * as fs from 'fs'; import * as path from 'path'; import * as process from 'process'; import { crawl } from './crawl.js'; function usage(rc: number): void { console.error(`Usage: tools/gen_listings [options] [OUT_DIR] [SUITE_DIRS...] For each suite in SUITE_DIRS, generate listings and write each listing.js into OUT_DIR/{suite}/listing.js. Example: tools/gen_listings out/ src/unittests/ src/webgpu/ Options: --help Print this message and exit. --no-validate Whether to validate test modules while crawling. `); process.exit(rc); } const argv = process.argv; if (argv.indexOf('--help') !== -1) { usage(0); } let validate = true; { const i = argv.indexOf('--no-validate'); if (i !== -1) { validate = false; argv.splice(i, 1); } } if (argv.length < 4) { usage(0); } const myself = 'src/common/tools/gen_listings.ts'; const outDir = argv[2]; (async () => { for (const suiteDir of argv.slice(3)) { const listing = await crawl(suiteDir, validate); const suite = path.basename(suiteDir); const outFile = path.normalize(path.join(outDir, `${suite}/listing.js`)); fs.mkdirSync(path.join(outDir, suite), { recursive: true }); fs.writeFileSync( outFile, `\ // AUTO-GENERATED - DO NOT EDIT. See ${myself}. export const listing = ${JSON.stringify(listing, undefined, 2)}; ` ); try { fs.unlinkSync(outFile + '.map'); } catch (ex) { // ignore if file didn't exist } } })();
sarahM0/cts
src/common/internal/query/json_param_value.ts
import { assert, sortObjectByKey } from '../../util/util.js';
import { JSONWithUndefined } from '../params_utils.js';

// JSON can't represent `undefined` and by default stores it as `null`.
// Instead, store `undefined` as this magic string value in JSON.
const jsUndefinedMagicValue = '_undef_';

// JSON.stringify replacer: encodes `undefined` leaf values as the magic string.
function stringifyFilter(k: string, v: unknown): unknown {
  // Make sure no one actually uses the magic value as a parameter.
  assert(v !== jsUndefinedMagicValue);

  return v === undefined ? jsUndefinedMagicValue : v;
}

/** Stringifies a param value to JSON; reversible with parseParamValue. */
export function stringifyParamValue(value: JSONWithUndefined): string {
  return JSON.stringify(value, stringifyFilter);
}

/**
 * Like stringifyParamValue but sorts dictionaries by key, for hashing.
 */
export function stringifyParamValueUniquely(value: JSONWithUndefined): string {
  return JSON.stringify(value, (k, v) => {
    if (typeof v === 'object' && v !== null) {
      // Replace the object with a key-sorted copy (presumably what sortObjectByKey returns —
      // it lives in util). JSON.stringify recurses into the returned value, so nested objects
      // are handled too.
      return sortObjectByKey(v);
    }

    return stringifyFilter(k, v);
  });
}

// Parses a string produced by stringifyParamValue, turning the magic string back into `undefined`.
export function parseParamValue(s: string): JSONWithUndefined {
  return JSON.parse(s, (k, v) => (v === jsUndefinedMagicValue ? undefined : v));
}
sarahM0/cts
src/webgpu/api/validation/capability_checks/features/depth_clamping.spec.ts
export const description = `
Tests 'depth-clamping' must be enabled for GPUDepthStencilState.clampDepth to be enabled.
`;

import { makeTestGroup } from '../../../../../common/framework/test_group.js';

import { ValidationTest } from '../../validation_test.js';

// Feature-gating validation cases for 'depth-clamping' have not been written yet.
export const g = makeTestGroup(ValidationTest);
sarahM0/cts
src/unittests/check_contents.spec.ts
export const description = `Unit tests for check_contents`;

import { Fixture } from '../common/framework/fixture.js';
import { makeTestGroup } from '../common/internal/test_group.js';
import { ErrorWithExtra } from '../common/util/util.js';
import { checkElementsEqual } from '../webgpu/util/check_contents.js';

class F extends Fixture {
  // Expects `result` to be undefined when `substr` is undefined, and otherwise to be an
  // error whose message contains `substr`.
  test(substr: undefined | string, result: undefined | ErrorWithExtra) {
    if (substr === undefined) {
      this.expect(result === undefined, result?.message);
    } else {
      this.expect(result !== undefined && result.message.indexOf(substr) !== -1, result?.message);
    }
  }
}

export const g = makeTestGroup(F);

g.test('checkElementsEqual').fn(t => {
  // Mismatched TypedArray types throw.
  t.shouldThrow('Error', () => checkElementsEqual(new Uint8Array(), new Uint16Array()));
  t.shouldThrow('Error', () => checkElementsEqual(new Uint32Array(), new Float32Array()));
  // Mismatched lengths throw.
  t.shouldThrow('Error', () => checkElementsEqual(new Uint8Array([]), new Uint8Array([0])));
  t.shouldThrow('Error', () => checkElementsEqual(new Uint8Array([0]), new Uint8Array([])));
  {
    // Equal arrays produce no error; unequal arrays produce a diff message.
    t.test(undefined, checkElementsEqual(new Uint8Array([]), new Uint8Array([])));
    t.test(undefined, checkElementsEqual(new Uint8Array([0]), new Uint8Array([0])));
    t.test(undefined, checkElementsEqual(new Uint8Array([1]), new Uint8Array([1])));
    t.test(
      ` Starting at index 0:
  actual == 0x: 00
  failed -> xx
  expected == 01`,
      checkElementsEqual(new Uint8Array([0]), new Uint8Array([1]))
    );
    t.test(
      'expected == 01 02 01',
      checkElementsEqual(new Uint8Array([1, 1, 1]), new Uint8Array([1, 2, 1]))
    );
  }
  {
    // Uint8Array formatting: hex bytes, and truncation of long diffs.
    const actual = new Uint8Array(280);
    const exp = new Uint8Array(280);
    for (let i = 2; i < 20; ++i) actual[i] = i - 4;
    t.test(
      '00 fe ff 00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 00',
      checkElementsEqual(actual, exp)
    );
    for (let i = 2; i < 280; ++i) actual[i] = i - 4;
    t.test('Starting at index 1:', checkElementsEqual(actual, exp));
    for (let i = 0; i < 2; ++i) actual[i] = i - 4;
    t.test('Starting at index 0:', checkElementsEqual(actual, exp));
  }
  {
    // Int32Array formatting: 8-digit hex words.
    const actual = new Int32Array(30);
    const exp = new Int32Array(30);
    for (let i = 2; i < 7; ++i) actual[i] = i - 3;
    t.test('00000002 00000003 00000000\n', checkElementsEqual(actual, exp));
    for (let i = 2; i < 30; ++i) actual[i] = i - 3;
    t.test('00000000 00000000 ...', checkElementsEqual(actual, exp));
  }
  {
    // Float64Array formatting: scientific notation.
    const actual = new Float64Array(30);
    const exp = new Float64Array(30);
    for (let i = 2; i < 7; ++i) actual[i] = (i - 4) * 1e100;
    t.test('2.000e+100 0.000\n', checkElementsEqual(actual, exp));
    // NOTE(review): the `i < 280` bound exceeds Float64Array(30)'s length — likely copied from
    // the Uint8Array(280) case above. Out-of-range typed-array writes are silently ignored, so
    // this behaves identically to `i < 30`; confirm intent before changing.
    for (let i = 2; i < 280; ++i) actual[i] = (i - 4) * 1e100;
    t.test('6.000e+100 7.000e+100 ...', checkElementsEqual(actual, exp));
  }
});
sarahM0/cts
src/webgpu/shader/execution/builtin/min.spec.ts
<gh_stars>0 export const description = ` Execution Tests for the 'min' builtin function `; import { makeTestGroup } from '../../../../common/framework/test_group.js'; import { GPUTest } from '../../../gpu_test.js'; import { i32, i32Bits, TypeI32, TypeU32, u32 } from '../../../util/conversion.js'; import { run } from './builtin.js'; export const g = makeTestGroup(GPUTest); g.test('integer_builtin_functions,unsigned_min') .uniqueId('29aba7ede5b93cdd') .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions') .desc( ` unsigned min: T is u32 or vecN<u32> min(e1: T ,e2: T) -> T Returns e1 if e1 is less than e2, and e2 otherwise. Component-wise when T is a vector. (GLSLstd450UMin) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md ` ) .params(u => u .combine('storageClass', ['uniform', 'storage_r', 'storage_rw'] as const) .combine('vectorize', [undefined, 2, 3, 4] as const) ) .fn(async t => { run(t, 'min', [TypeU32, TypeU32], TypeU32, t.params, [ { input: [u32(1), u32(1)], expected: u32(1) }, { input: [u32(0), u32(0)], expected: u32(0) }, { input: [u32(0xffffffff), u32(0xffffffff)], expected: u32(0xffffffff) }, { input: [u32(1), u32(2)], expected: u32(1) }, { input: [u32(2), u32(1)], expected: u32(1) }, { input: [u32(0x70000000), u32(0x80000000)], expected: u32(0x70000000) }, { input: [u32(0x80000000), u32(0x70000000)], expected: u32(0x70000000) }, { input: [u32(0), u32(0xffffffff)], expected: u32(0) }, { input: [u32(0xffffffff), u32(0)], expected: u32(0) }, { input: [u32(0), u32(0xffffffff)], expected: u32(0) }, ]); }); g.test('integer_builtin_functions,signed_min') .uniqueId('60c8ecdf409b45fc') .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions') .desc( ` signed min: T is i32 or vecN<i32> min(e1: T ,e2: T) -> T Returns e1 if e1 is less than e2, and e2 otherwise. Component-wise when T is a vector. 
(GLSLstd45SUMin) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md ` ) .params(u => u .combine('storageClass', ['uniform', 'storage_r', 'storage_rw'] as const) .combine('vectorize', [undefined, 2, 3, 4] as const) ) .fn(async t => { run(t, 'min', [TypeI32, TypeI32], TypeI32, t.params, [ { input: [i32(1), i32(1)], expected: i32(1) }, { input: [i32(0), i32(0)], expected: i32(0) }, { input: [i32(-1), i32(-1)], expected: i32(-1) }, { input: [i32(1), i32(2)], expected: i32(1) }, { input: [i32(2), i32(1)], expected: i32(1) }, { input: [i32(-1), i32(-2)], expected: i32(-2) }, { input: [i32(-2), i32(-1)], expected: i32(-2) }, { input: [i32(1), i32(-1)], expected: i32(-1) }, { input: [i32(-1), i32(1)], expected: i32(-1) }, { input: [i32Bits(0x70000000), i32Bits(0x80000000)], expected: i32Bits(0x80000000) }, { input: [i32Bits(0x80000000), i32Bits(0x70000000)], expected: i32Bits(0x80000000) }, { input: [i32Bits(0xffffffff), i32(0)], expected: i32Bits(0xffffffff) }, { input: [i32(0), i32Bits(0xffffffff)], expected: i32Bits(0xffffffff) }, ]); });
sarahM0/cts
src/webgpu/api/operation/command_buffer/render/dynamic_state.spec.ts
export const description = `
Tests of the behavior of the viewport/scissor/blend/reference states.

TODO:
- {viewport, scissor rect, blend color, stencil reference}:
  Test rendering result with {various values}.
    - Set the state in different ways to make sure it gets the correct value in the end: {
      - state unset (= default)
      - state explicitly set once to {default value, another value}
      - persistence: [set, draw, draw] (fn should differentiate from [set, draw] + [draw])
      - overwriting: [set(1), draw, set(2), draw] (fn should differentiate from
        [set(1), set(2), draw, draw])
      - overwriting: [set(1), set(2), draw] (fn should differentiate from [set(1), draw]
        but not [set(2), draw])
      - }
`;

import { makeTestGroup } from '../../../../../common/framework/test_group.js';
import { GPUTest } from '../../../../gpu_test.js';

// Cases from the TODO list above have not been implemented yet.
export const g = makeTestGroup(GPUTest);
sarahM0/cts
src/webgpu/util/command_buffer_maker.ts
import { ResourceState, GPUTest } from '../gpu_test.js';

// Encoder-type unions, built up incrementally: render ⊂ programmable ⊂ all.
export const kRenderEncodeTypes = ['render pass', 'render bundle'] as const;
export type RenderEncodeType = typeof kRenderEncodeTypes[number];
export const kProgrammableEncoderTypes = ['compute pass', ...kRenderEncodeTypes] as const;
export type ProgrammableEncoderType = typeof kProgrammableEncoderTypes[number];
export const kEncoderTypes = ['non-pass', ...kProgrammableEncoderTypes] as const;
export type EncoderType = typeof kEncoderTypes[number];

// Look up the type of the encoder based on `T`. If `T` is a union, this will be too!
type EncoderByEncoderType<T extends EncoderType> = {
  'non-pass': GPUCommandEncoder;
  'compute pass': GPUComputePassEncoder;
  'render pass': GPURenderPassEncoder;
  'render bundle': GPURenderBundleEncoder;
}[T];

/** See {@link webgpu/api/validation/validation_test.ValidationTest.createEncoder |
 * GPUTest.createEncoder()}. */
export class CommandBufferMaker<T extends EncoderType> {
  /** `GPU___Encoder` for recording commands into. */
  // Look up the type of the encoder based on `T`. If `T` is a union, this will be too!
  readonly encoder: EncoderByEncoderType<T>;

  /**
   * Finish any passes, finish and record any bundles, and finish/return the command buffer.
   * Checks for validation errors in (only) the appropriate finish call.
   */
  readonly validateFinish: (shouldSucceed: boolean) => GPUCommandBuffer;

  /**
   * Finish the command buffer and submit it. Checks for validation errors in either the submit or
   * the appropriate finish call, depending on the state of a resource used in the encoding.
   */
  readonly validateFinishAndSubmit: (
    shouldBeValid: boolean,
    submitShouldSucceedIfValid: boolean
  ) => void;

  /**
   * `validateFinishAndSubmit()` based on the state of a resource in the command encoder.
   * - `finish()` should fail if the resource is 'invalid'.
   * - Only `submit()` should fail if the resource is 'destroyed'.
   */
  readonly validateFinishAndSubmitGivenState: (resourceState: ResourceState) => void;

  constructor(
    t: GPUTest,
    encoder: EncoderByEncoderType<EncoderType>,
    finish: (shouldSucceed: boolean) => GPUCommandBuffer
  ) {
    // TypeScript introduces an intersection type here where we don't want one.
    this.encoder = encoder as EncoderByEncoderType<T>;

    this.validateFinish = finish;

    // Define extra methods like this, otherwise they get unbound when destructured, e.g.:
    //   const { encoder, validateFinishAndSubmit } = t.createEncoder(type);
    this.validateFinishAndSubmit = (
      shouldBeValid: boolean,
      submitShouldSucceedIfValid: boolean
    ) => {
      const commandBuffer = finish(shouldBeValid);
      // Only submit when finish() was expected to succeed; submitting an invalid command
      // buffer would generate an unrelated validation error.
      if (shouldBeValid) {
        t.expectValidationError(() => t.queue.submit([commandBuffer]), !submitShouldSucceedIfValid);
      }
    };

    this.validateFinishAndSubmitGivenState = (resourceState: ResourceState) => {
      this.validateFinishAndSubmit(resourceState !== 'invalid', resourceState !== 'destroyed');
    };
  }
}
sarahM0/cts
src/webgpu/api/operation/labels.spec.ts
export const description = `
For every create function, the descriptor.label is carried over to the object.label.

TODO: implement
`;

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { GPUTest } from '../../gpu_test.js';

// No cases yet; see the TODO in `description`.
export const g = makeTestGroup(GPUTest);
sarahM0/cts
src/stress/device/render_pipeline_allocation.spec.ts
export const description = `
Stress tests for allocation of GPURenderPipeline objects through GPUDevice.
`;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';

export const g = makeTestGroup(GPUTest);

// Both cases below are declared but unimplemented placeholders.
g.test('coexisting')
  .desc(`Tests allocation of many coexisting GPURenderPipeline objects.`)
  .unimplemented();

g.test('continuous')
  .desc(
    `Tests allocation and implicit GC of many GPURenderPipeline objects over time. Objects are
sequentially created and dropped for GC over a very large number of iterations.`
  )
  .unimplemented();
sarahM0/cts
src/webgpu/api/operation/buffers/threading.spec.ts
export const description = `
TODO:
- Copy GPUBuffer to another thread while {pending, mapped mappedAtCreation} on {same,diff} thread
- Destroy on one thread while {pending, mapped, mappedAtCreation, mappedAtCreation+unmap+mapped}
  on another thread.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';

// Multi-threaded buffer tests are not yet implemented; see the TODO in `description`.
export const g = makeTestGroup(GPUTest);
sarahM0/cts
src/webgpu/api/validation/queue/submit.spec.ts
export const description = `
Tests submit validation.

Note: destroyed buffer/texture/querySet are tested in destroyed/. (unless it gets moved here)
Note: buffer map state is tested in ./buffer_mapped.spec.ts.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';

import { ValidationTest } from '../validation_test.js';

export const g = makeTestGroup(ValidationTest);

// Declared but unimplemented: cross-device command-buffer submission must be a validation error.
g.test('command_buffer,device_mismatch')
  .desc(
    `
    Tests submit cannot be called with command buffers created from another device
    Test with two command buffers to make sure all command buffers can be validated:
    - cb0 and cb1 from same device
    - cb0 and cb1 from different device
    `
  )
  .paramsSubcasesOnly([
    { cb0Mismatched: false, cb1Mismatched: false }, // control case
    { cb0Mismatched: true, cb1Mismatched: false },
    { cb0Mismatched: false, cb1Mismatched: true },
  ])
  .unimplemented();
sarahM0/cts
src/stress/device/texture_allocation.spec.ts
export const description = `
Stress tests for allocation of GPUTexture objects through GPUDevice.
`;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';

export const g = makeTestGroup(GPUTest);

// All cases below are declared but unimplemented placeholders.
g.test('coexisting')
  .desc(`Tests allocation of many coexisting GPUTexture objects.`)
  .unimplemented();

g.test('continuous,with_destroy')
  .desc(
    `Tests allocation and destruction of many GPUTexture objects over time. Objects are
sequentially created and destroyed over a very large number of iterations.`
  )
  .unimplemented();

g.test('continuous,no_destroy')
  .desc(
    `Tests allocation and implicit GC of many GPUTexture objects over time. Objects are
sequentially created and dropped for GC over a very large number of iterations.`
  )
  .unimplemented();
sarahM0/cts
src/webgpu/shader/types.ts
import { keysOf } from '../../common/util/data_tables.js';
import { assert } from '../../common/util/util.js';
import { align } from '../util/math.js';

// Element count used for every generated sized-array type.
const kArrayLength = 3;

export type ContainerType = 'scalar' | 'vector' | 'matrix' | 'atomic' | 'array';
export type ScalarType = 'i32' | 'u32' | 'f32' | 'bool';

// Scalar types usable in storage/uniform buffers (`bool` has no host-sharable layout).
export const HostSharableTypes = ['i32', 'u32', 'f32'] as const;

/** Info for each plain scalar type. `layout: undefined` marks a non-host-sharable type. */
export const kScalarTypeInfo =
  /* prettier-ignore */ {
  'i32':  { layout: { alignment: 4, size: 4 }, supportsAtomics: true,  arrayLength: 1, innerLength: 0 },
  'u32':  { layout: { alignment: 4, size: 4 }, supportsAtomics: true,  arrayLength: 1, innerLength: 0 },
  'f32':  { layout: { alignment: 4, size: 4 }, supportsAtomics: false, arrayLength: 1, innerLength: 0 },
  'bool': { layout: undefined,                 supportsAtomics: false, arrayLength: 1, innerLength: 0 },
} as const;
/** List of all plain scalar types. */
export const kScalarTypes = keysOf(kScalarTypeInfo);

/** Info for each vecN<> container type. Note vec3 is 16-aligned but only 12 bytes in size. */
export const kVectorContainerTypeInfo =
  /* prettier-ignore */ {
  'vec2': { layout: { alignment:  8, size:  8 }, arrayLength: 2, innerLength: 0 },
  'vec3': { layout: { alignment: 16, size: 12 }, arrayLength: 3, innerLength: 0 },
  'vec4': { layout: { alignment: 16, size: 16 }, arrayLength: 4, innerLength: 0 },
} as const;
/** List of all vecN<> container types. */
export const kVectorContainerTypes = keysOf(kVectorContainerTypeInfo);

/** Info for each matNxN<> container type. `arrayLength` is the column count,
 * `innerLength` the rows per column vector. */
export const kMatrixContainerTypeInfo =
  /* prettier-ignore */ {
  'mat2x2': { layout: { alignment:  8, size: 16 }, arrayLength: 2, innerLength: 2 },
  'mat3x2': { layout: { alignment:  8, size: 24 }, arrayLength: 3, innerLength: 2 },
  'mat4x2': { layout: { alignment:  8, size: 32 }, arrayLength: 4, innerLength: 2 },
  'mat2x3': { layout: { alignment: 16, size: 32 }, arrayLength: 2, innerLength: 3 },
  'mat3x3': { layout: { alignment: 16, size: 48 }, arrayLength: 3, innerLength: 3 },
  'mat4x3': { layout: { alignment: 16, size: 64 }, arrayLength: 4, innerLength: 3 },
  'mat2x4': { layout: { alignment: 16, size: 32 }, arrayLength: 2, innerLength: 4 },
  'mat3x4': { layout: { alignment: 16, size: 48 }, arrayLength: 3, innerLength: 4 },
  'mat4x4': { layout: { alignment: 16, size: 64 }, arrayLength: 4, innerLength: 4 },
} as const;
/** List of all matNxN<> container types. */
export const kMatrixContainerTypes = keysOf(kMatrixContainerTypeInfo);

export type StorageClass = 'storage' | 'uniform' | 'private' | 'function' | 'workgroup';

/**
 * Generate a bunch types (vec, mat, sized/unsized array) for testing.
 *
 * Yields `{ type, _kTypeInfo }` items, where `type` is WGSL source for the type and
 * `_kTypeInfo` carries its element/layout metadata.
 */
export function* generateTypes({
  storageClass,
  baseType,
  containerType,
  isAtomic = false,
}: {
  storageClass: StorageClass;
  /** Base scalar type (i32/u32/f32/bool). */
  baseType: ScalarType;
  /** Container type (scalar/vector/matrix/array) */
  containerType: ContainerType;
  /** Whether to wrap the baseType in `atomic<>`. */
  isAtomic?: boolean;
}) {
  const scalarInfo = kScalarTypeInfo[baseType];
  if (isAtomic) {
    assert(scalarInfo.supportsAtomics, 'type does not support atomics');
  }
  const scalarType = isAtomic ? `atomic<${baseType}>` : baseType;

  // Storage and uniform require host-sharable types.
  if (storageClass === 'storage' || storageClass === 'uniform') {
    assert(isHostSharable(baseType), 'type ' + baseType.toString() + ' is not host sharable');
  }

  // Scalar types
  if (containerType === 'scalar') {
    yield {
      type: `${scalarType}`,
      _kTypeInfo: {
        elementBaseType: `${scalarType}`,
        ...scalarInfo,
      },
    };
  }

  // Vector types
  if (containerType === 'vector') {
    for (const vectorType of kVectorContainerTypes) {
      yield {
        type: `${vectorType}<${scalarType}>`,
        _kTypeInfo: { elementBaseType: baseType, ...kVectorContainerTypeInfo[vectorType] },
      };
    }
  }

  if (containerType === 'matrix') {
    // Matrices can only be f32.
    if (baseType === 'f32') {
      for (const matrixType of kMatrixContainerTypes) {
        const matrixInfo = kMatrixContainerTypeInfo[matrixType];
        yield {
          type: `${matrixType}<${scalarType}>`,
          _kTypeInfo: {
            // Matrix elements are column vectors of `innerLength` components.
            elementBaseType: `vec${matrixInfo.innerLength}<${scalarType}>`,
            ...matrixInfo,
          },
        };
      }
    }
  }

  // Array types
  if (containerType === 'array') {
    const arrayTypeInfo = {
      elementBaseType: `${baseType}`,
      arrayLength: kArrayLength,
      layout: scalarInfo.layout
        ? {
            alignment: scalarInfo.layout.alignment,
            size:
              storageClass === 'uniform'
                ? // Uniform storage class must have array elements aligned to 16.
                  kArrayLength *
                  arrayStride({
                    ...scalarInfo.layout,
                    alignment: 16,
                  })
                : kArrayLength * arrayStride(scalarInfo.layout),
          }
        : undefined,
    };

    // Sized
    if (storageClass === 'uniform') {
      // The explicit stride(16) attribute matches the 16-byte element stride computed above.
      yield {
        type: `[[stride(16)]] array<${scalarType},${kArrayLength}>`,
        _kTypeInfo: arrayTypeInfo,
      };
    } else {
      yield { type: `array<${scalarType},${kArrayLength}>`, _kTypeInfo: arrayTypeInfo };
    }
    // Unsized — only generated for the storage class.
    if (storageClass === 'storage') {
      yield { type: `array<${scalarType}>`, _kTypeInfo: arrayTypeInfo };
    }
  }

  // Helpers local to generateTypes.

  // Stride of one array element: its size rounded up to its alignment.
  function arrayStride(elementLayout: { size: number; alignment: number }) {
    return align(elementLayout.size, elementLayout.alignment);
  }

  // Whether `baseType` may be placed in a storage/uniform buffer.
  function isHostSharable(baseType: ScalarType) {
    for (const sharableType of HostSharableTypes) {
      if (sharableType === baseType) return true;
    }
    return false;
  }
}

/** Atomic access requires scalar/array container type and storage/workgroup memory. */
export function supportsAtomics(p: {
  storageClass: string;
  storageMode: string | undefined;
  access: string;
  containerType: ContainerType;
}) {
  return (
    ((p.storageClass === 'storage' && p.storageMode === 'read_write') ||
      p.storageClass === 'workgroup') &&
    (p.containerType === 'scalar' || p.containerType === 'array')
  );
}

/** Generates an iterator of supported base types (i32/u32/f32/bool) */
export function* supportedScalarTypes(p: { isAtomic: boolean; storageClass: string }) {
  for (const scalarType of kScalarTypes) {
    const info = kScalarTypeInfo[scalarType];

    // Test atomics only on supported scalar types.
    if (p.isAtomic && !info.supportsAtomics) continue;

    // Storage and uniform require host-sharable types.
    const isHostShared = p.storageClass === 'storage' || p.storageClass === 'uniform';
    if (isHostShared && info.layout === undefined) continue;

    yield scalarType;
  }
}
sarahM0/cts
src/common/framework/test_group.ts
// Public framework entry point for makeTestGroup; the implementation lives in common/internal.
export { makeTestGroup } from '../internal/test_group.js';
sarahM0/cts
src/webgpu/api/validation/query_set/destroy.spec.ts
export const description = `
Destroying a query set more than once is allowed.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';

import { ValidationTest } from '../validation_test.js';

export const g = makeTestGroup(ValidationTest);

g.test('twice').fn(async t => {
  const qset = t.device.createQuerySet({ type: 'occlusion', count: 1 });

  qset.destroy();
  // A second destroy() must be a no-op, not a validation error.
  qset.destroy();
});
sarahM0/cts
src/webgpu/api/validation/encoding/queries/pipeline_statistics.spec.ts
export const description = `
Validation for encoding pipeline statistics queries.
Excludes query begin/end balance and nesting (begin_end.spec.ts)
and querySet/queryIndex (general.spec.ts).

TODO:
- Test pipelineStatistics with {undefined, empty, duplicated, full (control case)} values
`;

import { makeTestGroup } from '../../../../../common/framework/test_group.js';

import { ValidationTest } from '../../validation_test.js';

// Cases from the TODO list above have not been implemented yet.
export const g = makeTestGroup(ValidationTest);
sarahM0/cts
src/webgpu/api/validation/encoding/queries/common.ts
import { GPUTest } from '../../../../gpu_test.js';

/**
 * Creates a GPUQuerySet of the given `type` and `count` on the test device.
 * A 'pipeline-statistics' query set gets a single statistic ('clipper-invocations');
 * all other types get an empty statistics list.
 */
export function createQuerySetWithType(
  t: GPUTest,
  type: GPUQueryType,
  count: GPUSize32
): GPUQuerySet {
  const pipelineStatistics =
    type === 'pipeline-statistics' ? (['clipper-invocations'] as const) : ([] as const);
  return t.device.createQuerySet({ type, count, pipelineStatistics });
}

/**
 * Begins a render pass on `encoder` with a throwaway 16x16 rgba8unorm color target,
 * optionally attaching `querySet` as the pass's occlusion query set.
 */
export function beginRenderPassWithQuerySet(
  t: GPUTest,
  encoder: GPUCommandEncoder,
  querySet?: GPUQuerySet
): GPURenderPassEncoder {
  const colorTexture = t.device.createTexture({
    format: 'rgba8unorm' as const,
    size: { width: 16, height: 16, depthOrArrayLayers: 1 },
    usage: GPUTextureUsage.RENDER_ATTACHMENT,
  });

  return encoder.beginRenderPass({
    colorAttachments: [
      {
        view: colorTexture.createView(),
        loadValue: { r: 1.0, g: 0.0, b: 0.0, a: 1.0 },
        storeOp: 'store',
      },
    ],
    occlusionQuerySet: querySet,
  });
}
sarahM0/cts
src/common/framework/params_builder.ts
import { Merged, mergeParams } from '../internal/params_utils.js';

// ================================================================
// "Public" ParamsBuilder API / Documentation
// ================================================================

/**
 * Provides doc comments for the methods of CaseParamsBuilder and SubcaseParamsBuilder.
 * (Also enforces rough interface match between them.)
 */
export interface ParamsBuilder {
  /**
   * Expands each item in `this` into zero or more items.
   * Each item has its parameters expanded with those returned by the `expander`.
   *
   * **Note:** When only a single key is being added, use the simpler `expand` for readability.
   *
   * ```text
   * this                = [ a         , b   , c  ]
   * this.map(expander)  = [ f(a)        f(b)  f(c) ]
   *                     = [ [a1,a2,a3] , [b1] , [] ]
   * merge and flatten   = [ merge(a,a1), merge(a,a2), merge(a,a3), merge(b,b1) ]
   * ```
   */
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  expandWithParams(expander: (_: any) => any): any;

  /**
   * Expands each item in `this` into zero or more items. Each item has its parameters expanded
   * with one new key, `key`, and the values returned by `expander`.
   */
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  expand(key: string, expander: (_: any) => any): any;

  /**
   * Expands each item in `this` to multiple items, one for each item in `newParams`.
   *
   * In other words, takes the cartesian product of [ the items in `this` ] and `newParams`.
   *
   * **Note:** When only a single key is being added, use the simpler `combine` for readability.
   *
   * ```text
   * this                      = [ {a:1}, {b:2} ]
   * newParams                 = [ {x:1}, {y:2} ]
   * this.combineP(newParams)  = [ {a:1,x:1}, {a:1,y:2}, {b:2,x:1}, {b:2,y:2} ]
   * ```
   */
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  combineWithParams(newParams: Iterable<any>): any;

  /**
   * Expands each item in `this` to multiple items with `{ [name]: value }` for each value.
   *
   * In other words, takes the cartesian product of [ the items in `this` ]
   * and `[ {[name]: value} for each value in values ]`
   */
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  combine(key: string, newParams: Iterable<any>): any;

  /**
   * Filters `this` to only items for which `pred` returns true.
   */
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  filter(pred: (_: any) => boolean): any;

  /**
   * Filters `this` to only items for which `pred` returns false.
   */
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  unless(pred: (_: any) => boolean): any;
}

/**
 * Determines the resulting parameter object type which would be generated by an object of
 * the given ParamsBuilder type.
 */
export type ParamTypeOf<
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  T extends ParamsBuilder
> = T extends SubcaseParamsBuilder<infer CaseP, infer SubcaseP>
  ? Merged<CaseP, SubcaseP>
  : T extends CaseParamsBuilder<infer CaseP>
  ? CaseP
  : never;

// ================================================================
// Implementation
// ================================================================

/**
 * Iterable over pairs of either:
 * - `[case params, Iterable<subcase params>]` if there are subcases.
 * - `[case params, undefined]` if not.
 */
export type CaseSubcaseIterable<CaseP, SubcaseP> = Iterable<
  readonly [CaseP, Iterable<SubcaseP> | undefined]
>;

/**
 * Base class for `CaseParamsBuilder` and `SubcaseParamsBuilder`.
 */
export abstract class ParamsBuilderBase<CaseP extends {}, SubcaseP extends {}> {
  // Generator factory producing the case parameter objects; called fresh on each iteration.
  protected readonly cases: () => Generator<CaseP>;

  constructor(cases: () => Generator<CaseP>) {
    this.cases = cases;
  }

  /**
   * Hidden from test files. Use `builderIterateCasesWithSubcases` to access this.
   */
  protected abstract iterateCasesWithSubcases(): CaseSubcaseIterable<CaseP, SubcaseP>;
}

/**
 * Calls the (normally hidden) `iterateCasesWithSubcases()` method.
 */
export function builderIterateCasesWithSubcases(builder: ParamsBuilderBase<{}, {}>) {
  // Local structural interface re-declares the protected method so it can be
  // invoked from outside the class hierarchy via a cast.
  interface IterableParamsBuilder {
    iterateCasesWithSubcases(): CaseSubcaseIterable<{}, {}>;
  }
  return ((builder as unknown) as IterableParamsBuilder).iterateCasesWithSubcases();
}

/**
 * Builder for combinatorial test **case** parameters.
 *
 * CaseParamsBuilder is immutable. Each method call returns a new, immutable object,
 * modifying the list of cases according to the method called.
 *
 * This means, for example, that the `unit` passed into `TestBuilder.params()` can be reused.
 */
export class CaseParamsBuilder<CaseP extends {}>
  extends ParamsBuilderBase<CaseP, {}>
  implements Iterable<CaseP>, ParamsBuilder {
  *iterateCasesWithSubcases(): CaseSubcaseIterable<CaseP, {}> {
    // No subcases at this stage: each case is paired with `undefined`.
    for (const a of this.cases()) {
      yield [a, undefined];
    }
  }

  [Symbol.iterator](): Iterator<CaseP> {
    return this.cases();
  }

  /** @inheritdoc */
  expandWithParams<NewP extends {}>(
    expander: (_: Merged<{}, CaseP>) => Iterable<NewP>
  ): CaseParamsBuilder<Merged<CaseP, NewP>> {
    const newGenerator = expanderGenerator(this.cases, expander);
    return new CaseParamsBuilder(() => newGenerator({}));
  }

  /** @inheritdoc */
  expand<NewPKey extends string, NewPValue>(
    key: NewPKey,
    expander: (_: Merged<{}, CaseP>) => Iterable<NewPValue>
  ): CaseParamsBuilder<Merged<CaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expandWithParams(function* (p) {
      for (const value of expander(p)) {
        // TypeScript doesn't know here that NewPKey is always a single literal string type.
        yield { [key]: value } as { [name in NewPKey]: NewPValue };
      }
    });
  }

  /** @inheritdoc */
  combineWithParams<NewP extends {}>(
    newParams: Iterable<NewP>
  ): CaseParamsBuilder<Merged<CaseP, NewP>> {
    return this.expandWithParams(() => newParams);
  }

  /** @inheritdoc */
  combine<NewPKey extends string, NewPValue>(
    key: NewPKey,
    values: Iterable<NewPValue>
  ): CaseParamsBuilder<Merged<CaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expand(key, () => values);
  }

  /** @inheritdoc */
  filter(pred: (_: Merged<{}, CaseP>) => boolean): CaseParamsBuilder<CaseP> {
    const newGenerator = filterGenerator(this.cases, pred);
    return new CaseParamsBuilder(() => newGenerator({}));
  }

  /** @inheritdoc */
  unless(pred: (_: Merged<{}, CaseP>) => boolean): CaseParamsBuilder<CaseP> {
    return this.filter(x => !pred(x));
  }

  /**
   * "Finalize" the list of cases and begin defining subcases.
   * Returns a new SubcaseParamsBuilder. Methods called on SubcaseParamsBuilder
   * generate new subcases instead of new cases.
   */
  beginSubcases(): SubcaseParamsBuilder<CaseP, {}> {
    // Seed the subcase generator with a single empty subcase per case.
    return new SubcaseParamsBuilder(
      () => this.cases(),
      function* () {
        yield {};
      }
    );
  }
}

/**
 * The unit CaseParamsBuilder, representing a single case with no params: `[ {} ]`.
 *
 * `punit` is passed to every `.params()`/`.paramsSubcasesOnly()` call, so `kUnitCaseParamsBuilder`
 * is only explicitly needed if constructing a ParamsBuilder outside of a test builder.
 */
export const kUnitCaseParamsBuilder = new CaseParamsBuilder(function* () {
  yield {};
});

/**
 * Builder for combinatorial test _subcase_ parameters.
 *
 * SubcaseParamsBuilder is immutable. Each method call returns a new, immutable object,
 * modifying the list of subcases according to the method called.
 */
export class SubcaseParamsBuilder<CaseP extends {}, SubcaseP extends {}>
  extends ParamsBuilderBase<CaseP, SubcaseP>
  implements ParamsBuilder {
  // Generator factory producing the subcase parameters for a given case.
  protected readonly subcases: (_: CaseP) => Generator<SubcaseP>;

  constructor(cases: () => Generator<CaseP>, generator: (_: CaseP) => Generator<SubcaseP>) {
    super(cases);
    this.subcases = generator;
  }

  *iterateCasesWithSubcases(): CaseSubcaseIterable<CaseP, SubcaseP> {
    for (const caseP of this.cases()) {
      const subcases = Array.from(this.subcases(caseP));
      // A case whose subcase list is empty is dropped entirely (nothing to run).
      if (subcases.length) {
        yield [caseP, subcases];
      }
    }
  }

  /** @inheritdoc */
  expandWithParams<NewP extends {}>(
    expander: (_: Merged<CaseP, SubcaseP>) => Iterable<NewP>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, NewP>> {
    return new SubcaseParamsBuilder(this.cases, expanderGenerator(this.subcases, expander));
  }

  /** @inheritdoc */
  expand<NewPKey extends string, NewPValue>(
    key: NewPKey,
    expander: (_: Merged<CaseP, SubcaseP>) => Iterable<NewPValue>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expandWithParams(function* (p) {
      for (const value of expander(p)) {
        // TypeScript doesn't know here that NewPKey is always a single literal string type.
        yield { [key]: value } as { [name in NewPKey]: NewPValue };
      }
    });
  }

  /** @inheritdoc */
  combineWithParams<NewP extends {}>(
    newParams: Iterable<NewP>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, NewP>> {
    return this.expandWithParams(() => newParams);
  }

  /** @inheritdoc */
  combine<NewPKey extends string, NewPValue>(
    key: NewPKey,
    values: Iterable<NewPValue>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expand(key, () => values);
  }

  /** @inheritdoc */
  filter(pred: (_: Merged<CaseP, SubcaseP>) => boolean): SubcaseParamsBuilder<CaseP, SubcaseP> {
    return new SubcaseParamsBuilder(this.cases, filterGenerator(this.subcases, pred));
  }

  /** @inheritdoc */
  unless(pred: (_: Merged<CaseP, SubcaseP>) => boolean): SubcaseParamsBuilder<CaseP, SubcaseP> {
    return this.filter(x => !pred(x));
  }
}

// Wraps `baseGenerator` so that each yielded item is expanded (cartesian-style) with the
// items produced by `expander` on the merged (base + item) params.
function expanderGenerator<Base, A, B>(
  baseGenerator: (_: Base) => Generator<A>,
  expander: (_: Merged<Base, A>) => Iterable<B>
): (_: Base) => Generator<Merged<A, B>> {
  return function* (base: Base) {
    for (const a of baseGenerator(base)) {
      for (const b of expander(mergeParams(base, a))) {
        yield mergeParams(a, b);
      }
    }
  };
}

// Wraps `baseGenerator` so that items are yielded only when `pred` accepts the
// merged (base + item) params.
function filterGenerator<Base, A>(
  baseGenerator: (_: Base) => Generator<A>,
  pred: (_: Merged<Base, A>) => boolean
): (_: Base) => Generator<A> {
  return function* (base: Base) {
    for (const a of baseGenerator(base)) {
      if (pred(mergeParams(base, a))) {
        yield a;
      }
    }
  };
}
sarahM0/cts
src/webgpu/shader/execution/builtin/atan.spec.ts
export const description = ` Execution Tests for the 'atan' builtin function `; import { makeTestGroup } from '../../../../common/framework/test_group.js'; import { GPUTest } from '../../../gpu_test.js'; import { f32, f32Bits, TypeF32 } from '../../../util/conversion.js'; import { Case, Config, kBit, run, ulpThreshold } from './builtin.js'; export const g = makeTestGroup(GPUTest); g.test('float_builtin_functions,atan') .uniqueId('b13828d6243d13dd') .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#float-builtin-functions') .desc( ` atan: T is f32 or vecN<f32> atan(e: T ) -> T Returns the arc tangent of e. Component-wise when T is a vector. (GLSLstd450Atan) ` ) .params(u => u .combine('storageClass', ['uniform', 'storage_r', 'storage_rw'] as const) .combine('vectorize', [undefined, 2, 3, 4] as const) ) .fn(async t => { // TODO(https://github.com/gpuweb/cts/issues/792): Decide what the ground-truth is for these tests. const truthFunc = (x: number): number => { return Math.atan(x); }; // Well defined/border cases const manual: Array<Case> = [ { input: f32Bits(kBit.f32.infinity.negative), expected: f32(-Math.PI / 2) }, { input: f32(-Math.sqrt(3)), expected: f32(-Math.PI / 3) }, { input: f32(-1), expected: f32(-Math.PI / 4) }, { input: f32(-Math.sqrt(3) / 3), expected: f32(-Math.PI / 6) }, { input: f32(Math.sqrt(3) / 3), expected: f32(Math.PI / 6) }, { input: f32(1), expected: f32(Math.PI / 4) }, { input: f32(Math.sqrt(3)), expected: f32(Math.PI / 3) }, { input: f32Bits(kBit.f32.infinity.positive), expected: f32(Math.PI / 2) }, // Zero-like cases { input: f32(0), expected: f32(0) }, { input: f32Bits(kBit.f32.positive.min), expected: f32(0) }, { input: f32Bits(kBit.f32.negative.max), expected: f32(0) }, { input: f32Bits(kBit.f32.positive.zero), expected: f32(0) }, { input: f32Bits(kBit.f32.negative.zero), expected: f32(0) }, { input: f32Bits(kBit.f32.positive.min), expected: f32Bits(kBit.f32.positive.min) }, { input: f32Bits(kBit.f32.negative.max), expected: 
f32Bits(kBit.f32.negative.max) }, { input: f32Bits(kBit.f32.positive.min), expected: f32Bits(kBit.f32.negative.max) }, { input: f32Bits(kBit.f32.negative.max), expected: f32Bits(kBit.f32.positive.min) }, { input: f32Bits(kBit.f32.positive.zero), expected: f32Bits(kBit.f32.positive.zero) }, { input: f32Bits(kBit.f32.negative.zero), expected: f32Bits(kBit.f32.negative.zero) }, { input: f32Bits(kBit.f32.positive.zero), expected: f32Bits(kBit.f32.negative.zero) }, { input: f32Bits(kBit.f32.negative.zero), expected: f32Bits(kBit.f32.positive.zero) }, ]; // Spread of cases over wide domain const automatic = new Array<Case>(1000); const f32Min = f32Bits(kBit.f32.positive.min).value as number; const f32Max = f32Bits(kBit.f32.positive.max).value as number; const increment = (f32Max - f32Min) / automatic.length; for (let i = 0; i < automatic.length; i++) { const x = f32Min + increment * i; automatic[i] = { input: f32(x), expected: f32(truthFunc(x)) }; } const cfg: Config = t.params; cfg.cmpFloats = ulpThreshold(4096); run(t, 'atan', [TypeF32], TypeF32, cfg, manual.concat(automatic)); });
sarahM0/cts
src/webgpu/api/operation/resource_init/check_texture/by_copy.ts
import { assert } from '../../../../../common/util/util.js';
import { EncodableTextureFormat, kTextureFormatInfo } from '../../../../capability_info.js';
import { virtualMipSize } from '../../../../util/texture/base.js';

import { CheckContents } from '../texture_zero.spec.js';

/**
 * Checks texture contents by reading each subresource back through a buffer copy
 * (via `expectSingleColor`) and comparing against the expected texel components
 * for the given init `state`.
 */
export const checkContentsByBufferCopy: CheckContents = (
  t,
  params,
  texture,
  state,
  subresourceRange
) => {
  for (const { level, layer } of subresourceRange.each()) {
    assert(params.dimension !== '1d');
    assert(params.format in kTextureFormatInfo);
    const format = params.format as EncodableTextureFormat;

    t.expectSingleColor(texture, format, {
      size: [t.textureWidth, t.textureHeight, t.textureDepth],
      dimension: params.dimension,
      slice: layer,
      layout: { mipLevel: level },
      exp: t.stateToTexelComponents[state],
    });
  }
};

/**
 * Checks texture contents by first copying each subresource into a fresh texture
 * with copyTextureToTexture, then checking the copy. Exercises the T2T copy path
 * in addition to the readback itself.
 */
export const checkContentsByTextureCopy: CheckContents = (
  t,
  params,
  texture,
  state,
  subresourceRange
) => {
  for (const { level, layer } of subresourceRange.each()) {
    assert(params.dimension !== '1d');
    assert(params.format in kTextureFormatInfo);
    const format = params.format as EncodableTextureFormat;

    // Extent of this mip level.
    const [width, height, depth] = virtualMipSize(
      params.dimension,
      [t.textureWidth, t.textureHeight, t.textureDepth],
      level
    );

    const copyDst = t.device.createTexture({
      dimension: params.dimension,
      size: [width, height, depth],
      format: params.format,
      usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.COPY_SRC,
    });

    const commandEncoder = t.device.createCommandEncoder();
    commandEncoder.copyTextureToTexture(
      { texture, mipLevel: level, origin: { x: 0, y: 0, z: layer } },
      { texture: copyDst, mipLevel: 0 },
      { width, height, depthOrArrayLayers: depth }
    );
    t.queue.submit([commandEncoder.finish()]);

    t.expectSingleColor(copyDst, format, {
      size: [width, height, depth],
      exp: t.stateToTexelComponents[state],
    });
  }
};
sarahM0/cts
src/webgpu/api/validation/queue/writeBuffer.spec.ts
export const description = `
Tests writeBuffer validation.

Note: destroyed buffer is tested in destroyed/.
Note: buffer map state is tested in ./buffer_mapped.spec.ts.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import {
  TypedArrayBufferView,
  TypedArrayBufferViewConstructor,
} from '../../../../common/util/util.js';
import { GPUConst } from '../../../constants.js';
import { ValidationTest } from '../validation_test.js';

export const g = makeTestGroup(ValidationTest);

g.test('ranges')
  .desc(
    `
Tests that the data ranges given to GPUQueue.writeBuffer() are properly validated.

Tests calling writeBuffer with both TypedArrays and ArrayBuffers and checks that the data offset and size
is interpreted correctly for both.
  - When passing a TypedArray the data offset and size is given in elements.
  - When passing an ArrayBuffer the data offset and size is given in bytes.

Also verifies that the specified data range:
  - Describes a valid range of the destination buffer and source buffer.
  - Fits fully within the destination buffer.
  - Has a byte size which is a multiple of 4.
`
  )
  .fn(async t => {
    const queue = t.device.queue;

    // Runs the shared range-validation checks once for a given element type.
    // `testBuffer` selects whether the source is an ArrayBuffer (offsets/sizes in
    // bytes) or a TypedArray of `arrayType` (offsets/sizes in elements).
    function runTest(arrayType: TypedArrayBufferViewConstructor, testBuffer: boolean) {
      const elementSize = arrayType.BYTES_PER_ELEMENT;
      // Destination is sized to hold 16 elements.
      const bufferSize = 16 * elementSize;
      const buffer = t.device.createBuffer({
        size: bufferSize,
        usage: GPUBufferUsage.COPY_DST,
      });
      // Sources of 8, 16, and 32 elements (half / exactly / double the destination).
      const arraySm: TypedArrayBufferView | ArrayBuffer = testBuffer
        ? new arrayType(8).buffer
        : new arrayType(8);
      const arrayMd: TypedArrayBufferView | ArrayBuffer = testBuffer
        ? new arrayType(16).buffer
        : new arrayType(16);
      const arrayLg: TypedArrayBufferView | ArrayBuffer = testBuffer
        ? new arrayType(32).buffer
        : new arrayType(32);

      // These alignment checks only make sense when an element count can be
      // non-4-byte-aligned, i.e. for element sizes smaller than 4 bytes.
      if (elementSize < 4) {
        const array15: TypedArrayBufferView | ArrayBuffer = testBuffer
          ? new arrayType(15).buffer
          : new arrayType(15);

        // Writing the full buffer that isn't 4-byte aligned.
        t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 0, array15));

        // Writing from an offset that causes source to be 4-byte aligned.
        queue.writeBuffer(buffer, 0, array15, 3);

        // Writing from an offset that causes the source to not be 4-byte aligned.
        t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 0, arrayMd, 3));

        // Writing with a size that is not 4-byte aligned.
        t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 0, arraySm, 0, 7));
      }

      // Writing the full buffer without offsets.
      queue.writeBuffer(buffer, 0, arraySm);
      queue.writeBuffer(buffer, 0, arrayMd);
      t.expectValidationError(() => queue.writeBuffer(buffer, 0, arrayLg));

      // Writing the full buffer with a 4-byte aligned offset.
      queue.writeBuffer(buffer, 8, arraySm);
      t.expectValidationError(() => queue.writeBuffer(buffer, 8, arrayMd));

      // Writing the full buffer with a unaligned offset.
      t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 3, arraySm));

      // Writing remainder of buffer from offset.
      queue.writeBuffer(buffer, 0, arraySm, 4);
      queue.writeBuffer(buffer, 0, arrayMd, 4);
      t.expectValidationError(() => queue.writeBuffer(buffer, 0, arrayLg, 4));

      // Writing a larger buffer from an offset that allows it to fit in the destination.
      queue.writeBuffer(buffer, 0, arrayLg, 16);

      // Writing with both an offset and size.
      queue.writeBuffer(buffer, 0, arraySm, 4, 4);

      // Writing with a size that extends past the source buffer length.
      t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 0, arraySm, 0, 16));
      t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 0, arraySm, 4, 8));

      // Writing with a size that is 4-byte aligned but an offset that is not.
      queue.writeBuffer(buffer, 0, arraySm, 3, 4);

      // Writing zero bytes at the end of the buffer
      queue.writeBuffer(buffer, bufferSize, arraySm, 0, 0);

      // Writing with a buffer offset that is out of range of buffer size
      t.expectValidationError(() => queue.writeBuffer(buffer, bufferSize + 4, arraySm, 0, 0));

      // Writing zero bytes from the end of the data
      queue.writeBuffer(buffer, 0, arraySm, 8, 0);

      // Writing with a data offset that is out of range of data size
      t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 0, arraySm, 9, 0));

      // A data offset of undefined should be treated as 0
      queue.writeBuffer(buffer, 0, arraySm, undefined, 8);
      t.shouldThrow('OperationError', () => queue.writeBuffer(buffer, 0, arraySm, undefined, 12));
    }

    const arrayTypes = [
      Uint8Array,
      Uint8ClampedArray,
      Int8Array,
      Uint16Array,
      Int16Array,
      Uint32Array,
      Int32Array,
      Float32Array,
      Float64Array,
    ];

    // ArrayBuffer path once (element type is irrelevant for raw bytes),
    // then the TypedArray path for every element type.
    runTest(Uint8Array, true);
    for (const arrayType of arrayTypes) {
      runTest(arrayType, false);
    }
  });

g.test('usages')
  .desc(
    `
  Tests calling writeBuffer with the buffer missed COPY_DST usage.
    - buffer {with, without} COPY DST usage
`
  )
  .paramsSubcasesOnly([
    { usage: GPUConst.BufferUsage.COPY_DST, _valid: true }, // control case
    { usage: GPUConst.BufferUsage.STORAGE, _valid: false }, // without COPY_DST usage
    { usage: GPUConst.BufferUsage.STORAGE | GPUConst.BufferUsage.COPY_SRC, _valid: false }, // with other usage
    { usage: GPUConst.BufferUsage.STORAGE | GPUConst.BufferUsage.COPY_DST, _valid: true }, // with COPY_DST usage
  ])
  .fn(async t => {
    const { usage, _valid } = t.params;
    const buffer = t.device.createBuffer({ size: 16, usage });
    const data = new Uint8Array(16);

    t.expectValidationError(() => {
      t.device.queue.writeBuffer(buffer, 0, data, 0, data.length);
    }, !_valid);
  });

g.test('buffer,device_mismatch')
  .desc('Tests writeBuffer cannot be called with a buffer created from another device')
  .paramsSubcasesOnly(u => u.combine('mismatched', [true, false]))
  .unimplemented();
sarahM0/cts
src/webgpu/api/validation/encoding/queries/resolveQuerySet.spec.ts
export const description = `
Validation tests for resolveQuerySet.
`;

import { makeTestGroup } from '../../../../../common/framework/test_group.js';
import { GPUConst } from '../../../../constants.js';
import { ValidationTest } from '../../validation_test.js';

export const g = makeTestGroup(ValidationTest);

// Query set size shared by all tests below; each query resolves to 8 bytes.
export const kQueryCount = 2;

g.test('invalid_queryset_and_destination_buffer')
  .desc(
    `
Tests that resolve query set with invalid object.
- invalid GPUQuerySet that failed during creation.
- invalid destination buffer that failed during creation.
  `
  )
  .paramsSubcasesOnly([
    { querySetState: 'valid', destinationState: 'valid' }, // control case
    { querySetState: 'invalid', destinationState: 'valid' },
    { querySetState: 'valid', destinationState: 'invalid' },
  ] as const)
  .fn(async t => {
    const { querySetState, destinationState } = t.params;

    const querySet = t.createQuerySetWithState(querySetState);

    const destination = t.createBufferWithState(destinationState, {
      size: kQueryCount * 8,
      usage: GPUBufferUsage.QUERY_RESOLVE,
    });

    const encoder = t.createEncoder('non-pass');
    encoder.encoder.resolveQuerySet(querySet, 0, 1, destination, 0);
    // Finishing is valid only when both objects are valid.
    encoder.validateFinish(querySetState === 'valid' && destinationState === 'valid');
  });

g.test('first_query_and_query_count')
  .desc(
    `
Tests that resolve query set with invalid firstQuery and queryCount:
- firstQuery and/or queryCount out of range
  `
  )
  .paramsSubcasesOnly([
    { firstQuery: 0, queryCount: kQueryCount }, // control case
    { firstQuery: 0, queryCount: kQueryCount + 1 },
    { firstQuery: 1, queryCount: kQueryCount },
    { firstQuery: kQueryCount, queryCount: 1 },
  ])
  .fn(async t => {
    const { firstQuery, queryCount } = t.params;

    const querySet = t.device.createQuerySet({ type: 'occlusion', count: kQueryCount });
    const destination = t.device.createBuffer({
      size: kQueryCount * 8,
      usage: GPUBufferUsage.QUERY_RESOLVE,
    });

    const encoder = t.createEncoder('non-pass');
    encoder.encoder.resolveQuerySet(querySet, firstQuery, queryCount, destination, 0);
    // Valid iff the resolved range lies entirely within the query set.
    encoder.validateFinish(firstQuery + queryCount <= kQueryCount);
  });

g.test('destination_buffer_usage')
  .desc(
    `
Tests that resolve query set with invalid destinationBuffer:
- Buffer usage {with, without} QUERY_RESOLVE
  `
  )
  .paramsSubcasesOnly(u =>
    u //
      .combine('bufferUsage', [
        GPUConst.BufferUsage.STORAGE,
        GPUConst.BufferUsage.QUERY_RESOLVE, // control case
      ] as const)
  )
  .fn(async t => {
    const querySet = t.device.createQuerySet({ type: 'occlusion', count: kQueryCount });
    const destination = t.device.createBuffer({
      size: kQueryCount * 8,
      usage: t.params.bufferUsage,
    });

    const encoder = t.createEncoder('non-pass');
    encoder.encoder.resolveQuerySet(querySet, 0, kQueryCount, destination, 0);
    encoder.validateFinish(t.params.bufferUsage === GPUConst.BufferUsage.QUERY_RESOLVE);
  });

g.test('destination_offset_alignment')
  .desc(
    `
Tests that resolve query set with invalid destinationOffset:
- destinationOffset is not a multiple of 256
  `
  )
  .paramsSubcasesOnly(u => u.combine('destinationOffset', [0, 128, 256, 384]))
  .fn(async t => {
    const { destinationOffset } = t.params;
    const querySet = t.device.createQuerySet({ type: 'occlusion', count: kQueryCount });
    const destination = t.device.createBuffer({
      size: 512,
      usage: GPUBufferUsage.QUERY_RESOLVE,
    });

    const encoder = t.createEncoder('non-pass');
    encoder.encoder.resolveQuerySet(querySet, 0, kQueryCount, destination, destinationOffset);
    encoder.validateFinish(destinationOffset % 256 === 0);
  });

g.test('resolve_buffer_oob')
  .desc(
    `
Tests that resolve query set with the size oob:
- The size of destinationBuffer - destinationOffset < queryCount * 8
  `
  )
  .paramsSubcasesOnly(u =>
    u.combineWithParams([
      { queryCount: 2, bufferSize: 16, destinationOffset: 0, _success: true },
      { queryCount: 3, bufferSize: 16, destinationOffset: 0, _success: false },
      { queryCount: 2, bufferSize: 16, destinationOffset: 256, _success: false },
      { queryCount: 2, bufferSize: 272, destinationOffset: 256, _success: true },
      { queryCount: 2, bufferSize: 264, destinationOffset: 256, _success: false },
    ])
  )
  .fn(async t => {
    const { queryCount, bufferSize, destinationOffset, _success } = t.params;
    const querySet = t.device.createQuerySet({ type: 'occlusion', count: queryCount });
    const destination = t.device.createBuffer({
      size: bufferSize,
      usage: GPUBufferUsage.QUERY_RESOLVE,
    });

    const encoder = t.createEncoder('non-pass');
    encoder.encoder.resolveQuerySet(querySet, 0, queryCount, destination, destinationOffset);
    encoder.validateFinish(_success);
  });

g.test('query_set_buffer,device_mismatch')
  .desc(
    'Tests resolveQuerySet cannot be called with a query set or destination buffer created from another device'
  )
  .paramsSubcasesOnly([
    { querySetMismatched: false, bufferMismatched: false }, // control case
    { querySetMismatched: true, bufferMismatched: false },
    { querySetMismatched: false, bufferMismatched: true },
  ] as const)
  .unimplemented();
sarahM0/cts
src/webgpu/api/operation/render_pipeline/vertex_only_render_pipeline.spec.ts
export const description = `
Test vertex-only render pipeline.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';

// Fixture placeholder: currently adds nothing over GPUTest; helpers for
// vertex-only pipeline tests are expected to go here as tests are implemented.
class F extends GPUTest {}

export const g = makeTestGroup(F);

g.test('draw_depth_and_stencil_with_vertex_only_pipeline')
  .desc(
    `
TODO:
- Test drawing depth and stencil with vertex-only render pipelines by
  1. Create a color attachment and depth-stencil attachment of 4 pixels in a line, clear the color
     to RGBA(0.0, 0.0, 0.0, 0.0), depth to 0.0 and stencil to 0x0
  2. Use a depth and stencil test disabled vertex-only render pipeline to modify the depth of middle
     2 pixels to 0.5, while leaving stencil unchanged
  3. Use another depth and stencil test disabled vertex-only render pipeline to modify the stencil
     of right 2 pixels to 0x1, while leaving depth unchanged
  4. Use a complete render pipeline to draw all 4 pixels with color RGBA(0.0, 1.0, 0.0, 1.0), but
     with depth test requiring depth no less than 0.5 and stencil test requiring stencil equals to 0x1
  5. Validate that only the third pixel is of color RGBA(0.0, 1.0, 0.0, 1.0), and all other pixels
     are RGBA(0.0, 0.0, 0.0, 0.0).
`
  )
  .unimplemented();
sarahM0/cts
src/webgpu/api/validation/encoding/render_bundle.spec.ts
export const description = ` TODO: - test creating a render bundle, and if it's valid, test that executing it is not an error - color formats {all possible formats} {zero, one, multiple} - depth/stencil format {unset, all possible formats} - ? `; import { makeTestGroup } from '../../../../common/framework/test_group.js'; import { ValidationTest } from '../validation_test.js'; export const g = makeTestGroup(ValidationTest); g.test('render_bundles,device_mismatch') .desc( ` Tests executeBundles cannot be called with render bundles created from another device Test with two bundles to make sure all bundles can be validated: - bundle0 and bundle1 from same device - bundle0 and bundle1 from different device ` ) .paramsSubcasesOnly([ { bundle0Mismatched: false, bundle1Mismatched: false }, // control case { bundle0Mismatched: true, bundle1Mismatched: false }, { bundle0Mismatched: false, bundle1Mismatched: true }, ]) .fn(async t => { const { bundle0Mismatched, bundle1Mismatched } = t.params; const mismatched = bundle0Mismatched || bundle1Mismatched; if (mismatched) { await t.selectMismatchedDeviceOrSkipTestCase(undefined); } const descriptor: GPURenderBundleEncoderDescriptor = { colorFormats: ['rgba8unorm'], }; const bundle0Encoder = mismatched ? t.mismatchedDevice.createRenderBundleEncoder(descriptor) : t.device.createRenderBundleEncoder(descriptor); const bundle0 = bundle0Encoder.finish(); const bundle1Encoder = mismatched ? t.mismatchedDevice.createRenderBundleEncoder(descriptor) : t.device.createRenderBundleEncoder(descriptor); const bundle1 = bundle1Encoder.finish(); const encoder = t.createEncoder('render pass'); encoder.encoder.executeBundles([bundle0, bundle1]); encoder.validateFinish(!mismatched); });
sarahM0/cts
src/webgpu/api/operation/sampling/filter_mode.spec.ts
export const description = `
Tests the behavior of different filtering modes in minFilter/magFilter/mipmapFilter.

TODO:
- Test exact sampling results with small tolerance. Tests should differentiate between different
  values for all three filter modes to make sure none are missed or incorrect in implementations.
- (Likely unnecessary with the above.) Test exactly the expected number of samples are used.
  Test this by setting up a rendering and asserting how many different shades result.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';

// No tests are implemented yet; see the TODO in the description above.
export const g = makeTestGroup(GPUTest);
sarahM0/cts
src/common/internal/query/encode_selectively.ts
<reponame>sarahM0/cts<gh_stars>10-100 /** * Encodes a stringified TestQuery so that it can be placed in a `?q=` parameter in a URL. * * `encodeURIComponent` encodes in accordance with `application/x-www-form-urlencoded`, * but URLs don't actually have to be as strict as HTML form encoding * (we interpret this purely from JavaScript). * So we encode the component, then selectively convert some %-encoded escape codes * back to their original form for readability/copyability. */ export function encodeURIComponentSelectively(s: string): string { let ret = encodeURIComponent(s); ret = ret.replace(/%22/g, '"'); // for JSON strings ret = ret.replace(/%2C/g, ','); // for path separator, and JSON arrays ret = ret.replace(/%3A/g, ':'); // for big separator ret = ret.replace(/%3B/g, ';'); // for param separator ret = ret.replace(/%3D/g, '='); // for params (k=v) ret = ret.replace(/%5B/g, '['); // for JSON arrays ret = ret.replace(/%5D/g, ']'); // for JSON arrays ret = ret.replace(/%7B/g, '{'); // for JSON objects ret = ret.replace(/%7D/g, '}'); // for JSON objects ret = ret.replace(/%E2%9C%97/g, '✗'); // for jsUndefinedMagicValue return ret; }
sarahM0/cts
src/webgpu/api/validation/buffer/destroy.spec.ts
export const description = `
Validation tests for GPUBuffer.destroy.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { kBufferUsages } from '../../../capability_info.js';
import { GPUConst } from '../../../constants.js';
import { ValidationTest } from '../validation_test.js';

export const g = makeTestGroup(ValidationTest);

g.test('all_usages')
  .desc('Test destroying buffers of every usage type.')
  .paramsSubcasesOnly(u =>
    u //
      .combine('usage', kBufferUsages)
  )
  .fn(async t => {
    const { usage } = t.params;
    const buf = t.device.createBuffer({
      size: 4,
      usage,
    });

    // destroy() must be valid for a buffer of any usage.
    buf.destroy();
  });

g.test('error_buffer')
  .desc('Test that error buffers may be destroyed without generating validation errors.')
  .fn(async t => {
    const buf = t.getErrorBuffer();
    buf.destroy();
  });

g.test('twice')
  .desc(
    `Test that destroying a buffer more than once is allowed.
  - Tests buffers which are mapped at creation or not
  - Tests buffers with various usages`
  )
  .paramsSubcasesOnly(u =>
    u //
      .combine('mappedAtCreation', [false, true])
      .combineWithParams([
        { size: 4, usage: GPUConst.BufferUsage.COPY_SRC },
        { size: 4, usage: GPUConst.BufferUsage.MAP_WRITE | GPUConst.BufferUsage.COPY_SRC },
        { size: 4, usage: GPUConst.BufferUsage.COPY_DST | GPUConst.BufferUsage.MAP_READ },
      ])
  )
  .fn(async t => {
    const buf = t.device.createBuffer(t.params);

    // Destroying twice must not generate an error.
    buf.destroy();
    buf.destroy();
  });

g.test('while_mapped')
  .desc(
    `Test destroying buffers while mapped or after being unmapped.
  - Tests {mappable, unmappable mapAtCreation, mappable mapAtCreation}
  - Tests while {mapped, mapped at creation, unmapped}`
  )
  .paramsSubcasesOnly(u =>
    u //
      .combine('mappedAtCreation', [false, true])
      .combine('unmapBeforeDestroy', [false, true])
      .combineWithParams([
        { usage: GPUConst.BufferUsage.COPY_SRC },
        { usage: GPUConst.BufferUsage.MAP_WRITE | GPUConst.BufferUsage.COPY_SRC },
        { usage: GPUConst.BufferUsage.COPY_DST | GPUConst.BufferUsage.MAP_READ },
        {
          usage: GPUConst.BufferUsage.MAP_WRITE | GPUConst.BufferUsage.COPY_SRC,
          mapMode: GPUConst.MapMode.WRITE,
        },
        {
          usage: GPUConst.BufferUsage.COPY_DST | GPUConst.BufferUsage.MAP_READ,
          mapMode: GPUConst.MapMode.READ,
        },
      ])
      // An unmappable buffer with no mappedAtCreation has no way to become mapped; skip it.
      .unless(p => p.mappedAtCreation === false && p.mapMode === undefined)
  )
  .fn(async t => {
    const { usage, mapMode, mappedAtCreation, unmapBeforeDestroy } = t.params;
    const buf = t.device.createBuffer({
      size: 4,
      usage,
      mappedAtCreation,
    });

    if (mapMode !== undefined) {
      // mapAsync requires the buffer to be unmapped first.
      if (mappedAtCreation) {
        buf.unmap();
      }
      await buf.mapAsync(mapMode);
    }
    if (unmapBeforeDestroy) {
      buf.unmap();
    }

    // Destroy must be valid whether or not the buffer is currently mapped.
    buf.destroy();
  });
sarahM0/cts
src/stress/memory/churn.spec.ts
export const description = `
Stress tests covering robustness in the presence of heavy buffer and texture memory churn.
`;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';

export const g = makeTestGroup(GPUTest);

// Stub: registered so the case appears in listings; the body is not yet written.
g.test('churn')
  .desc(
    `Allocates and populates a huge number of buffers and textures over time, retaining some while
dropping or explicitly destroying others. When finished, verifies the expected contents of any
remaining buffers and textures.`
  )
  .unimplemented();
sarahM0/cts
src/webgpu/api/operation/render_pipeline/alpha_to_coverage.spec.ts
export const description = `
TODO:
- for sampleCount = 4, alphaToCoverageEnabled = true and various combinations of:
    - rasterization masks
    - increasing alpha values of the first color output including { < 0, = 0, = 1/16, = 2/16, ..., = 15/16, = 1, > 1 }
    - alpha values of the second color output = { 0, 0.5, 1.0 }.
- test that for a single pixel in { first, second } { color, depth, stencil } output the final sample mask is applied to it, moreover:
    - if alpha is 0.0 or less then alpha to coverage mask is 0x0,
    - if alpha is 1.0 or greater then alpha to coverage mask is 0xFFFFFFFF,
    - that the number of bits in the alpha to coverage mask is non-decreasing,
    - that the computation of alpha to coverage mask doesn't depend on any other color output than first,
    - (not included in the spec): that once a sample is included in the alpha to coverage sample mask it will be included for any alpha greater than or equal to the current value.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';

// Placeholder test group: all cases are still TODO (see description above).
export const g = makeTestGroup(GPUTest);
sarahM0/cts
src/unittests/params_builder_and_utils.spec.ts
export const description = `
Unit tests for parameterization helpers.
`;

import {
  kUnitCaseParamsBuilder,
  CaseSubcaseIterable,
  ParamsBuilderBase,
  builderIterateCasesWithSubcases,
} from '../common/framework/params_builder.js';
import { makeTestGroup } from '../common/framework/test_group.js';
import { mergeParams, publicParamsEquals } from '../common/internal/params_utils.js';
import { assert, objectEquals } from '../common/util/util.js';

import { UnitTest } from './unit_test.js';

class ParamsTest extends UnitTest {
  // Checks that iterating `act` produces exactly the [caseParams, subcaseParams[]]
  // pairs listed in `exp` (subcase iterables are materialized for deep comparison).
  expectParams<CaseP, SubcaseP>(
    act: ParamsBuilderBase<CaseP, SubcaseP>,
    exp: CaseSubcaseIterable<{}, {}>
  ): void {
    const a = Array.from(builderIterateCasesWithSubcases(act)).map(([caseP, subcases]) => [
      caseP,
      subcases ? Array.from(subcases) : undefined,
    ]);
    const e = Array.from(exp);
    this.expect(
      objectEquals(a, e),
      `
got ${JSON.stringify(a)}
expected ${JSON.stringify(e)}`
    );
  }
}

export const g = makeTestGroup(ParamsTest);

// Shorthand for the root builder used throughout these tests.
const u = kUnitCaseParamsBuilder;

g.test('combine').fn(t => {
  // Case-level combine, with inferred and `as const` (literal) element types.
  t.expectParams<{ hello: number }, {}>(u.combine('hello', [1, 2, 3]), [
    [{ hello: 1 }, undefined],
    [{ hello: 2 }, undefined],
    [{ hello: 3 }, undefined],
  ]);
  t.expectParams<{ hello: 1 | 2 | 3 }, {}>(u.combine('hello', [1, 2, 3] as const), [
    [{ hello: 1 }, undefined],
    [{ hello: 2 }, undefined],
    [{ hello: 3 }, undefined],
  ]);
  // Subcase-level combine: one case with three subcases.
  t.expectParams<{}, { hello: number }>(u.beginSubcases().combine('hello', [1, 2, 3]), [
    [{}, [{ hello: 1 }, { hello: 2 }, { hello: 3 }]],
  ]);
  t.expectParams<{}, { hello: 1 | 2 | 3 }>(u.beginSubcases().combine('hello', [1, 2, 3] as const), [
    [{}, [{ hello: 1 }, { hello: 2 }, { hello: 3 }]],
  ]);
});

g.test('empty').fn(t => {
  // The root builder yields a single empty case (with no subcases list).
  t.expectParams<{}, {}>(u, [
    [{}, undefined], //
  ]);
  // beginSubcases() alone yields a single empty case with one empty subcase.
  t.expectParams<{}, {}>(u.beginSubcases(), [
    [{}, [{}]], //
  ]);
});

g.test('combine,zeroes_and_ones').fn(t => {
  // Combining with an empty list annihilates the product (cartesian product with 0).
  t.expectParams<{}, {}>(u.combineWithParams([]).combineWithParams([]), []);
  t.expectParams<{}, {}>(u.combineWithParams([]).combineWithParams([{}]), []);
  t.expectParams<{}, {}>(u.combineWithParams([{}]).combineWithParams([]), []);
  t.expectParams<{}, {}>(u.combineWithParams([{}]).combineWithParams([{}]), [
    [{}, undefined], //
  ]);

  t.expectParams<{}, {}>(u.combine('x', []).combine('y', []), []);
  t.expectParams<{}, {}>(u.combine('x', []).combine('y', [1]), []);
  t.expectParams<{}, {}>(u.combine('x', [1]).combine('y', []), []);
  t.expectParams<{}, {}>(u.combine('x', [1]).combine('y', [1]), [
    [{ x: 1, y: 1 }, undefined], //
  ]);
});

g.test('combine,mixed').fn(t => {
  // Full cartesian product of combine() and combineWithParams() stages.
  t.expectParams<{ x: number; y: string; p: number | undefined; q: number | undefined }, {}>(
    u
      .combine('x', [1, 2])
      .combine('y', ['a', 'b'])
      .combineWithParams([{ p: 4 }, { q: 5 }])
      .combineWithParams([{}]),
    [
      [{ x: 1, y: 'a', p: 4 }, undefined],
      [{ x: 1, y: 'a', q: 5 }, undefined],
      [{ x: 1, y: 'b', p: 4 }, undefined],
      [{ x: 1, y: 'b', q: 5 }, undefined],
      [{ x: 2, y: 'a', p: 4 }, undefined],
      [{ x: 2, y: 'a', q: 5 }, undefined],
      [{ x: 2, y: 'b', p: 4 }, undefined],
      [{ x: 2, y: 'b', q: 5 }, undefined],
    ]
  );
});

g.test('filter').fn(t => {
  // Case-level filter keeps only matching cases.
  t.expectParams<{ a: boolean; x: number | undefined; y: number | undefined }, {}>(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .filter(p => p.a),
    [
      [{ a: true, x: 1 }, undefined], //
    ]
  );
  // Subcase-level filter over case params: a case whose subcases all filter out disappears.
  t.expectParams<{ a: boolean; x: number | undefined; y: number | undefined }, {}>(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .beginSubcases()
      .filter(p => p.a),
    [
      [{ a: true, x: 1 }, [{}]], //
      // Case with no subcases is filtered out.
    ]
  );
  // Subcase-level filter over subcase params.
  t.expectParams<{}, { a: boolean; x: number | undefined; y: number | undefined }>(
    u
      .beginSubcases()
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .filter(p => p.a),
    [
      [{}, [{ a: true, x: 1 }]], //
    ]
  );
});

g.test('unless').fn(t => {
  // unless() is the complement of filter().
  t.expectParams<{ a: boolean; x: number | undefined; y: number | undefined }, {}>(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .unless(p => p.a),
    [
      [{ a: false, y: 2 }, undefined], //
    ]
  );
  t.expectParams<{ a: boolean; x: number | undefined; y: number | undefined }, {}>(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .beginSubcases()
      .unless(p => p.a),
    [
      // Case with no subcases is filtered out.
      [{ a: false, y: 2 }, [{}]], //
    ]
  );
  t.expectParams<{}, { a: boolean; x: number | undefined; y: number | undefined }>(
    u
      .beginSubcases()
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .unless(p => p.a),
    [
      [{}, [{ a: false, y: 2 }]], //
    ]
  );
});

g.test('expandP').fn(t => {
  // simple
  t.expectParams<{}, {}>(
    u.expandWithParams(function* () {}),
    []
  );
  t.expectParams<{}, {}>(
    u.expandWithParams(function* () {
      yield {};
    }),
    [[{}, undefined]]
  );
  // The generator may delegate to another builder's iterator.
  t.expectParams<{ z: number | undefined; w: number | undefined }, {}>(
    u.expandWithParams(function* () {
      yield* kUnitCaseParamsBuilder.combine('z', [3, 4]);
      yield { w: 5 };
    }),
    [
      [{ z: 3 }, undefined],
      [{ z: 4 }, undefined],
      [{ w: 5 }, undefined],
    ]
  );
  t.expectParams<{}, { z: number | undefined; w: number | undefined }>(
    u.beginSubcases().expandWithParams(function* () {
      yield* kUnitCaseParamsBuilder.combine('z', [3, 4]);
      yield { w: 5 };
    }),
    [[{}, [{ z: 3 }, { z: 4 }, { w: 5 }]]]
  );

  // more complex: expansion depends on the incoming case params.
  t.expectParams<
    {
      a: boolean;
      x: number | undefined;
      y: number | undefined;
      z: number | undefined;
      w: number | undefined;
    },
    {}
  >(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .expandWithParams(function* (p) {
        if (p.a) {
          yield { z: 3 };
          yield { z: 4 };
        } else {
          yield { w: 5 };
        }
      }),
    [
      [{ a: true, x: 1, z: 3 }, undefined],
      [{ a: true, x: 1, z: 4 }, undefined],
      [{ a: false, y: 2, w: 5 }, undefined],
    ]
  );
  t.expectParams<
    { a: boolean; x: number | undefined; y: number | undefined },
    { z: number | undefined; w: number | undefined }
  >(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .beginSubcases()
      .expandWithParams(function* (p) {
        if (p.a) {
          yield { z: 3 };
          yield { z: 4 };
        } else {
          yield { w: 5 };
        }
      }),
    [
      [{ a: true, x: 1 }, [{ z: 3 }, { z: 4 }]],
      [{ a: false, y: 2 }, [{ w: 5 }]],
    ]
  );
});

g.test('expand').fn(t => {
  // simple
  t.expectParams<{}, {}>(
    u.expand('x', function* () {}),
    []
  );
  t.expectParams<{ z: number }, {}>(
    u.expand('z', function* () {
      yield 3;
      yield 4;
    }),
    [
      [{ z: 3 }, undefined],
      [{ z: 4 }, undefined],
    ]
  );
  t.expectParams<{}, { z: number }>(
    u.beginSubcases().expand('z', function* () {
      yield 3;
      yield 4;
    }),
    [[{}, [{ z: 3 }, { z: 4 }]]]
  );

  // more complex: values depend on the incoming case params.
  t.expectParams<{ a: boolean; x: number | undefined; y: number | undefined; z: number }, {}>(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .expand('z', function* (p) {
        if (p.a) {
          yield 3;
        } else {
          yield 5;
        }
      }),
    [
      [{ a: true, x: 1, z: 3 }, undefined],
      [{ a: false, y: 2, z: 5 }, undefined],
    ]
  );
  t.expectParams<{ a: boolean; x: number | undefined; y: number | undefined }, { z: number }>(
    u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, y: 2 },
      ])
      .beginSubcases()
      .expand('z', function* (p) {
        if (p.a) {
          yield 3;
        } else {
          yield 5;
        }
      }),
    [
      [{ a: true, x: 1 }, [{ z: 3 }]],
      [{ a: false, y: 2 }, [{ z: 5 }]],
    ]
  );
});

g.test('invalid,shadowing').fn(t => {
  // Existing CaseP is shadowed by a new CaseP.
  {
    const p = u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, x: 2 },
      ])
      .expandWithParams(function* (p) {
        if (p.a) {
          yield { x: 3 };
        } else {
          yield { w: 5 };
        }
      });
    // Iterating causes e.g. mergeParams({x:1}, {x:3}), which fails.
    t.shouldThrow('Error', () => {
      Array.from(p.iterateCasesWithSubcases());
    });
  }
  // Existing SubcaseP is shadowed by a new SubcaseP.
  {
    const p = u
      .beginSubcases()
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, x: 2 },
      ])
      .expandWithParams(function* (p) {
        if (p.a) {
          yield { x: 3 };
        } else {
          yield { w: 5 };
        }
      });
    // Iterating causes e.g. mergeParams({x:1}, {x:3}), which fails.
    t.shouldThrow('Error', () => {
      Array.from(p.iterateCasesWithSubcases());
    });
  }
  // Existing CaseP is shadowed by a new SubcaseP.
  {
    const p = u
      .combineWithParams([
        { a: true, x: 1 },
        { a: false, x: 2 },
      ])
      .beginSubcases()
      .expandWithParams(function* (p) {
        if (p.a) {
          yield { x: 3 };
        } else {
          yield { w: 5 };
        }
      });
    const cases = Array.from(p.iterateCasesWithSubcases());
    // Iterating cases is fine...
    for (const [caseP, subcases] of cases) {
      assert(subcases !== undefined);
      // Iterating subcases is fine...
      for (const subcaseP of subcases) {
        if (caseP.a) {
          assert(subcases !== undefined);
          // Only errors once we try to e.g. mergeParams({x:1}, {x:3}).
          t.shouldThrow('Error', () => {
            mergeParams(caseP, subcaseP);
          });
        } else {
          mergeParams(caseP, subcaseP);
        }
      }
    }
  }
});

g.test('undefined').fn(t => {
  // An explicitly-undefined key is distinct from an absent key.
  t.expect(!publicParamsEquals({ a: undefined }, {}));
  t.expect(!publicParamsEquals({}, { a: undefined }));
});

g.test('private').fn(t => {
  // Keys with a leading underscore are private and ignored by equality.
  t.expect(publicParamsEquals({ _a: 0 }, {}));
  t.expect(publicParamsEquals({}, { _a: 0 }));
});

g.test('value,array').fn(t => {
  // Array-valued params are carried through by reference/deep-equality.
  t.expectParams<{ a: number[] }, {}>(u.combineWithParams([{ a: [1, 2] }]), [
    [{ a: [1, 2] }, undefined], //
  ]);
  t.expectParams<{}, { a: number[] }>(u.beginSubcases().combineWithParams([{ a: [1, 2] }]), [
    [{}, [{ a: [1, 2] }]], //
  ]);
});

g.test('value,object').fn(t => {
  // Object-valued params are likewise supported.
  t.expectParams<{ a: { [k: string]: number } }, {}>(u.combineWithParams([{ a: { x: 1 } }]), [
    [{ a: { x: 1 } }, undefined], //
  ]);
  t.expectParams<{}, { a: { [k: string]: number } }>(
    u.beginSubcases().combineWithParams([{ a: { x: 1 } }]),
    [
      [{}, [{ a: { x: 1 } }]], //
    ]
  );
});
sarahM0/cts
src/stress/device/bind_group_allocation.spec.ts
export const description = `
Stress tests for allocation of GPUBindGroup objects through GPUDevice.
`;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';

export const g = makeTestGroup(GPUTest);

g.test('coexisting')
  .desc(`Tests allocation of many coexisting GPUBindGroup objects.`)
  .fn(t => {
    const kNumGroups = 1_000_000;
    // A single storage buffer shared by every bind group.
    const buffer = t.device.createBuffer({
      size: 64,
      usage: GPUBufferUsage.STORAGE,
    });
    const layout = t.device.createBindGroupLayout({
      entries: [
        {
          binding: 0,
          visibility: GPUShaderStage.COMPUTE,
          buffer: { type: 'storage' },
        },
      ],
    });
    // All groups are retained in this array, so they coexist for the
    // duration of the loop (none are GC-eligible).
    const bindGroups = [];
    for (let i = 0; i < kNumGroups; ++i) {
      bindGroups.push(
        t.device.createBindGroup({
          layout,
          entries: [{ binding: 0, resource: { buffer } }],
        })
      );
    }
  });

g.test('continuous')
  .desc(
    `Tests allocation and implicit GC of many GPUBindGroup objects over time. Objects are
sequentially created and dropped for GC over a very large number of iterations.`
  )
  .fn(t => {
    const kNumGroups = 5_000_000;
    const buffer = t.device.createBuffer({
      size: 64,
      usage: GPUBufferUsage.STORAGE,
    });
    const layout = t.device.createBindGroupLayout({
      entries: [
        {
          binding: 0,
          visibility: GPUShaderStage.COMPUTE,
          buffer: { type: 'storage' },
        },
      ],
    });
    // Each created group is immediately dropped; the implementation and GC
    // must keep up with the churn.
    for (let i = 0; i < kNumGroups; ++i) {
      t.device.createBindGroup({
        layout,
        entries: [{ binding: 0, resource: { buffer } }],
      });
    }
  });
sarahM0/cts
src/stress/queries/timestamps.spec.ts
export const description = `
Stress tests for timestamp queries.
`;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';

export const g = makeTestGroup(GPUTest);

// All cases below are stubs: registered so they appear in listings, bodies TODO.

g.test('command_encoder_one_query_set')
  .desc(
    `Tests a huge number of timestamp queries over a single query set between render passes on a
single command encoder.`
  )
  .unimplemented();

g.test('command_encoder_many_query_sets')
  .desc(
    `Tests a huge number of timestamp queries over many query sets between render passes on a
single command encoder.`
  )
  .unimplemented();

g.test('render_pass_one_query_set')
  .desc(
    `Tests a huge number of timestamp queries over a single query set in a single render pass.`
  )
  .unimplemented();

g.test('render_pass_many_query_sets')
  .desc(
    `Tests a huge number of timestamp queries over a huge number of query sets in a single render
pass.`
  )
  .unimplemented();

g.test('compute_pass_one_query_set')
  .desc(
    `Tests a huge number of timestamp queries over a single query set in a single compute pass.`
  )
  .unimplemented();

g.test('compute_pass_many_query_sets')
  .desc(
    `Tests a huge number of timestamp queries over a huge number of query sets in a single compute
pass.`
  )
  .unimplemented();
sarahM0/cts
src/webgpu/api/validation/attachment_compatibility.spec.ts
export const description = `
Validation for attachment compatibility between render passes, bundles, and pipelines

TODO: Add sparse color attachment compatibility test when defined by specification
`;

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { range } from '../../../common/util/util.js';
import {
  kRegularTextureFormats,
  kSizedDepthStencilFormats,
  kUnsizedDepthStencilFormats,
  kTextureSampleCounts,
  kMaxColorAttachments,
  kTextureFormatInfo,
} from '../../capability_info.js';

import { ValidationTest } from './validation_test.js';

// Every legal color attachment count: 1 .. kMaxColorAttachments.
const kColorAttachmentCounts = range(kMaxColorAttachments, i => i + 1);
// Every depth/stencil format, plus `undefined` meaning "no depth/stencil attachment".
const kDepthStencilAttachmentFormats = [
  undefined,
  ...kSizedDepthStencilFormats,
  ...kUnsizedDepthStencilFormats,
] as const;

class F extends ValidationTest {
  // Creates a 16x16 RENDER_ATTACHMENT texture of `format` and returns a view of it.
  createAttachmentTextureView(format: GPUTextureFormat, sampleCount?: number) {
    return this.device
      .createTexture({
        // Size matching the "arbitrary" size used by ValidationTest helpers.
        size: [16, 16, 1],
        format,
        usage: GPUTextureUsage.RENDER_ATTACHMENT,
        sampleCount,
      })
      .createView();
  }

  // Builds a color attachment backed by a fresh texture of `format`.
  createColorAttachment(
    format: GPUTextureFormat,
    sampleCount?: number
  ): GPURenderPassColorAttachment {
    return {
      view: this.createAttachmentTextureView(format, sampleCount),
      loadValue: [0, 0, 0, 0],
      storeOp: 'store',
    };
  }

  // Builds a depth/stencil attachment backed by a fresh texture of `format`.
  createDepthAttachment(
    format: GPUTextureFormat,
    sampleCount?: number
  ): GPURenderPassDepthStencilAttachment {
    return {
      view: this.createAttachmentTextureView(format, sampleCount),
      depthLoadValue: 0,
      depthStoreOp: 'discard',
      stencilLoadValue: 1,
      stencilStoreOp: 'discard',
    };
  }

  // Creates a minimal render pipeline (trivial vertex, empty fragment) with the
  // given color targets, optional depth/stencil state, and sample count.
  createRenderPipeline(
    targets: Iterable<GPUColorTargetState>,
    depthStencil?: GPUDepthStencilState,
    sampleCount?: number
  ) {
    return this.device.createRenderPipeline({
      vertex: {
        module: this.device.createShaderModule({
          code: `
            [[stage(vertex)]] fn main() -> [[builtin(position)]] vec4<f32> {
              return vec4<f32>(0.0, 0.0, 0.0, 0.0);
            }`,
        }),
        entryPoint: 'main',
      },
      fragment: {
        module: this.device.createShaderModule({
          code: '[[stage(fragment)]] fn main() {}',
        }),
        entryPoint: 'main',
        targets,
      },
      primitive: { topology: 'triangle-list' },
      depthStencil,
      multisample: { count: sampleCount },
    });
  }
}

export const g = makeTestGroup(F);

// Only color-renderable regular formats may be used as color attachments.
const kColorAttachmentFormats = kRegularTextureFormats.filter(format => {
  const info = kTextureFormatInfo[format];
  return info.color && info.renderable;
});

g.test('render_pass_and_bundle,color_format')
  .desc('Test that color attachment formats in render passes and bundles must match.')
  .paramsSubcasesOnly(u =>
    u //
      .combine('passFormat', kColorAttachmentFormats)
      .combine('bundleFormat', kColorAttachmentFormats)
  )
  .fn(t => {
    const { passFormat, bundleFormat } = t.params;
    const bundleEncoder = t.device.createRenderBundleEncoder({
      colorFormats: [bundleFormat],
    });
    const bundle = bundleEncoder.finish();

    const { encoder, validateFinishAndSubmit } = t.createEncoder('non-pass');
    const pass = encoder.beginRenderPass({
      colorAttachments: [t.createColorAttachment(passFormat)],
    });
    pass.executeBundles([bundle]);
    pass.endPass();

    // Valid iff pass and bundle formats agree.
    validateFinishAndSubmit(passFormat === bundleFormat, true);
  });

g.test('render_pass_and_bundle,color_count')
  .desc(
    `
  Test that the number of color attachments in render passes and bundles must match.

  TODO: Add sparse color attachment compatibility test when defined by specification
  `
  )
  .paramsSubcasesOnly(u =>
    u //
      .combine('passCount', kColorAttachmentCounts)
      .combine('bundleCount', kColorAttachmentCounts)
  )
  .fn(t => {
    const { passCount, bundleCount } = t.params;
    const bundleEncoder = t.device.createRenderBundleEncoder({
      colorFormats: range(bundleCount, () => 'rgba8unorm'),
    });
    const bundle = bundleEncoder.finish();

    const { encoder, validateFinishAndSubmit } = t.createEncoder('non-pass');
    const pass = encoder.beginRenderPass({
      colorAttachments: range(passCount, () => t.createColorAttachment('rgba8unorm')),
    });
    pass.executeBundles([bundle]);
    pass.endPass();
    validateFinishAndSubmit(passCount === bundleCount, true);
  });

g.test('render_pass_and_bundle,depth_format')
  .desc('Test that the depth attachment format in render passes and bundles must match.')
  .paramsSubcasesOnly(u =>
    u //
      .combine('passFormat', kDepthStencilAttachmentFormats)
      .combine('bundleFormat', kDepthStencilAttachmentFormats)
  )
  .fn(async t => {
    const { passFormat, bundleFormat } = t.params;
    // Some depth/stencil formats are behind features; select a capable device or skip.
    await t.selectDeviceForTextureFormatOrSkipTestCase([passFormat, bundleFormat]);

    const bundleEncoder = t.device.createRenderBundleEncoder({
      colorFormats: ['rgba8unorm'],
      depthStencilFormat: bundleFormat,
    });
    const bundle = bundleEncoder.finish();

    const { encoder, validateFinishAndSubmit } = t.createEncoder('non-pass');
    const pass = encoder.beginRenderPass({
      colorAttachments: [t.createColorAttachment('rgba8unorm')],
      depthStencilAttachment:
        passFormat !== undefined ? t.createDepthAttachment(passFormat) : undefined,
    });
    pass.executeBundles([bundle]);
    pass.endPass();
    validateFinishAndSubmit(passFormat === bundleFormat, true);
  });

g.test('render_pass_and_bundle,sample_count')
  .desc('Test that the sample count in render passes and bundles must match.')
  .paramsSubcasesOnly(u =>
    u //
      .combine('renderSampleCount', kTextureSampleCounts)
      .combine('bundleSampleCount', kTextureSampleCounts)
  )
  .fn(t => {
    const { renderSampleCount, bundleSampleCount } = t.params;
    const bundleEncoder = t.device.createRenderBundleEncoder({
      colorFormats: ['rgba8unorm'],
      sampleCount: bundleSampleCount,
    });
    const bundle = bundleEncoder.finish();

    const { encoder, validateFinishAndSubmit } = t.createEncoder('non-pass');
    const pass = encoder.beginRenderPass({
      colorAttachments: [t.createColorAttachment('rgba8unorm', renderSampleCount)],
    });
    pass.executeBundles([bundle]);
    pass.endPass();
    validateFinishAndSubmit(renderSampleCount === bundleSampleCount, true);
  });

g.test('render_pass_or_bundle_and_pipeline,color_format')
  .desc(
    `
Test that color attachment formats in render passes or bundles match the pipeline color format.
`
  )
  .params(u =>
    u
      .combine('encoderType', ['render pass', 'render bundle'] as const)
      .beginSubcases()
      .combine('encoderFormat', kColorAttachmentFormats)
      .combine('pipelineFormat', kColorAttachmentFormats)
  )
  .fn(t => {
    const { encoderType, encoderFormat, pipelineFormat } = t.params;
    const pipeline = t.createRenderPipeline([{ format: pipelineFormat, writeMask: 0 }]);

    const { encoder, validateFinishAndSubmit } = t.createEncoder(encoderType, {
      attachmentInfo: { colorFormats: [encoderFormat] },
    });
    encoder.setPipeline(pipeline);
    validateFinishAndSubmit(encoderFormat === pipelineFormat, true);
  });

g.test('render_pass_or_bundle_and_pipeline,color_count')
  .desc(
    `
Test that the number of color attachments in render passes or bundles match the pipeline color
count.

TODO: Add sparse color attachment compatibility test when defined by specification
`
  )
  .params(u =>
    u
      .combine('encoderType', ['render pass', 'render bundle'] as const)
      .beginSubcases()
      .combine('encoderCount', kColorAttachmentCounts)
      .combine('pipelineCount', kColorAttachmentCounts)
  )
  .fn(t => {
    const { encoderType, encoderCount, pipelineCount } = t.params;
    const pipeline = t.createRenderPipeline(
      range(pipelineCount, () => ({ format: 'rgba8unorm', writeMask: 0 }))
    );

    const { encoder, validateFinishAndSubmit } = t.createEncoder(encoderType, {
      attachmentInfo: { colorFormats: range(encoderCount, () => 'rgba8unorm') },
    });
    encoder.setPipeline(pipeline);
    validateFinishAndSubmit(encoderCount === pipelineCount, true);
  });

g.test('render_pass_or_bundle_and_pipeline,depth_format')
  .desc(
    `
Test that the depth attachment format in render passes or bundles match the pipeline depth format.
`
  )
  .params(u =>
    u
      .combine('encoderType', ['render pass', 'render bundle'] as const)
      .beginSubcases()
      .combine('encoderFormat', kDepthStencilAttachmentFormats)
      .combine('pipelineFormat', kDepthStencilAttachmentFormats)
  )
  .fn(async t => {
    const { encoderType, encoderFormat, pipelineFormat } = t.params;
    // Some depth/stencil formats are behind features; select a capable device or skip.
    await t.selectDeviceForTextureFormatOrSkipTestCase([encoderFormat, pipelineFormat]);

    const pipeline = t.createRenderPipeline(
      [{ format: 'rgba8unorm', writeMask: 0 }],
      pipelineFormat !== undefined ? { format: pipelineFormat } : undefined
    );

    const { encoder, validateFinishAndSubmit } = t.createEncoder(encoderType, {
      attachmentInfo: { colorFormats: ['rgba8unorm'], depthStencilFormat: encoderFormat },
    });
    encoder.setPipeline(pipeline);
    validateFinishAndSubmit(encoderFormat === pipelineFormat, true);
  });

g.test('render_pass_or_bundle_and_pipeline,sample_count')
  .desc(
    `
Test that the sample count in render passes or bundles match the pipeline sample count for both
color texture and depthstencil texture.
`
  )
  .params(u =>
    u
      .combine('encoderType', ['render pass', 'render bundle'] as const)
      .combine('attachmentType', ['color', 'depthstencil'] as const)
      .beginSubcases()
      .combine('encoderSampleCount', kTextureSampleCounts)
      .combine('pipelineSampleCount', kTextureSampleCounts)
  )
  .fn(t => {
    const { encoderType, attachmentType, encoderSampleCount, pipelineSampleCount } = t.params;

    // Exercise the sample-count match through either the color or depth/stencil attachment.
    const colorFormats = attachmentType === 'color' ? ['rgba8unorm' as const] : [];
    const depthStencilFormat =
      attachmentType === 'depthstencil' ? ('depth24plus-stencil8' as const) : undefined;

    const pipeline = t.createRenderPipeline(
      colorFormats.map(format => ({ format, writeMask: 0 })),
      depthStencilFormat ? { format: depthStencilFormat } : undefined,
      pipelineSampleCount
    );

    const { encoder, validateFinishAndSubmit } = t.createEncoder(encoderType, {
      attachmentInfo: { colorFormats, depthStencilFormat, sampleCount: encoderSampleCount },
    });
    encoder.setPipeline(pipeline);
    validateFinishAndSubmit(encoderSampleCount === pipelineSampleCount, true);
  });
sarahM0/cts
src/webgpu/util/copy_to_texture.ts
import { GPUTest } from '../gpu_test.js'; import { checkElementsEqual, checkElementsBetween } from './check_contents.js'; import { align } from './math.js'; import { kBytesPerRowAlignment } from './texture/layout.js'; export function isFp16Format(format: GPUTextureFormat): boolean { switch (format) { case 'r16float': case 'rg16float': case 'rgba16float': return true; default: return false; } } export class CopyToTextureUtils extends GPUTest { // TODO(crbug.com/dawn/868): Should be possible to consolidate this along with texture checking checkCopyExternalImageResult( src: GPUBuffer, expected: ArrayBufferView, width: number, height: number, bytesPerPixel: number, isFp16: boolean ): void { const exp = new Uint8Array(expected.buffer, expected.byteOffset, expected.byteLength); const rowPitch = align(width * bytesPerPixel, kBytesPerRowAlignment); const readbackPromise = this.readGPUBufferRangeTyped(src, { type: Uint8Array, typedLength: rowPitch * height, }); this.eventualAsyncExpectation(async niceStack => { const readback = await readbackPromise; const check = this.checkBufferWithRowPitch( readback.data, exp, width, height, rowPitch, bytesPerPixel, isFp16 ); if (check !== undefined) { niceStack.message = check; this.rec.expectationFailed(niceStack); } readback.cleanup(); }); } // TODO(crbug.com/dawn/868): Should be possible to consolidate this along with texture checking checkBufferWithRowPitch( actual: Uint8Array, exp: Uint8Array, width: number, height: number, rowPitch: number, bytesPerPixel: number, isFp16: boolean ): string | undefined { const bytesPerRow = width * bytesPerPixel; // When dst format is fp16 formats, the expectation and real result always has 1 bit difference in the ending // (e.g. CC vs CD) if there needs some alpha ops (if alpha channel is not 0.0 or 1.0). Suspect it is errors when // doing encoding. We check fp16 dst texture format with 1-bit ULP tolerance. 
if (isFp16) { for (let y = 0; y < height; ++y) { const expRow = exp.subarray(y * bytesPerRow, bytesPerRow); const checkResult = checkElementsBetween(actual.subarray(y * rowPitch, bytesPerRow), [ i => (expRow[i] > 0 ? expRow[i] - 1 : expRow[i]), i => expRow[i] + 1, ]); if (checkResult !== undefined) return `on row ${y}: ${checkResult}`; } } else { for (let y = 0; y < height; ++y) { const checkResult = checkElementsEqual( actual.subarray(y * rowPitch, bytesPerRow), exp.subarray(y * bytesPerRow, bytesPerRow) ); if (checkResult !== undefined) return `on row ${y}: ${checkResult}`; } } return undefined; } doTestAndCheckResult( imageCopyExternalImage: GPUImageCopyExternalImage, dstTextureCopyView: GPUImageCopyTextureTagged, copySize: GPUExtent3DDict, bytesPerPixel: number, expectedData: Uint8ClampedArray, isFp16: boolean ): void { this.device.queue.copyExternalImageToTexture( imageCopyExternalImage, dstTextureCopyView, copySize ); const externalImage = imageCopyExternalImage.source; const dstTexture = dstTextureCopyView.texture; const bytesPerRow = align(externalImage.width * bytesPerPixel, kBytesPerRowAlignment); const testBuffer = this.device.createBuffer({ size: bytesPerRow * externalImage.height, usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST, }); this.trackForCleanup(testBuffer); const encoder = this.device.createCommandEncoder(); encoder.copyTextureToBuffer( { texture: dstTexture, mipLevel: 0, origin: { x: 0, y: 0, z: 0 } }, { buffer: testBuffer, bytesPerRow }, { width: externalImage.width, height: externalImage.height, depthOrArrayLayers: 1 } ); this.device.queue.submit([encoder.finish()]); this.checkCopyExternalImageResult( testBuffer, expectedData, externalImage.width, externalImage.height, bytesPerPixel, isFp16 ); } }
sarahM0/cts
src/unittests/test_query.spec.ts
export const description = `
Tests for TestQuery
`;

import { makeTestGroup } from '../common/framework/test_group.js';
import {
  TestQueryMultiFile,
  TestQueryMultiTest,
  TestQueryMultiCase,
  TestQuerySingleCase,
  TestQuery,
} from '../common/internal/query/query.js';

import { UnitTest } from './unit_test.js';

class F extends UnitTest {
  // Checks that `q` stringifies to exactly `exp`.
  expectToString(q: TestQuery, exp: string) {
    this.expect(q.toString() === exp);
  }
}

export const g = makeTestGroup(F);

g.test('constructor').fn(t => {
  // NOTE(review): these constructors throw for the empty path arrays below —
  // presumably they validate that the deeper path levels are non-empty; confirm
  // against the TestQuery implementation.
  t.shouldThrow('Error', () => new TestQueryMultiTest('suite', [], []));

  t.shouldThrow('Error', () => new TestQueryMultiCase('suite', ['a'], [], {}));
  t.shouldThrow('Error', () => new TestQueryMultiCase('suite', [], ['c'], {}));
  t.shouldThrow('Error', () => new TestQueryMultiCase('suite', [], [], {}));

  t.shouldThrow('Error', () => new TestQuerySingleCase('suite', ['a'], [], {}));
  t.shouldThrow('Error', () => new TestQuerySingleCase('suite', [], ['c'], {}));
  t.shouldThrow('Error', () => new TestQuerySingleCase('suite', [], [], {}));
});

g.test('toString').fn(t => {
  // Multi-file/multi-test/multi-case queries end in a wildcard; single-case does not.
  t.expectToString(new TestQueryMultiFile('s', []), 's:*');
  t.expectToString(new TestQueryMultiFile('s', ['a']), 's:a,*');
  t.expectToString(new TestQueryMultiFile('s', ['a', 'b']), 's:a,b,*');
  t.expectToString(new TestQueryMultiTest('s', ['a', 'b'], []), 's:a,b:*');
  t.expectToString(new TestQueryMultiTest('s', ['a', 'b'], ['c']), 's:a,b:c,*');
  t.expectToString(new TestQueryMultiTest('s', ['a', 'b'], ['c', 'd']), 's:a,b:c,d,*');
  t.expectToString(new TestQueryMultiCase('s', ['a', 'b'], ['c', 'd'], {}), 's:a,b:c,d:*');
  t.expectToString(
    new TestQueryMultiCase('s', ['a', 'b'], ['c', 'd'], { x: 1 }),
    's:a,b:c,d:x=1;*'
  );
  t.expectToString(
    new TestQueryMultiCase('s', ['a', 'b'], ['c', 'd'], { x: 1, y: 2 }),
    's:a,b:c,d:x=1;y=2;*'
  );
  t.expectToString(
    new TestQuerySingleCase('s', ['a', 'b'], ['c', 'd'], { x: 1, y: 2 }),
    's:a,b:c,d:x=1;y=2'
  );
  t.expectToString(new TestQuerySingleCase('s', ['a', 'b'], ['c', 'd'], {}), 's:a,b:c,d:');
});
sarahM0/cts
src/webgpu/shader/execution/builtin/abs.spec.ts
export const description = `
Execution Tests for the 'abs' builtin function
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';
import {
  f32,
  f32Bits,
  i32Bits,
  TypeF32,
  TypeI32,
  TypeU32,
  u32Bits,
} from '../../../util/conversion.js';

import { anyOf, kBit, kValue, run } from './builtin.js';

export const g = makeTestGroup(GPUTest);

g.test('integer_builtin_functions,abs_unsigned')
  .uniqueId('59ff84968a839124')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    `
scalar case, unsigned abs:
abs(e: T ) -> T T is u32 or vecN<u32>. Result is e.
This is provided for symmetry with abs for signed integers.
Component-wise when T is a vector.
`
  )
  .params(u =>
    u
      .combine('storageClass', ['uniform', 'storage_r', 'storage_rw'] as const)
      .combine('vectorize', [undefined, 2, 3, 4] as const)
  )
  .fn(async t => {
    // abs() is the identity function on u32, so every case expects its own
    // input back unchanged.
    run(t, 'abs', [TypeU32], TypeU32, t.params, [
      // Min and Max u32
      { input: u32Bits(kBit.u32.min), expected: u32Bits(kBit.u32.min) },
      { input: u32Bits(kBit.u32.max), expected: u32Bits(kBit.u32.max) },
      // Powers of 2: 2^i: 0 <= i <= 31
      { input: u32Bits(kBit.powTwo.to0), expected: u32Bits(kBit.powTwo.to0) },
      { input: u32Bits(kBit.powTwo.to1), expected: u32Bits(kBit.powTwo.to1) },
      { input: u32Bits(kBit.powTwo.to2), expected: u32Bits(kBit.powTwo.to2) },
      { input: u32Bits(kBit.powTwo.to3), expected: u32Bits(kBit.powTwo.to3) },
      { input: u32Bits(kBit.powTwo.to4), expected: u32Bits(kBit.powTwo.to4) },
      { input: u32Bits(kBit.powTwo.to5), expected: u32Bits(kBit.powTwo.to5) },
      { input: u32Bits(kBit.powTwo.to6), expected: u32Bits(kBit.powTwo.to6) },
      { input: u32Bits(kBit.powTwo.to7), expected: u32Bits(kBit.powTwo.to7) },
      { input: u32Bits(kBit.powTwo.to8), expected: u32Bits(kBit.powTwo.to8) },
      { input: u32Bits(kBit.powTwo.to9), expected: u32Bits(kBit.powTwo.to9) },
      { input: u32Bits(kBit.powTwo.to10), expected: u32Bits(kBit.powTwo.to10) },
      { input: u32Bits(kBit.powTwo.to11), expected: u32Bits(kBit.powTwo.to11) },
      { input: u32Bits(kBit.powTwo.to12), expected: u32Bits(kBit.powTwo.to12) },
      { input: u32Bits(kBit.powTwo.to13), expected: u32Bits(kBit.powTwo.to13) },
      { input: u32Bits(kBit.powTwo.to14), expected: u32Bits(kBit.powTwo.to14) },
      { input: u32Bits(kBit.powTwo.to15), expected: u32Bits(kBit.powTwo.to15) },
      { input: u32Bits(kBit.powTwo.to16), expected: u32Bits(kBit.powTwo.to16) },
      { input: u32Bits(kBit.powTwo.to17), expected: u32Bits(kBit.powTwo.to17) },
      { input: u32Bits(kBit.powTwo.to18), expected: u32Bits(kBit.powTwo.to18) },
      { input: u32Bits(kBit.powTwo.to19), expected: u32Bits(kBit.powTwo.to19) },
      { input: u32Bits(kBit.powTwo.to20), expected: u32Bits(kBit.powTwo.to20) },
      { input: u32Bits(kBit.powTwo.to21), expected: u32Bits(kBit.powTwo.to21) },
      { input: u32Bits(kBit.powTwo.to22), expected: u32Bits(kBit.powTwo.to22) },
      { input: u32Bits(kBit.powTwo.to23), expected: u32Bits(kBit.powTwo.to23) },
      { input: u32Bits(kBit.powTwo.to24), expected: u32Bits(kBit.powTwo.to24) },
      { input: u32Bits(kBit.powTwo.to25), expected: u32Bits(kBit.powTwo.to25) },
      { input: u32Bits(kBit.powTwo.to26), expected: u32Bits(kBit.powTwo.to26) },
      { input: u32Bits(kBit.powTwo.to27), expected: u32Bits(kBit.powTwo.to27) },
      { input: u32Bits(kBit.powTwo.to28), expected: u32Bits(kBit.powTwo.to28) },
      { input: u32Bits(kBit.powTwo.to29), expected: u32Bits(kBit.powTwo.to29) },
      { input: u32Bits(kBit.powTwo.to30), expected: u32Bits(kBit.powTwo.to30) },
      { input: u32Bits(kBit.powTwo.to31), expected: u32Bits(kBit.powTwo.to31) },
    ]);
  });

g.test('integer_builtin_functions,abs_signed')
  .uniqueId('d8fc581d17db6ae8')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    `
signed abs:
abs(e: T ) -> T T is i32 or vecN<i32>. The result is the absolute value of e.
Component-wise when T is a vector.
If e evaluates to the largest negative value, then the result is e.
(GLSLstd450SAbs)
`
  )
  .params(u =>
    u
      .combine('storageClass', ['uniform', 'storage_r', 'storage_rw'] as const)
      .combine('vectorize', [undefined, 2, 3, 4] as const)
  )
  .fn(async t => {
    run(t, 'abs', [TypeI32], TypeI32, t.params, [
      // Min and max i32
      // If e evaluates to the largest negative value, then the result is e.
      { input: i32Bits(kBit.i32.negative.min), expected: i32Bits(kBit.i32.negative.min) },
      { input: i32Bits(kBit.i32.negative.max), expected: i32Bits(kBit.i32.positive.min) },
      { input: i32Bits(kBit.i32.positive.max), expected: i32Bits(kBit.i32.positive.max) },
      { input: i32Bits(kBit.i32.positive.min), expected: i32Bits(kBit.i32.positive.min) },
      // input: -2^i, 0 <= i <= 31 (as bit patterns); expected: 2^i
      { input: i32Bits(kBit.negPowTwo.to0), expected: i32Bits(kBit.powTwo.to0) },
      { input: i32Bits(kBit.negPowTwo.to1), expected: i32Bits(kBit.powTwo.to1) },
      { input: i32Bits(kBit.negPowTwo.to2), expected: i32Bits(kBit.powTwo.to2) },
      { input: i32Bits(kBit.negPowTwo.to3), expected: i32Bits(kBit.powTwo.to3) },
      { input: i32Bits(kBit.negPowTwo.to4), expected: i32Bits(kBit.powTwo.to4) },
      { input: i32Bits(kBit.negPowTwo.to5), expected: i32Bits(kBit.powTwo.to5) },
      { input: i32Bits(kBit.negPowTwo.to6), expected: i32Bits(kBit.powTwo.to6) },
      { input: i32Bits(kBit.negPowTwo.to7), expected: i32Bits(kBit.powTwo.to7) },
      { input: i32Bits(kBit.negPowTwo.to8), expected: i32Bits(kBit.powTwo.to8) },
      { input: i32Bits(kBit.negPowTwo.to9), expected: i32Bits(kBit.powTwo.to9) },
      { input: i32Bits(kBit.negPowTwo.to10), expected: i32Bits(kBit.powTwo.to10) },
      { input: i32Bits(kBit.negPowTwo.to11), expected: i32Bits(kBit.powTwo.to11) },
      { input: i32Bits(kBit.negPowTwo.to12), expected: i32Bits(kBit.powTwo.to12) },
      { input: i32Bits(kBit.negPowTwo.to13), expected: i32Bits(kBit.powTwo.to13) },
      { input: i32Bits(kBit.negPowTwo.to14), expected: i32Bits(kBit.powTwo.to14) },
      { input: i32Bits(kBit.negPowTwo.to15), expected: i32Bits(kBit.powTwo.to15) },
      { input: i32Bits(kBit.negPowTwo.to16), expected: i32Bits(kBit.powTwo.to16) },
      { input: i32Bits(kBit.negPowTwo.to17), expected: i32Bits(kBit.powTwo.to17) },
      { input: i32Bits(kBit.negPowTwo.to18), expected: i32Bits(kBit.powTwo.to18) },
      { input: i32Bits(kBit.negPowTwo.to19), expected: i32Bits(kBit.powTwo.to19) },
      { input: i32Bits(kBit.negPowTwo.to20), expected: i32Bits(kBit.powTwo.to20) },
      { input: i32Bits(kBit.negPowTwo.to21), expected: i32Bits(kBit.powTwo.to21) },
      { input: i32Bits(kBit.negPowTwo.to22), expected: i32Bits(kBit.powTwo.to22) },
      { input: i32Bits(kBit.negPowTwo.to23), expected: i32Bits(kBit.powTwo.to23) },
      { input: i32Bits(kBit.negPowTwo.to24), expected: i32Bits(kBit.powTwo.to24) },
      { input: i32Bits(kBit.negPowTwo.to25), expected: i32Bits(kBit.powTwo.to25) },
      { input: i32Bits(kBit.negPowTwo.to26), expected: i32Bits(kBit.powTwo.to26) },
      { input: i32Bits(kBit.negPowTwo.to27), expected: i32Bits(kBit.powTwo.to27) },
      { input: i32Bits(kBit.negPowTwo.to28), expected: i32Bits(kBit.powTwo.to28) },
      { input: i32Bits(kBit.negPowTwo.to29), expected: i32Bits(kBit.powTwo.to29) },
      { input: i32Bits(kBit.negPowTwo.to30), expected: i32Bits(kBit.powTwo.to30) },
      { input: i32Bits(kBit.negPowTwo.to31), expected: i32Bits(kBit.powTwo.to31) },
    ]);
  });

g.test('float_builtin_functions,abs_float')
  .uniqueId('2c1782b6a8dec8cb')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#float-builtin-functions')
  .desc(
    `
float abs:
abs(e: T ) -> T T is f32 or vecN<f32> Returns the absolute value of e (e.g. e with a positive sign bit).
Component-wise when T is a vector.
(GLSLstd450Fabs)
`
  )
  .params(u =>
    u
      .combine('storageClass', ['uniform', 'storage_r', 'storage_rw'] as const)
      .combine('vectorize', [undefined, 2, 3, 4] as const)
  )
  .fn(async t => {
    run(t, 'abs', [TypeF32], TypeF32, t.params, [
      // Min and Max f32
      { input: f32Bits(kBit.f32.negative.max), expected: f32Bits(0x0080_0000) },
      { input: f32Bits(kBit.f32.negative.min), expected: f32Bits(0x7f7f_ffff) },
      { input: f32Bits(kBit.f32.positive.min), expected: f32Bits(kBit.f32.positive.min) },
      { input: f32Bits(kBit.f32.positive.max), expected: f32Bits(kBit.f32.positive.max) },

      // Subnormal f32
      // Implementations may flush subnormals to zero, so either the exact
      // subnormal result or 0 is accepted (hence anyOf).
      // TODO(sarahM0): Check if this is needed (or if it has to fail). If yes add other values.
      {
        input: f32Bits(kBit.f32.subnormal.positive.max),
        expected: anyOf(f32Bits(kBit.f32.subnormal.positive.max), f32(0)),
      },
      {
        input: f32Bits(kBit.f32.subnormal.positive.min),
        expected: anyOf(f32Bits(kBit.f32.subnormal.positive.min), f32(0)),
      },

      // Infinity f32
      { input: f32Bits(kBit.f32.infinity.negative), expected: f32Bits(kBit.f32.infinity.positive) },
      { input: f32Bits(kBit.f32.infinity.positive), expected: f32Bits(kBit.f32.infinity.positive) },

      // Powers of 2.0: -2.0^i: -1 >= i >= -31
      { input: f32(kValue.negPowTwo.toMinus1), expected: f32(kValue.powTwo.toMinus1) },
      { input: f32(kValue.negPowTwo.toMinus2), expected: f32(kValue.powTwo.toMinus2) },
      { input: f32(kValue.negPowTwo.toMinus3), expected: f32(kValue.powTwo.toMinus3) },
      { input: f32(kValue.negPowTwo.toMinus4), expected: f32(kValue.powTwo.toMinus4) },
      { input: f32(kValue.negPowTwo.toMinus5), expected: f32(kValue.powTwo.toMinus5) },
      { input: f32(kValue.negPowTwo.toMinus6), expected: f32(kValue.powTwo.toMinus6) },
      { input: f32(kValue.negPowTwo.toMinus7), expected: f32(kValue.powTwo.toMinus7) },
      { input: f32(kValue.negPowTwo.toMinus8), expected: f32(kValue.powTwo.toMinus8) },
      { input: f32(kValue.negPowTwo.toMinus9), expected: f32(kValue.powTwo.toMinus9) },
      { input: f32(kValue.negPowTwo.toMinus10), expected: f32(kValue.powTwo.toMinus10) },
      { input: f32(kValue.negPowTwo.toMinus11), expected: f32(kValue.powTwo.toMinus11) },
      { input: f32(kValue.negPowTwo.toMinus12), expected: f32(kValue.powTwo.toMinus12) },
      { input: f32(kValue.negPowTwo.toMinus13), expected: f32(kValue.powTwo.toMinus13) },
      { input: f32(kValue.negPowTwo.toMinus14), expected: f32(kValue.powTwo.toMinus14) },
      { input: f32(kValue.negPowTwo.toMinus15), expected: f32(kValue.powTwo.toMinus15) },
      { input: f32(kValue.negPowTwo.toMinus16), expected: f32(kValue.powTwo.toMinus16) },
      { input: f32(kValue.negPowTwo.toMinus17), expected: f32(kValue.powTwo.toMinus17) },
      { input: f32(kValue.negPowTwo.toMinus18), expected: f32(kValue.powTwo.toMinus18) },
      { input: f32(kValue.negPowTwo.toMinus19), expected: f32(kValue.powTwo.toMinus19) },
      { input: f32(kValue.negPowTwo.toMinus20), expected: f32(kValue.powTwo.toMinus20) },
      { input: f32(kValue.negPowTwo.toMinus21), expected: f32(kValue.powTwo.toMinus21) },
      { input: f32(kValue.negPowTwo.toMinus22), expected: f32(kValue.powTwo.toMinus22) },
      { input: f32(kValue.negPowTwo.toMinus23), expected: f32(kValue.powTwo.toMinus23) },
      { input: f32(kValue.negPowTwo.toMinus24), expected: f32(kValue.powTwo.toMinus24) },
      { input: f32(kValue.negPowTwo.toMinus25), expected: f32(kValue.powTwo.toMinus25) },
      { input: f32(kValue.negPowTwo.toMinus26), expected: f32(kValue.powTwo.toMinus26) },
      { input: f32(kValue.negPowTwo.toMinus27), expected: f32(kValue.powTwo.toMinus27) },
      { input: f32(kValue.negPowTwo.toMinus28), expected: f32(kValue.powTwo.toMinus28) },
      { input: f32(kValue.negPowTwo.toMinus29), expected: f32(kValue.powTwo.toMinus29) },
      { input: f32(kValue.negPowTwo.toMinus30), expected: f32(kValue.powTwo.toMinus30) },
      { input: f32(kValue.negPowTwo.toMinus31), expected: f32(kValue.powTwo.toMinus31) },

      // Powers of 2.0: -2.0^i: 1 <= i <= 31
      { input: f32(kValue.negPowTwo.to1), expected: f32(kValue.powTwo.to1) },
      { input: f32(kValue.negPowTwo.to2), expected: f32(kValue.powTwo.to2) },
      { input: f32(kValue.negPowTwo.to3), expected: f32(kValue.powTwo.to3) },
      { input: f32(kValue.negPowTwo.to4), expected: f32(kValue.powTwo.to4) },
      { input: f32(kValue.negPowTwo.to5), expected: f32(kValue.powTwo.to5) },
      { input: f32(kValue.negPowTwo.to6), expected: f32(kValue.powTwo.to6) },
      { input: f32(kValue.negPowTwo.to7), expected: f32(kValue.powTwo.to7) },
      { input: f32(kValue.negPowTwo.to8), expected: f32(kValue.powTwo.to8) },
      { input: f32(kValue.negPowTwo.to9), expected: f32(kValue.powTwo.to9) },
      { input: f32(kValue.negPowTwo.to10), expected: f32(kValue.powTwo.to10) },
      { input: f32(kValue.negPowTwo.to11), expected: f32(kValue.powTwo.to11) },
      { input: f32(kValue.negPowTwo.to12), expected: f32(kValue.powTwo.to12) },
      { input: f32(kValue.negPowTwo.to13), expected: f32(kValue.powTwo.to13) },
      { input: f32(kValue.negPowTwo.to14), expected: f32(kValue.powTwo.to14) },
      { input: f32(kValue.negPowTwo.to15), expected: f32(kValue.powTwo.to15) },
      { input: f32(kValue.negPowTwo.to16), expected: f32(kValue.powTwo.to16) },
      { input: f32(kValue.negPowTwo.to17), expected: f32(kValue.powTwo.to17) },
      { input: f32(kValue.negPowTwo.to18), expected: f32(kValue.powTwo.to18) },
      { input: f32(kValue.negPowTwo.to19), expected: f32(kValue.powTwo.to19) },
      { input: f32(kValue.negPowTwo.to20), expected: f32(kValue.powTwo.to20) },
      { input: f32(kValue.negPowTwo.to21), expected: f32(kValue.powTwo.to21) },
      { input: f32(kValue.negPowTwo.to22), expected: f32(kValue.powTwo.to22) },
      { input: f32(kValue.negPowTwo.to23), expected: f32(kValue.powTwo.to23) },
      { input: f32(kValue.negPowTwo.to24), expected: f32(kValue.powTwo.to24) },
      { input: f32(kValue.negPowTwo.to25), expected: f32(kValue.powTwo.to25) },
      { input: f32(kValue.negPowTwo.to26), expected: f32(kValue.powTwo.to26) },
      { input: f32(kValue.negPowTwo.to27), expected: f32(kValue.powTwo.to27) },
      { input: f32(kValue.negPowTwo.to28), expected: f32(kValue.powTwo.to28) },
      { input: f32(kValue.negPowTwo.to29), expected: f32(kValue.powTwo.to29) },
      { input: f32(kValue.negPowTwo.to30), expected: f32(kValue.powTwo.to30) },
      { input: f32(kValue.negPowTwo.to31), expected: f32(kValue.powTwo.to31) },
    ]);
  });
sarahM0/cts
src/unittests/maths.spec.ts
export const description = ` Util math unit tests. `; import { makeTestGroup } from '../common/framework/test_group.js'; import { kBit } from '../webgpu/shader/execution/builtin/builtin.js'; import { f32, f32Bits, Scalar } from '../webgpu/util/conversion.js'; import { diffULP, nextAfter } from '../webgpu/util/math.js'; import { UnitTest } from './unit_test.js'; export const g = makeTestGroup(UnitTest); interface DiffULPCase { a: number; b: number; ulp: number; } function hexToF32(hex: number): number { return new Float32Array(new Uint32Array([hex]).buffer)[0]; } g.test('test,math,diffULP') .paramsSimple<DiffULPCase>([ { a: 0, b: 0, ulp: 0 }, { a: 1, b: 2, ulp: 2 ** 23 }, // Single exponent step { a: 2, b: 1, ulp: 2 ** 23 }, // Single exponent step { a: 2, b: 4, ulp: 2 ** 23 }, // Single exponent step { a: 4, b: 2, ulp: 2 ** 23 }, // Single exponent step { a: -1, b: -2, ulp: 2 ** 23 }, // Single exponent step { a: -2, b: -1, ulp: 2 ** 23 }, // Single exponent step { a: -2, b: -4, ulp: 2 ** 23 }, // Single exponent step { a: -4, b: -2, ulp: 2 ** 23 }, // Single exponent step { a: 1, b: 4, ulp: 2 ** 24 }, // Double exponent step { a: 4, b: 1, ulp: 2 ** 24 }, // Double exponent step { a: -1, b: -4, ulp: 2 ** 24 }, // Double exponent step { a: -4, b: -1, ulp: 2 ** 24 }, // Double exponent step { a: hexToF32(0x00800000), b: hexToF32(0x00800001), ulp: 1 }, // Single mantissa step { a: hexToF32(0x00800001), b: hexToF32(0x00800000), ulp: 1 }, // Single mantissa step { a: hexToF32(0x03800000), b: hexToF32(0x03800001), ulp: 1 }, // Single mantissa step { a: hexToF32(0x03800001), b: hexToF32(0x03800000), ulp: 1 }, // Single mantissa step { a: -hexToF32(0x00800000), b: -hexToF32(0x00800001), ulp: 1 }, // Single mantissa step { a: -hexToF32(0x00800001), b: -hexToF32(0x00800000), ulp: 1 }, // Single mantissa step { a: -hexToF32(0x03800000), b: -hexToF32(0x03800001), ulp: 1 }, // Single mantissa step { a: -hexToF32(0x03800001), b: -hexToF32(0x03800000), ulp: 1 }, // Single 
mantissa step { a: hexToF32(0x00800000), b: hexToF32(0x00800002), ulp: 2 }, // Double mantissa step { a: hexToF32(0x00800002), b: hexToF32(0x00800000), ulp: 2 }, // Double mantissa step { a: hexToF32(0x03800000), b: hexToF32(0x03800002), ulp: 2 }, // Double mantissa step { a: hexToF32(0x03800002), b: hexToF32(0x03800000), ulp: 2 }, // Double mantissa step { a: -hexToF32(0x00800000), b: -hexToF32(0x00800002), ulp: 2 }, // Double mantissa step { a: -hexToF32(0x00800002), b: -hexToF32(0x00800000), ulp: 2 }, // Double mantissa step { a: -hexToF32(0x03800000), b: -hexToF32(0x03800002), ulp: 2 }, // Double mantissa step { a: -hexToF32(0x03800002), b: -hexToF32(0x03800000), ulp: 2 }, // Double mantissa step { a: hexToF32(0x00800000), b: 0, ulp: 1 }, // Normals near 0 { a: 0, b: hexToF32(0x00800000), ulp: 1 }, // Normals near 0 { a: -hexToF32(0x00800000), b: 0, ulp: 1 }, // Normals near 0 { a: 0, b: -hexToF32(0x00800000), ulp: 1 }, // Normals near 0 { a: hexToF32(0x00800000), b: -hexToF32(0x00800000), ulp: 2 }, // Normals around 0 { a: -hexToF32(0x00800000), b: hexToF32(0x00800000), ulp: 2 }, // Normals around 0 { a: hexToF32(0x00000001), b: 0, ulp: 0 }, // Subnormals near 0 { a: 0, b: hexToF32(0x00000001), ulp: 0 }, // Subnormals near 0 { a: -hexToF32(0x00000001), b: 0, ulp: 0 }, // Subnormals near 0 { a: 0, b: -hexToF32(0x00000001), ulp: 0 }, // Subnormals near 0 { a: hexToF32(0x00000001), b: -hexToF32(0x00000001), ulp: 0 }, // Subnormals near 0 { a: -hexToF32(0x00000001), b: hexToF32(0x00000001), ulp: 0 }, // Subnormals near 0 { a: hexToF32(0x00000001), b: hexToF32(0x00800000), ulp: 1 }, // Normal/Subnormal boundary { a: hexToF32(0x00800000), b: hexToF32(0x00000001), ulp: 1 }, // Normal/Subnormal boundary { a: -hexToF32(0x00000001), b: -hexToF32(0x00800000), ulp: 1 }, // Normal/Subnormal boundary { a: -hexToF32(0x00800000), b: -hexToF32(0x00000001), ulp: 1 }, // Normal/Subnormal boundary { a: hexToF32(0x00800001), b: hexToF32(0x00000000), ulp: 2 }, // 
Just-above-Normal/Subnormal boundary { a: hexToF32(0x00800001), b: hexToF32(0x00000001), ulp: 2 }, // Just-above-Normal/Subnormal boundary { a: hexToF32(0x00800005), b: hexToF32(0x00000001), ulp: 6 }, // Just-above-Normal/Subnormal boundary { a: hexToF32(0x00800005), b: hexToF32(0x00000111), ulp: 6 }, // Just-above-Normal/Subnormal boundary ]) .fn(t => { const a = t.params.a; const b = t.params.b; const got = diffULP(a, b); const expect = t.params.ulp; t.expect(got === expect, `diffULP(${a}, ${b}) returned ${got}. Expected ${expect}`); }); interface nextAfterCase { val: number; dir: boolean; result: Scalar; } g.test('test,math,nextAfter') .paramsSimple<nextAfterCase>([ // Edge Cases { val: NaN, dir: true, result: f32Bits(0x7fffffff) }, { val: NaN, dir: false, result: f32Bits(0x7fffffff) }, { val: Number.POSITIVE_INFINITY, dir: true, result: f32Bits(kBit.f32.infinity.positive) }, { val: Number.POSITIVE_INFINITY, dir: false, result: f32Bits(kBit.f32.infinity.positive) }, { val: Number.NEGATIVE_INFINITY, dir: true, result: f32Bits(kBit.f32.infinity.negative) }, { val: Number.NEGATIVE_INFINITY, dir: false, result: f32Bits(kBit.f32.infinity.negative) }, // Zeroes { val: -0, dir: true, result: f32Bits(kBit.f32.subnormal.positive.min) }, { val: +0, dir: false, result: f32Bits(kBit.f32.subnormal.negative.max) }, // Skipping these, since the testing framework does not distinguish between // +0 and -0, so throws an error about duplicate cases. 
// { val: +0, dir: true, result: f32Bits(kBit.f32.subnormal.positive.min) }, // { val: -0, dir: false, result: f32Bits(kBit.f32.subnormal.negative.max) }, // Subnormals { val: hexToF32(kBit.f32.subnormal.positive.min), dir: true, result: f32Bits(0x00000002) }, { val: hexToF32(kBit.f32.subnormal.positive.min), dir: false, result: f32(0) }, // prettier-ignore { val: hexToF32(kBit.f32.subnormal.positive.max), dir: true, result: f32Bits(kBit.f32.positive.min) }, { val: hexToF32(kBit.f32.subnormal.positive.max), dir: false, result: f32Bits(0x007ffffe) }, { val: hexToF32(kBit.f32.subnormal.negative.min), dir: true, result: f32Bits(0x807ffffe) }, // prettier-ignore { val: hexToF32(kBit.f32.subnormal.negative.min), dir: false, result: f32Bits(kBit.f32.negative.max) }, { val: hexToF32(kBit.f32.subnormal.negative.max), dir: true, result: f32(0) }, { val: hexToF32(kBit.f32.subnormal.negative.max), dir: false, result: f32Bits(0x80000002) }, // Normals // prettier-ignore { val: hexToF32(kBit.f32.positive.max), dir: true, result: f32Bits(kBit.f32.infinity.positive) }, { val: hexToF32(kBit.f32.positive.max), dir: false, result: f32Bits(0x7f7ffffe) }, { val: hexToF32(kBit.f32.positive.min), dir: true, result: f32Bits(0x00800001) }, // prettier-ignore { val: hexToF32(kBit.f32.positive.min), dir: false, result: f32Bits(kBit.f32.subnormal.positive.max) }, // prettier-ignore { val: hexToF32(kBit.f32.negative.max), dir: true, result: f32Bits(kBit.f32.subnormal.negative.min) }, { val: hexToF32(kBit.f32.negative.max), dir: false, result: f32Bits(0x80800001) }, { val: hexToF32(kBit.f32.negative.min), dir: true, result: f32Bits(0xff7ffffe) }, // prettier-ignore { val: hexToF32(kBit.f32.negative.min), dir: false, result: f32Bits(kBit.f32.infinity.negative) }, { val: hexToF32(0x03800000), dir: true, result: f32Bits(0x03800001) }, { val: hexToF32(0x03800000), dir: false, result: f32Bits(0x037fffff) }, { val: hexToF32(0x83800000), dir: true, result: f32Bits(0x837fffff) }, { val: 
hexToF32(0x83800000), dir: false, result: f32Bits(0x83800001) }, ]) .fn(t => { const val = t.params.val; const dir = t.params.dir; const expect = t.params.result; const expect_type = typeof expect; const got = nextAfter(val, dir); const got_type = typeof got; t.expect( got.value === expect.value || (Number.isNaN(got.value) && Number.isNaN(expect.value)), `nextAfter(${val}, ${dir}) returned ${got} (${got_type}). Expected ${expect} (${expect_type})` ); });
sarahM0/cts
src/common/internal/query/parseQuery.ts
import { assert } from '../../util/util.js'; import { TestParamsRW, JSONWithUndefined, badParamValueChars, paramKeyIsPublic, } from '../params_utils.js'; import { parseParamValue } from './json_param_value.js'; import { TestQuery, TestQueryMultiFile, TestQueryMultiTest, TestQueryMultiCase, TestQuerySingleCase, } from './query.js'; import { kBigSeparator, kWildcard, kPathSeparator, kParamSeparator } from './separators.js'; import { validQueryPart } from './validQueryPart.js'; export function parseQuery(s: string): TestQuery { try { return parseQueryImpl(s); } catch (ex) { ex.message += '\n on: ' + s; throw ex; } } function parseQueryImpl(s: string): TestQuery { // Undo encodeURIComponentSelectively s = decodeURIComponent(s); // bigParts are: suite, file, test, params (note kBigSeparator could appear in params) let suite: string; let fileString: string | undefined; let testString: string | undefined; let paramsString: string | undefined; { const i1 = s.indexOf(kBigSeparator); assert(i1 !== -1, `query string must have at least one ${kBigSeparator}`); suite = s.substring(0, i1); const i2 = s.indexOf(kBigSeparator, i1 + 1); if (i2 === -1) { fileString = s.substring(i1 + 1); } else { fileString = s.substring(i1 + 1, i2); const i3 = s.indexOf(kBigSeparator, i2 + 1); if (i3 === -1) { testString = s.substring(i2 + 1); } else { testString = s.substring(i2 + 1, i3); paramsString = s.substring(i3 + 1); } } } const { parts: file, wildcard: filePathHasWildcard } = parseBigPart(fileString, kPathSeparator); if (testString === undefined) { // Query is file-level assert( filePathHasWildcard, `File-level query without wildcard ${kWildcard}. 
Did you want a file-level query \ (append ${kPathSeparator}${kWildcard}) or test-level query (append ${kBigSeparator}${kWildcard})?` ); return new TestQueryMultiFile(suite, file); } assert(!filePathHasWildcard, `Wildcard ${kWildcard} must be at the end of the query string`); const { parts: test, wildcard: testPathHasWildcard } = parseBigPart(testString, kPathSeparator); if (paramsString === undefined) { // Query is test-level assert( testPathHasWildcard, `Test-level query without wildcard ${kWildcard}; did you want a test-level query \ (append ${kPathSeparator}${kWildcard}) or case-level query (append ${kBigSeparator}${kWildcard})?` ); assert(file.length > 0, 'File part of test-level query was empty (::)'); return new TestQueryMultiTest(suite, file, test); } // Query is case-level assert(!testPathHasWildcard, `Wildcard ${kWildcard} must be at the end of the query string`); const { parts: paramsParts, wildcard: paramsHasWildcard } = parseBigPart( paramsString, kParamSeparator ); assert(test.length > 0, 'Test part of case-level query was empty (::)'); const params: TestParamsRW = {}; for (const paramPart of paramsParts) { const [k, v] = parseSingleParam(paramPart); assert(validQueryPart.test(k), 'param key names must match ' + validQueryPart); params[k] = v; } if (paramsHasWildcard) { return new TestQueryMultiCase(suite, file, test, params); } else { return new TestQuerySingleCase(suite, file, test, params); } } // webgpu:a,b,* or webgpu:a,b,c:* const kExampleQueries = `\ webgpu${kBigSeparator}a${kPathSeparator}b${kPathSeparator}${kWildcard} or \ webgpu${kBigSeparator}a${kPathSeparator}b${kPathSeparator}c${kBigSeparator}${kWildcard}`; function parseBigPart( s: string, separator: typeof kParamSeparator | typeof kPathSeparator ): { parts: string[]; wildcard: boolean } { if (s === '') { return { parts: [], wildcard: false }; } const parts = s.split(separator); let endsWithWildcard = false; for (const [i, part] of parts.entries()) { if (i === parts.length - 1) { 
endsWithWildcard = part === kWildcard; } assert( part.indexOf(kWildcard) === -1 || endsWithWildcard, `Wildcard ${kWildcard} must be complete last part of a path (e.g. ${kExampleQueries})` ); } if (endsWithWildcard) { // Remove the last element of the array (which is just the wildcard). parts.length = parts.length - 1; } return { parts, wildcard: endsWithWildcard }; } function parseSingleParam(paramSubstring: string): [string, JSONWithUndefined] { assert(paramSubstring !== '', 'Param in a query must not be blank (is there a trailing comma?)'); const i = paramSubstring.indexOf('='); assert(i !== -1, 'Param in a query must be of form key=value'); const k = paramSubstring.substring(0, i); assert(paramKeyIsPublic(k), 'Param in a query must not be private (start with _)'); const v = paramSubstring.substring(i + 1); return [k, parseSingleParamValue(v)]; } function parseSingleParamValue(s: string): JSONWithUndefined { assert( !badParamValueChars.test(s), `param value must not match ${badParamValueChars} - was ${s}` ); return parseParamValue(s); }
sarahM0/cts
src/webgpu/shader/execution/shader_io/compute_builtins.spec.ts
<reponame>sarahM0/cts export const description = `Test compute shader builtin variables`; import { makeTestGroup } from '../../../../common/framework/test_group.js'; import { iterRange } from '../../../../common/util/util.js'; import { GPUTest } from '../../../gpu_test.js'; export const g = makeTestGroup(GPUTest); // Test that the values for each input builtin are correct. g.test('inputs') .desc(`Test compute shader builtin inputs values`) .params(u => u .combine('method', ['param', 'struct', 'mixed'] as const) .combine('dispatch', ['direct', 'indirect'] as const) .combineWithParams([ { groupSize: { x: 1, y: 1, z: 1 }, numGroups: { x: 1, y: 1, z: 1 }, }, { groupSize: { x: 8, y: 4, z: 2 }, numGroups: { x: 1, y: 1, z: 1 }, }, { groupSize: { x: 1, y: 1, z: 1 }, numGroups: { x: 8, y: 4, z: 2 }, }, { groupSize: { x: 3, y: 7, z: 5 }, numGroups: { x: 13, y: 9, z: 11 }, }, ] as const) .beginSubcases() ) .fn(async t => { const invocationsPerGroup = t.params.groupSize.x * t.params.groupSize.y * t.params.groupSize.z; const totalInvocations = invocationsPerGroup * t.params.numGroups.x * t.params.numGroups.y * t.params.numGroups.z; // Generate the structures, parameters, and builtin expressions used in the shader. 
let params = ''; let structures = ''; let local_id = ''; let local_index = ''; let global_id = ''; let group_id = ''; let num_groups = ''; switch (t.params.method) { case 'param': params = ` [[builtin(local_invocation_id)]] local_id : vec3<u32>, [[builtin(local_invocation_index)]] local_index : u32, [[builtin(global_invocation_id)]] global_id : vec3<u32>, [[builtin(workgroup_id)]] group_id : vec3<u32>, [[builtin(num_workgroups)]] num_groups : vec3<u32>, `; local_id = 'local_id'; local_index = 'local_index'; global_id = 'global_id'; group_id = 'group_id'; num_groups = 'num_groups'; break; case 'struct': structures = `struct Inputs { [[builtin(local_invocation_id)]] local_id : vec3<u32>; [[builtin(local_invocation_index)]] local_index : u32; [[builtin(global_invocation_id)]] global_id : vec3<u32>; [[builtin(workgroup_id)]] group_id : vec3<u32>; [[builtin(num_workgroups)]] num_groups : vec3<u32>; };`; params = `inputs : Inputs`; local_id = 'inputs.local_id'; local_index = 'inputs.local_index'; global_id = 'inputs.global_id'; group_id = 'inputs.group_id'; num_groups = 'inputs.num_groups'; break; case 'mixed': structures = `struct InputsA { [[builtin(local_invocation_index)]] local_index : u32; [[builtin(global_invocation_id)]] global_id : vec3<u32>; }; struct InputsB { [[builtin(workgroup_id)]] group_id : vec3<u32>; };`; params = `[[builtin(local_invocation_id)]] local_id : vec3<u32>, inputsA : InputsA, inputsB : InputsB, [[builtin(num_workgroups)]] num_groups : vec3<u32>,`; local_id = 'local_id'; local_index = 'inputsA.local_index'; global_id = 'inputsA.global_id'; group_id = 'inputsB.group_id'; num_groups = 'num_groups'; break; } // WGSL shader that stores every builtin value to a buffer, for every invocation in the grid. 
const wgsl = ` [[block]] struct S { data : array<u32>; }; [[block]] struct V { data : array<vec3<u32>>; }; [[group(0), binding(0)]] var<storage, write> local_id_out : V; [[group(0), binding(1)]] var<storage, write> local_index_out : S; [[group(0), binding(2)]] var<storage, write> global_id_out : V; [[group(0), binding(3)]] var<storage, write> group_id_out : V; [[group(0), binding(4)]] var<storage, write> num_groups_out : V; ${structures} let group_width = ${t.params.groupSize.x}u; let group_height = ${t.params.groupSize.y}u; let group_depth = ${t.params.groupSize.z}u; [[stage(compute), workgroup_size(group_width, group_height, group_depth)]] fn main( ${params} ) { let group_index = ((${group_id}.z * ${num_groups}.y) + ${group_id}.y) * ${num_groups}.x + ${group_id}.x; let global_index = group_index * ${invocationsPerGroup}u + ${local_index}; local_id_out.data[global_index] = ${local_id}; local_index_out.data[global_index] = ${local_index}; global_id_out.data[global_index] = ${global_id}; group_id_out.data[global_index] = ${group_id}; num_groups_out.data[global_index] = ${num_groups}; } `; const pipeline = t.device.createComputePipeline({ compute: { module: t.device.createShaderModule({ code: wgsl, }), entryPoint: 'main', }, }); // Helper to create a `size`-byte buffer with binding number `binding`. function createBuffer(size: number, binding: number) { const buffer = t.device.createBuffer({ size, usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC, }); t.trackForCleanup(buffer); bindGroupEntries.push({ binding, resource: { buffer, }, }); return buffer; } // Create the output buffers. 
const bindGroupEntries: GPUBindGroupEntry[] = []; const localIdBuffer = createBuffer(totalInvocations * 16, 0); const localIndexBuffer = createBuffer(totalInvocations * 4, 1); const globalIdBuffer = createBuffer(totalInvocations * 16, 2); const groupIdBuffer = createBuffer(totalInvocations * 16, 3); const numGroupsBuffer = createBuffer(totalInvocations * 16, 4); const bindGroup = t.device.createBindGroup({ layout: pipeline.getBindGroupLayout(0), entries: bindGroupEntries, }); // Run the shader. const encoder = t.device.createCommandEncoder(); const pass = encoder.beginComputePass(); pass.setPipeline(pipeline); pass.setBindGroup(0, bindGroup); switch (t.params.dispatch) { case 'direct': pass.dispatch(t.params.numGroups.x, t.params.numGroups.y, t.params.numGroups.z); break; case 'indirect': { const dispatchBuffer = t.device.createBuffer({ size: 3 * Uint32Array.BYTES_PER_ELEMENT, usage: GPUBufferUsage.INDIRECT, mappedAtCreation: true, }); t.trackForCleanup(dispatchBuffer); const dispatchData = new Uint32Array(dispatchBuffer.getMappedRange()); dispatchData[0] = t.params.numGroups.x; dispatchData[1] = t.params.numGroups.y; dispatchData[2] = t.params.numGroups.z; dispatchBuffer.unmap(); pass.dispatchIndirect(dispatchBuffer, 0); break; } } pass.endPass(); t.queue.submit([encoder.finish()]); type vec3 = { x: number; y: number; z: number }; // Helper to check that the vec3<u32> value at each index of the provided `output` buffer // matches the expected value for that invocation, as generated by the `getBuiltinValue` // function. The `name` parameter is the builtin name, used for error messages. const checkEachIndex = ( output: Uint32Array, name: string, getBuiltinValue: (groupId: vec3, localId: vec3) => vec3 ) => { // Loop over workgroups. for (let gz = 0; gz < t.params.numGroups.z; gz++) { for (let gy = 0; gy < t.params.numGroups.y; gy++) { for (let gx = 0; gx < t.params.numGroups.x; gx++) { // Loop over invocations within a group. 
for (let lz = 0; lz < t.params.groupSize.z; lz++) { for (let ly = 0; ly < t.params.groupSize.y; ly++) { for (let lx = 0; lx < t.params.groupSize.x; lx++) { const groupIndex = (gz * t.params.numGroups.y + gy) * t.params.numGroups.x + gx; const localIndex = (lz * t.params.groupSize.y + ly) * t.params.groupSize.x + lx; const globalIndex = groupIndex * invocationsPerGroup + localIndex; const expected = getBuiltinValue( { x: gx, y: gy, z: gz }, { x: lx, y: ly, z: lz } ); if (output[globalIndex * 4 + 0] !== expected.x) { return new Error( `${name}.x failed at group(${gx},${gy},${gz}) local(${lx},${ly},${lz}))\n` + ` expected: ${expected.x}\n` + ` got: ${output[globalIndex * 4 + 0]}` ); } if (output[globalIndex * 4 + 1] !== expected.y) { return new Error( `${name}.y failed at group(${gx},${gy},${gz}) local(${lx},${ly},${lz}))\n` + ` expected: ${expected.y}\n` + ` got: ${output[globalIndex * 4 + 1]}` ); } if (output[globalIndex * 4 + 2] !== expected.z) { return new Error( `${name}.z failed at group(${gx},${gy},${gz}) local(${lx},${ly},${lz}))\n` + ` expected: ${expected.z}\n` + ` got: ${output[globalIndex * 4 + 2]}` ); } } } } } } } return undefined; }; // Check [[builtin(local_invocation_index)]] values. t.expectGPUBufferValuesEqual( localIndexBuffer, new Uint32Array([...iterRange(totalInvocations, x => x % invocationsPerGroup)]) ); // Check [[builtin(local_invocation_id)]] values. t.expectGPUBufferValuesPassCheck( localIdBuffer, outputData => checkEachIndex(outputData, 'local_invocation_id', (_, localId) => localId), { type: Uint32Array, typedLength: totalInvocations * 4 } ); // Check [[builtin(global_invocation_id)]] values. 
const getGlobalId = (groupId: vec3, localId: vec3) => { return { x: groupId.x * t.params.groupSize.x + localId.x, y: groupId.y * t.params.groupSize.y + localId.y, z: groupId.z * t.params.groupSize.z + localId.z, }; }; t.expectGPUBufferValuesPassCheck( globalIdBuffer, outputData => checkEachIndex(outputData, 'global_invocation_id', getGlobalId), { type: Uint32Array, typedLength: totalInvocations * 4 } ); // Check [[builtin(workgroup_id)]] values. t.expectGPUBufferValuesPassCheck( groupIdBuffer, outputData => checkEachIndex(outputData, 'workgroup_id', (groupId, _) => groupId), { type: Uint32Array, typedLength: totalInvocations * 4 } ); // Check [[builtin(num_workgroups)]] values. t.expectGPUBufferValuesPassCheck( numGroupsBuffer, outputData => checkEachIndex(outputData, 'num_workgroups', () => t.params.numGroups), { type: Uint32Array, typedLength: totalInvocations * 4 } ); });
sarahM0/cts
src/webgpu/shader/execution/builtin/integer_built_in_functions.spec.ts
// Auto-generated WGSL test-plan stubs for the integer built-in functions
// section of the spec. Every test below is a placeholder (.unimplemented())
// whose description carries the relevant spec excerpt; parameterizations are
// placeholders to be replaced when the tests are implemented.
export const description = `WGSL execution test. Section: Integer built-in functions`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';

export const g = makeTestGroup(GPUTest);

// clamp() for u32 / vecN<u32>.
g.test('integer_builtin_functions,unsigned_clamp')
  .uniqueId('386458e12e52645b')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    ` unsigned clamp: T is u32 or vecN<u32> clamp(e1: T ,e2: T,e3: T) -> T Returns min(max(e1,e2),e3). Component-wise when T is a vector. (GLSLstd450UClamp) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md `
  )
  .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3']))
  .unimplemented();

// clamp() for i32 / vecN<i32>.
g.test('integer_builtin_functions,signed_clamp')
  .uniqueId('da51d3c8cc902ab2')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    ` signed clamp: T is i32 or vecN<i32> clamp(e1: T ,e2: T,e3: T) -> T Returns min(max(e1,e2),e3). Component-wise when T is a vector. (GLSLstd450SClamp) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md `
  )
  .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3']))
  .unimplemented();

// countOneBits() (population count) for all 32-bit integer types.
g.test('integer_builtin_functions,count_1_bits')
  .uniqueId('259605bdcc180a4b')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    ` count 1 bits: T is i32, u32, vecN<i32>, or vecN<u32> countOneBits(e: T ) -> T The number of 1 bits in the representation of e. Also known as "population count". Component-wise when T is a vector. 
(SPIR-V OpBitCount) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md `
  )
  .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3']))
  .unimplemented();

// max() for u32 / vecN<u32>.
g.test('integer_builtin_functions,unsigned_max')
  .uniqueId('2cce54f65e71b3a3')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    ` unsigned max: T is u32 or vecN<u32> max(e1: T ,e2: T) -> T Returns e2 if e1 is less than e2, and e1 otherwise. Component-wise when T is a vector. (GLSLstd450UMax) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md `
  )
  .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3']))
  .unimplemented();

// max() for i32 / vecN<i32>.
g.test('integer_builtin_functions,signed_max')
  .uniqueId('ef8c37107946a69e')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    ` signed max: T is i32 or vecN<i32> max(e1: T ,e2: T) -> T Returns e2 if e1 is less than e2, and e1 otherwise. Component-wise when T is a vector. (GLSLstd450SMax) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md `
  )
  .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3']))
  .unimplemented();

// reverseBits() for all 32-bit integer types.
g.test('integer_builtin_functions,bit_reversal')
  .uniqueId('8a7550f1097993f8')
  .specURL('https://www.w3.org/TR/2021/WD-WGSL-20210929/#integer-builtin-functions')
  .desc(
    ` bit reversal: T is i32, u32, vecN<i32>, or vecN<u32> reverseBits(e: T ) -> T Reverses the bits in e: The bit at position k of the result equals the bit at position 31-k of e. Component-wise when T is a vector. (SPIR-V OpBitReverse) Please read the following guidelines before contributing: https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md `
  )
  .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3']))
  .unimplemented();
upstash/upstash-kafka
examples/cloudflare-worker/bindings.d.ts
// Environment bindings injected into the Cloudflare Worker at runtime.
// Presumably configured via wrangler vars/secrets — confirm in wrangler.toml.
export interface Bindings {
  // Base URL of the Upstash Kafka REST endpoint.
  UPSTASH_KAFKA_REST_URL: string
  // Username for Upstash Kafka REST basic auth.
  UPSTASH_KAFKA_REST_USERNAME: string
  // Password for Upstash Kafka REST basic auth.
  UPSTASH_KAFKA_REST_PASSWORD: string
}
upstash/upstash-kafka
examples/cloudflare-worker/src/index.ts
import { Kafka } from "@upstash/kafka" import type { Bindings } from "bindings" export default { async fetch(_request: Request, env: Bindings) { const kafka = new Kafka({ url: env.UPSTASH_KAFKA_REST_URL, username: env.UPSTASH_KAFKA_REST_USERNAME, password: env.UPSTASH_KAFKA_REST_PASSWORD, }) const p = kafka.producer() const c = kafka.consumer() const topicA = "a" await p.produce(topicA, "Hello World") const messages = await c.consume({ consumerGroupId: "group_1", instanceId: "instance_1", topics: [topicA], autoOffsetReset: "earliest", }) return new Response(JSON.stringify(messages), { headers: { "content-type": "text/plain" } }) }, }
upstash/upstash-kafka
pkg/produce.test.ts
import { kafka, Topic } from "./test_setup" import { expect, it } from "@jest/globals" it("publishes a single message succesfully", async () => { const p = kafka.producer() const c = kafka.consumer() const message = { hello: "test" } const header = { key: "signature", value: "abcd" } const { partition, offset, topic } = await p.produce(Topic.RED, message, { headers: [header] }) const found = await c.fetch({ topic, partition, offset }) expect(JSON.parse(found[0].value)).toEqual(message) expect(found[0].headers[0]).toEqual(header) }) it("Publish a serialized succesfully", async () => { const p = kafka.producer() const c = kafka.consumer() const message = "hello world" const header = { key: "signature", value: "abcd" } const { partition, offset, topic } = await p.produce(Topic.RED, message, { headers: [header] }) const found = await c.fetch({ topic, partition, offset }) expect(found[0].value).toEqual(message) expect(found[0].headers[0]).toEqual(header) }) it("publishes multiple messages to different topics succesfully", async () => { const p = kafka.producer() const c = kafka.consumer() const message0 = { hello: "test" } const message1 = { hello: "world" } const res = await p.produceMany([ { topic: Topic.RED, value: JSON.stringify(message0) }, { topic: Topic.GREEN, value: JSON.stringify(message1) }, ]) const found = await c.fetch({ topicPartitionOffsets: res.map((r) => ({ topic: r.topic, partition: r.partition, offset: r.offset, })), }) expect(found.map((f) => JSON.parse(f.value))).toContainEqual(message0) expect(found.map((f) => JSON.parse(f.value))).toContainEqual(message1) })
upstash/upstash-kafka
pkg/index.ts
// Public entry point of the package: re-export the Kafka client, the
// UpstashError type, and the shared message/header types.
export * from "./kafka"
export * from "./error"
export * from "./types"
upstash/upstash-kafka
pkg/test_setup.ts
import { Kafka } from "./kafka" import { config } from "dotenv" config() const url = process.env["UPSTASH_KAFKA_REST_URL"] if (!url) { throw new Error("UPSTASH_KAFKA_REST_URL env missing") } const username = process.env["UPSTASH_KAFKA_REST_USERNAME"] if (!username) { throw new Error("UPSTASH_KAFKA_REST_USERNAME env missing") } const password = process.env["UPSTASH_KAFKA_REST_PASSWORD"] if (!password) { throw new Error("UPSTASH_KAFKA_REST_PASSWORD env missing") } export const kafka = new Kafka({ url, username, password }) /* eslint-disable no-unused-vars */ export enum Topic { GREEN = "green", BLUE = "blue", RED = "red", } /* eslint-enable no-unused-vars */
upstash/upstash-kafka
pkg/base64.ts
/** * Encode a string as base64 * * Credit to https://base64.guru/developers/javascript/examples/polyfill */ export function base64(origin: string): string { const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" const len = origin.length - 1 let i = -1 let encoded = "" while (i < len) { const code = (origin.charCodeAt(++i) << 16) | (origin.charCodeAt(++i) << 8) | origin.charCodeAt(++i) encoded += alphabet[(code >>> 18) & 63] + alphabet[(code >>> 12) & 63] + alphabet[(code >>> 6) & 63] + alphabet[code & 63] } var pads = origin.length % 3 if (pads > 0) { encoded = encoded.slice(0, pads - 3) while (encoded.length % 4 !== 0) { encoded += "=" } } return encoded }
upstash/upstash-kafka
pkg/test_teardown.ts
<filename>pkg/test_teardown.ts import { kafka } from "./test_setup" export default async function teardown(): Promise<void> { const a = kafka.admin() const existingConsumers = await a.consumers() await Promise.all( existingConsumers.flatMap((group) => group.instances.map((instance) => a.removeConsumerInstance(group.name, instance.name)), ), ) }
upstash/upstash-kafka
pkg/error.ts
/** * Result of a bad request to upstash */ export class UpstashError extends Error { public readonly result: string public readonly error: string public readonly status: number constructor(res: { result: string; error: string; status: number }) { super(res.error) this.name = "UpstashError" this.result = res.result this.error = res.error this.status = res.status } }
upstash/upstash-kafka
cmd/removeConsumerGroups.ts
import { Kafka } from "../pkg/kafka" import { config } from "dotenv" config() async function main() { const url = process.env["UPSTASH_KAFKA_REST_URL"] if (!url) { throw new Error("Could not find url") } const username = process.env["UPSTASH_KAFKA_REST_USERNAME"] if (!username) { throw new Error("Could not find username") } const password = process.env["UPSTASH_KAFKA_REST_PASSWORD"] if (!password) { throw new Error("Could not find password") } const kafka = new Kafka({ url, username, password }) const a = kafka.admin() const existingConsumers = await a.consumers() await Promise.all( existingConsumers.flatMap((group) => group.instances.map((instance) => a.removeConsumerInstance(group.name, instance.name)), ), ) } main()
upstash/upstash-kafka
pkg/types.ts
// A single Kafka record header (string key/value pair).
export type Header = { key: string; value: string }

// A record as returned by the Upstash Kafka REST API.
export type Message = {
  topic: string
  // Partition the record was written to.
  partition: number
  // Offset of the record within its partition.
  offset: number
  // Broker timestamp; presumably epoch milliseconds — confirm against the REST API docs.
  timestamp: number
  key: string
  value: string
  headers: Header[]
}
shwilliam/fullstack-ts-boiler
client/src/graphql/index.ts
// Thin wrappers around the shared Apollo client for the two operations the
// UI needs, plus re-exports of the generated types under friendlier names.
import { apolloClient } from './client'
import { GraphQLError } from 'graphql'
import { FetchResult } from 'apollo-boost'
import {
  AllPostsQuery,
  AllPostsDocument,
  AddPostMutation,
  AddPostMutationVariables,
  AddPostDocument,
  NewPostInput as INewPostInput,
  Post as IPost,
} from './generated';

// Shape of a query result: optional GraphQL errors plus optional data.
export interface ExecutionResult<T> {
  errors?: ReadonlyArray<GraphQLError>;
  data?: Readonly<T>;
}

// Fetch every post. 'no-cache' forces a network round-trip so newly
// published posts show up immediately.
export async function getAllPosts(): Promise<
  ExecutionResult<AllPostsQuery>
> {
  return await apolloClient.query<AllPostsQuery>({ query: AllPostsDocument, fetchPolicy: 'no-cache' })
}

// Run the addPost mutation with the given input variables.
export async function addPost(
  variables: AddPostMutationVariables
): Promise<FetchResult<AddPostMutation>> {
  return await apolloClient.mutate<
    AddPostMutation,
    AddPostMutationVariables
  >({
    mutation: AddPostDocument,
    variables,
  })
}

// Re-exported generated types so consumers don't import from ./generated.
export type Post = IPost
export type NewPostInput = INewPostInput
shwilliam/fullstack-ts-boiler
client/src/App.tsx
import React from 'react' import { NewPostForm, PostList } from './components' import { StoreContextProvider } from './context/store' const App = () => ( <StoreContextProvider> <NewPostForm/> <PostList/> </StoreContextProvider> ) export default App
shwilliam/fullstack-ts-boiler
client/src/components/PostList.tsx
import React, { useContext } from 'react' import { Post } from '../graphql' import { StoreContext } from '../context/store' const PostList = () => { const {posts} = useContext(StoreContext) return ( <ul> {posts ? posts.map((post: Post) => <li key={post.id}>{post.title}</li>) : <p>loading...</p> } </ul> ) } export default PostList
shwilliam/fullstack-ts-boiler
client/src/components/NewPostForm.tsx
import React, { useState, FormEvent, useContext } from 'react' import { StoreContext } from '../context/store' const NewPostForm = () => { const {publish} = useContext(StoreContext) const [titleInput, setTitleInput] = useState('') const [contentInput, setContentInput] = useState('') const onPostSubmit = (e: FormEvent<HTMLFormElement>) => { publish({title: titleInput, content: contentInput}) setTitleInput('') setContentInput('') e.preventDefault() } return ( <form onSubmit={onPostSubmit}> <label> Title: <input name='title' type='text' value={titleInput} onChange={e => setTitleInput(e.target.value)} /> </label> <label> Content: <input name='content' type='text' value={contentInput} onChange={e => setContentInput(e.target.value)} /> </label> <button type='submit'>Publish</button> </form> ) } export default NewPostForm
shwilliam/fullstack-ts-boiler
server/src/repository.ts
<gh_stars>0 import fs from 'fs' import { Post } from './models/Post' import { NewPostInput } from './models/NewPostInput' const STATE_FILE = './data/blog.json' let _state: Post[] = null async function readFile(filename: string): Promise<string> { return new Promise((resolve, reject) => { fs.readFile(filename, (err, data) => { if (err) reject(err) resolve(data.toString()) }) }) } async function saveChanges(): Promise<void> { if (_state) { const json = JSON.stringify(_state, null, 2) return new Promise((resolve, reject) => { fs.writeFile(STATE_FILE, json, err => { if (err) reject(err) else { resolve() } }) }) } } export async function getAllPosts(): Promise<Post[]> { if (!_state) _state = JSON.parse(await readFile(STATE_FILE)) return _state } export async function addPost(postInput: NewPostInput): Promise<Post> { const allPosts = await getAllPosts() const newItem = new Post(postInput) allPosts.push(newItem) await saveChanges() return newItem }
shwilliam/fullstack-ts-boiler
client/src/components/index.ts
// Barrel file re-exporting the UI components.
import NewPostForm from './NewPostForm'
import PostList from './PostList'

export { NewPostForm, PostList }
shwilliam/fullstack-ts-boiler
server/src/models/FeedResolver.ts
import { Query, Mutation, Arg } from 'type-graphql'
import { getAllPosts, addPost, } from '../repository'
import { Post } from './Post'
import { NewPostInput } from './NewPostInput'

// GraphQL resolver for the post feed: a query returning all posts and a
// mutation for adding one. Both delegate to the file-backed repository.
export class FeedResolver {
  // Resolves the `allPosts` query.
  @Query(returns => [Post])
  public async allPosts(): Promise<Post[]> {
    return await getAllPosts()
  }

  // Resolves the `addPost` mutation; `input` is shaped by NewPostInput.
  @Mutation(returns => Post)
  public async addPost(@Arg('input') input: NewPostInput): Promise<Post> {
    return await addPost(input)
  }
}
shwilliam/fullstack-ts-boiler
server/src/models/NewPostInput.ts
import { InputType, Field } from 'type-graphql'
import { Post } from './Post'

// GraphQL input type for creating a post: only the author-supplied fields
// (the id is generated server-side by the Post constructor).
@InputType({ description: 'Input type for new post' })
export class NewPostInput implements Partial<Post> {
  @Field()
  title: string;

  @Field()
  content: string;
}
shwilliam/fullstack-ts-boiler
server/src/server.ts
import 'reflect-metadata'
import { ApolloServer } from 'apollo-server'
import { FeedResolver } from './models/FeedResolver'
import { buildSchema } from 'type-graphql'

// Bootstrap: build the GraphQL schema from the decorated resolver class,
// then start Apollo Server on $PORT (default 80).
(async (): Promise<void> => {
  const server = new ApolloServer({
    // validate: false — presumably skips class-validator on inputs; confirm.
    schema: await buildSchema({ resolvers: [FeedResolver], validate: false })
  })
  const { url } = await server.listen(process.env.PORT || 80)
  console.log(`Server running on ${url}`)
})()
shwilliam/fullstack-ts-boiler
client/src/context/store.tsx
import React, { createContext, useState, useEffect, ReactNode } from 'react'
import { getAllPosts, addPost, Post, NewPostInput } from '../graphql'

// Default context value: no posts and a no-op publish; only visible to
// consumers rendered outside of StoreContextProvider.
const StoreContext = createContext<IStore>({ posts: [], publish: () => {} })

// Provider that owns the post list and exposes it plus a publish() action.
const StoreContextProvider = (props: {children: ReactNode}) => {
  // NOTE(review): useState() is uninitialized, so `posts` is undefined until
  // the first fetch resolves — consumers (e.g. PostList) must handle that.
  const [posts, setPosts] = useState()

  // Fetch the full post list and store it.
  const fetchPosts = async () => {
    const allPosts = await getAllPosts()
    setPosts(allPosts?.data?.allPosts)
  }

  // Publish a new post, then refetch so the list reflects it.
  const publish = async (input: {title: string, content: string}) => {
    await addPost({input})
    fetchPosts()
  }

  // Initial load on mount.
  useEffect(()=> {
    fetchPosts()
  }, [])

  return (
    <StoreContext.Provider value={{ posts, publish }}>
      {props.children}
    </StoreContext.Provider>
  )
}

type NoOp = () => void

// Contract exposed through the context.
interface IStore {
  posts: Post[];
  publish: ((input: NewPostInput) => Promise<void>) | NoOp;
}

export { StoreContext, StoreContextProvider }
shwilliam/fullstack-ts-boiler
client/src/graphql/generated.tsx
<reponame>shwilliam/fullstack-ts-boiler import gql from 'graphql-tag'; import * as ApolloReactCommon from '@apollo/react-common'; import * as React from 'react'; import * as ApolloReactComponents from '@apollo/react-components'; import * as ApolloReactHoc from '@apollo/react-hoc'; export type Maybe<T> = T | null; export type Omit<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>>; /** All built-in and custom scalars, mapped to their actual values */ export type Scalars = { ID: string; String: string; Boolean: boolean; Int: number; Float: number; }; export type Mutation = { __typename?: 'Mutation'; addPost: Post; }; export type MutationAddPostArgs = { input: NewPostInput; }; export type NewPostInput = { title: Scalars['String']; content: Scalars['String']; }; export type Post = { __typename?: 'Post'; id: Scalars['ID']; title: Scalars['String']; content: Scalars['String']; }; export type Query = { __typename?: 'Query'; allPosts: Array<Post>; }; export type AddPostMutationVariables = { input: NewPostInput; }; export type AddPostMutation = ( { __typename?: 'Mutation' } & { addPost: ( { __typename?: 'Post' } & Pick<Post, 'id' | 'title' | 'content'> ) } ); export type AllPostsQueryVariables = {}; export type AllPostsQuery = ( { __typename?: 'Query' } & { allPosts: Array<( { __typename?: 'Post' } & Pick<Post, 'id' | 'title' | 'content'> )> } ); export const AddPostDocument = gql` mutation addPost($input: NewPostInput!) 
{ addPost(input: $input) { id title content } } `; export type AddPostMutationFn = ApolloReactCommon.MutationFunction<AddPostMutation, AddPostMutationVariables>; export type AddPostComponentProps = Omit<ApolloReactComponents.MutationComponentOptions<AddPostMutation, AddPostMutationVariables>, 'mutation'>; export const AddPostComponent = (props: AddPostComponentProps) => ( <ApolloReactComponents.Mutation<AddPostMutation, AddPostMutationVariables> mutation={AddPostDocument} {...props} /> ); export type AddPostProps<TChildProps = {}> = ApolloReactHoc.MutateProps<AddPostMutation, AddPostMutationVariables> & TChildProps; export function withAddPost<TProps, TChildProps = {}>(operationOptions?: ApolloReactHoc.OperationOption< TProps, AddPostMutation, AddPostMutationVariables, AddPostProps<TChildProps>>) { return ApolloReactHoc.withMutation<TProps, AddPostMutation, AddPostMutationVariables, AddPostProps<TChildProps>>(AddPostDocument, { alias: 'addPost', ...operationOptions }); }; export type AddPostMutationResult = ApolloReactCommon.MutationResult<AddPostMutation>; export type AddPostMutationOptions = ApolloReactCommon.BaseMutationOptions<AddPostMutation, AddPostMutationVariables>; export const AllPostsDocument = gql` query allPosts { allPosts { id title content } } `; export type AllPostsComponentProps = Omit<ApolloReactComponents.QueryComponentOptions<AllPostsQuery, AllPostsQueryVariables>, 'query'>; export const AllPostsComponent = (props: AllPostsComponentProps) => ( <ApolloReactComponents.Query<AllPostsQuery, AllPostsQueryVariables> query={AllPostsDocument} {...props} /> ); export type AllPostsProps<TChildProps = {}> = ApolloReactHoc.DataProps<AllPostsQuery, AllPostsQueryVariables> & TChildProps; export function withAllPosts<TProps, TChildProps = {}>(operationOptions?: ApolloReactHoc.OperationOption< TProps, AllPostsQuery, AllPostsQueryVariables, AllPostsProps<TChildProps>>) { return ApolloReactHoc.withQuery<TProps, AllPostsQuery, AllPostsQueryVariables, 
AllPostsProps<TChildProps>>(AllPostsDocument, { alias: 'allPosts', ...operationOptions }); }; export type AllPostsQueryResult = ApolloReactCommon.QueryResult<AllPostsQuery, AllPostsQueryVariables>;
shwilliam/fullstack-ts-boiler
server/src/models/Post.ts
import { ObjectType, Field, ID } from 'type-graphql'
import shortid from 'shortid'

// GraphQL object type representing a published blog post.
@ObjectType()
export class Post {
  // Server-generated identifier, assigned in the constructor via shortid.
  @Field(type => ID)
  id: string;

  @Field()
  title: string;

  @Field()
  content: string;

  constructor(init: Partial<Post>) {
    Object.assign(this, init)
    // Always generate a fresh id, even if `init` carried one.
    this.id = shortid.generate()
  }
}
shwilliam/fullstack-ts-boiler
client/src/graphql/client.ts
<reponame>shwilliam/fullstack-ts-boiler import {ApolloClient, ApolloLink, HttpLink, InMemoryCache} from 'apollo-boost' import {onError} from 'apollo-link-error' const httpLink = new HttpLink({ uri: 'http://localhost:80', }) const logErrorsLink = onError(({graphQLErrors, networkError}) => { if (graphQLErrors) { graphQLErrors.map(({message, locations, path}) => console.log( `[GraphQL error]: Message: ${message}, Location: ${locations}, Path: ${path}`, ), ) } if (networkError) { console.log(`[Network error]: ${networkError}`) if (graphQLErrors) graphQLErrors.map(({message, locations, path}) => console.log( `[GraphQL error]: Message: ${message}, Location: ${locations}, Path: ${path}`, ), ) } }) export const apolloClient = new ApolloClient({ link: ApolloLink.from([logErrorsLink, httpLink]), cache: new InMemoryCache(), })
Futurejason/pxt-magicbitssss
main.ts
/* <EMAIL> modified from pxt-servo/servodriver.ts load dependency "magicbit": "file:../pxt-magicbit" */ enum Offset { //% block=one ONE = 0, //% block=two TWO = 1, //% block=three THREE = 2, //% block=four FOUR = 3 } // enum IrPins{ // P0= 3, // P1= 2, // P2= 1, // P3= 4, // P4= 5, // P5= 17, // P6= 12, // P7= 11, // P8= 18, // P9= 10, // P10= 6, // P11= 26, // P12= 20, // P13= 23, // P14= 22, // P15= 21, // P16= 16, // P19= 0, // P20= 30 // }; enum RemoteButton { //% block=A A = 0x45, //% block=B B = 0x46, //% block=C C = 0x47, //% block=D D = 0x44, //% block=UP UP = 0x40, //% block=+ Add = 0x43, //% block=LEFT Left = 0x07, //% block=OK Ok = 0x15, //% block=RIGHT Right = 0x09, //% block=DOWN Down = 0x19, //% block=- EQ = 0x0d, //% block=0 NUM0 = 0x16, //% block=1 NUM1 = 0x0c, //% block=2 NUM2 = 0x18, //% block=3 NUM3 = 0x5e, //% block=4 NUM4 = 0x8, //% block=5 NUM5 = 0x1c, //% block=6 NUM6 = 0x5a, //% block=7 NUM7 = 0x42, //% block=8 NUM8 = 0x52, //% block=9 NUM9 = 0x4A }; //% color="#EE6A50" weight=10 icon="\uf013" namespace magicbit { const PCA9685_ADDRESS = 0x40 const MODE1 = 0x00 const MODE2 = 0x01 const SUBADR1 = 0x02 const SUBADR2 = 0x03 const SUBADR3 = 0x04 const PRESCALE = 0xFE const LED0_ON_L = 0x06 const LED0_ON_H = 0x07 const LED0_OFF_L = 0x08 const LED0_OFF_H = 0x09 const ALL_LED_ON_L = 0xFA const ALL_LED_ON_H = 0xFB const ALL_LED_OFF_L = 0xFC const ALL_LED_OFF_H = 0xFD const STP_CHA_L = 2047 const STP_CHA_H = 4095 const STP_CHB_L = 1 const STP_CHB_H = 2047 const STP_CHC_L = 1023 const STP_CHC_H = 3071 const STP_CHD_L = 3071 const STP_CHD_H = 1023 export enum Servos { S1 = 0x01, S2 = 0x02, S3 = 0x03, S4 = 0x04, S5 = 0x05, S6 = 0x06, S7 = 0x07, S8 = 0x08 } export enum Motors { M1 = 0x3, M2 = 0x4, M3 = 0x1, M4 = 0x2 } export enum Steppers { STPM1 = 0x2, STPM2 = 0x1 } export enum SonarVersion { V1 = 0x1, V2 = 0x2 } export enum Turns { //% blockId="T1B4" block="1/4" T1B4 = 90, //% blockId="T1B2" block="1/2" T1B2 = 180, //% blockId="T1B0" block="1" T1B0 = 
360, //% blockId="T2B0" block="2" T2B0 = 720, //% blockId="T3B0" block="3" T3B0 = 1080, //% blockId="T4B0" block="4" T4B0 = 1440, //% blockId="T5B0" block="5" T5B0 = 1800 } let initialized = false function i2cwrite(addr: number, reg: number, value: number) { let buf = pins.createBuffer(2) buf[0] = reg buf[1] = value pins.i2cWriteBuffer(addr, buf) } function i2ccmd(addr: number, value: number) { let buf = pins.createBuffer(1) buf[0] = value pins.i2cWriteBuffer(addr, buf) } function i2cread(addr: number, reg: number) { pins.i2cWriteNumber(addr, reg, NumberFormat.UInt8BE); let val = pins.i2cReadNumber(addr, NumberFormat.UInt8BE); return val; } function initPCA9685(): void { i2cwrite(PCA9685_ADDRESS, MODE1, 0x00) setFreq(50); for (let idx = 0; idx < 16; idx++) { setPwm(idx, 0, 0); } initialized = true } function setFreq(freq: number): void { // Constrain the frequency let prescaleval = 25000000; prescaleval /= 4096; prescaleval /= freq; prescaleval -= 1; let prescale = prescaleval; //Math.Floor(prescaleval + 0.5); let oldmode = i2cread(PCA9685_ADDRESS, MODE1); let newmode = (oldmode & 0x7F) | 0x10; // sleep i2cwrite(PCA9685_ADDRESS, MODE1, newmode); // go to sleep i2cwrite(PCA9685_ADDRESS, PRESCALE, prescale); // set the prescaler i2cwrite(PCA9685_ADDRESS, MODE1, oldmode); control.waitMicros(5000); i2cwrite(PCA9685_ADDRESS, MODE1, oldmode | 0xa1); } function setPwm(channel: number, on: number, off: number): void { if (channel < 0 || channel > 15) return; //serial.writeValue("ch", channel) //serial.writeValue("on", on) //serial.writeValue("off", off) let buf = pins.createBuffer(5); buf[0] = LED0_ON_L + 4 * channel; buf[1] = on & 0xff; buf[2] = (on >> 8) & 0xff; buf[3] = off & 0xff; buf[4] = (off >> 8) & 0xff; pins.i2cWriteBuffer(PCA9685_ADDRESS, buf); } function setStepper(index: number, dir: boolean): void { if (index == 1) { if (dir) { setPwm(0, STP_CHA_L, STP_CHA_H); setPwm(2, STP_CHB_L, STP_CHB_H); setPwm(1, STP_CHC_L, STP_CHC_H); setPwm(3, STP_CHD_L, STP_CHD_H); } 
// NOTE(review): this chunk begins inside setStepper(index, dir) — the function
// head and the first half of its body are above this view. The fragment below
// drives the second stepper's four PWM channels in the two rotation orders.
else {
    setPwm(3, STP_CHA_L, STP_CHA_H);
    setPwm(1, STP_CHB_L, STP_CHB_H);
    setPwm(2, STP_CHC_L, STP_CHC_H);
    setPwm(0, STP_CHD_L, STP_CHD_H);
}
} else {
    if (dir) {
        setPwm(4, STP_CHA_L, STP_CHA_H);
        setPwm(6, STP_CHB_L, STP_CHB_H);
        setPwm(5, STP_CHC_L, STP_CHC_H);
        setPwm(7, STP_CHD_L, STP_CHD_H);
    } else {
        setPwm(7, STP_CHA_L, STP_CHA_H);
        setPwm(5, STP_CHB_L, STP_CHB_H);
        setPwm(6, STP_CHC_L, STP_CHC_H);
        setPwm(4, STP_CHD_L, STP_CHD_H);
    }
}
}

// Zero both PWM channels of one motor port (motor index 1-4 maps to
// PCA9685 channel pair (index-1)*2 and (index-1)*2+1).
function stopMotor(index: number) {
    setPwm((index - 1) * 2, 0, 0);
    setPwm((index - 1) * 2 + 1, 0, 0);
}

/**
 * Servo Execute
 * @param index Servo Channel; eg: S1
 * @param degree [0-180] degree of servo; eg: 0, 90, 180
 */
//% blockId=magicbit_servo block="Servo|%index|degree %degree"
//% weight=100
//% degree.min=0 degree.max=180
//% name.fieldEditor="gridpicker" name.fieldOptions.columns=4
export function Servo(index: Servos, degree: number): void {
    if (!initialized) {
        initPCA9685()
    }
    // 50hz: 20,000 us
    // Map 0-180 degrees onto a 600-2400us pulse, then scale to the
    // PCA9685's 12-bit (4096-step) duty range over the 20ms period.
    let v_us = (degree * 1800 / 180 + 600) // 0.6 ~ 2.4
    let value = v_us * 4096 / 20000
    // Servo channels S1.. start at PCA9685 channel 8 (index + 7).
    setPwm(index + 7, 0, value)
}

/**
 * Servo Execute
 * @param index Servo Channel; eg: S1
 * @param degree1 [0-180] degree of servo; eg: 0, 90, 180
 * @param degree2 [0-180] degree of servo; eg: 0, 90, 180
 * @param speed [1-10] speed of servo; eg: 1, 10
 */
//% blockId=motorbit_servospeed block="Servo|%index|degree start %degree1|end %degree2|speed %speed"
//% weight=98
//% degree1.min=0 degree1.max=180
//% degree2.min=0 degree2.max=180
//% speed.min=1 speed.max=10
//% inlineInputMode=inline
//% name.fieldEditor="gridpicker" name.fieldOptions.columns=4
export function Servospeed(index: Servos, degree1: number, degree2: number, speed: number): void {
    if (!initialized) {
        initPCA9685()
    }
    // 50hz: 20,000 us
    // Sweep one degree per step; higher speed means a shorter pause
    // between steps (speed 10 pauses 0ms per step).
    if(degree1 > degree2){
        for(let i=degree1;i>degree2;i--){
            let v_us = (i * 1800 / 180 + 600) // 0.6 ~ 2.4
            let value = v_us * 4096 / 20000
            basic.pause(4 * (10 - speed));
            setPwm(index + 7, 0, value)
        }
    }
    else{
        for(let i=degree1;i<degree2;i++){
            let v_us = (i * 1800 / 180 + 600) // 0.6 ~ 2.4
            let value = v_us * 4096 / 20000
            basic.pause(4 * (10 - speed));
            setPwm(index + 7, 0, value)
        }
    }
}

/**
 * Geek Servo
 * @param index Servo Channel; eg: S1
 * @param degree [-45-225] degree of servo; eg: -45, 90, 225
 */
//% blockId=magicbit_gservo block="Geek Servo|%index|degree %degree=protractorPicker"
//% weight=96
//% blockGap=50
//% degree.defl=90
//% name.fieldEditor="gridpicker" name.fieldOptions.columns=4
export function GeekServo(index: Servos, degree: number): void {
    if (!initialized) {
        initPCA9685()
    }
    // 50hz: 20,000 us
    // Geek Servo range is wider (-45..225); 90 degrees is centered at 1500us.
    let v_us = ((degree - 90) * 20 / 3 + 1500) // 0.6 ~ 2.4
    let value = v_us * 4096 / 20000
    setPwm(index + 7, 0, value)
}

//% blockId=magicbit_stepper_degree block="Stepper 28BYJ-48|%index|degree %degree"
//% weight=90
export function StepperDegree(index: Steppers, degree: number): void {
    if (!initialized) {
        initPCA9685()
    }
    // Sign of degree selects direction; magnitude selects run time.
    setStepper(index, degree > 0);
    degree = Math.abs(degree);
    // 10240ms per full revolution — presumably tuned for the 28BYJ-48 at
    // this PWM rate; TODO confirm against hardware.
    basic.pause(10240 * degree / 360);
    MotorStopAll()
}

//% blockId=magicbit_stepper_turn block="Stepper 28BYJ-48|%index|turn %turn"
//% weight=90
export function StepperTurn(index: Steppers, turn: Turns): void {
    // Turns enum values are degree counts, so delegate directly.
    let degree = turn;
    StepperDegree(index, degree);
}

//% blockId=magicbit_stepper_dual block="Dual Stepper(Degree) |STPM1 %degree1| STPM2 %degree2"
//% weight=89
export function StepperDual(degree1: number, degree2: number): void {
    if (!initialized) {
        initPCA9685()
    }
    setStepper(1, degree1 > 0);
    setStepper(2, degree2 > 0);
    degree1 = Math.abs(degree1);
    degree2 = Math.abs(degree2);
    // Run both until the shorter move finishes, then stop that stepper's
    // channels (stepper 1 = motors 1,2; stepper 2 = motors 3,4) and let
    // the other finish the remainder.
    basic.pause(10240 * Math.min(degree1, degree2) / 360);
    if (degree1 > degree2) {
        stopMotor(3); stopMotor(4);
        basic.pause(10240 * (degree1 - degree2) / 360);
    } else {
        stopMotor(1); stopMotor(2);
        basic.pause(10240 * (degree2 - degree1) / 360);
    }
    MotorStopAll()
}

/**
 * Stepper Car move forward
 * @param distance Distance to move in cm; eg: 10, 20
 * @param diameter diameter of wheel in mm; eg: 48
 */
//% blockId=magicbit_stpcar_move block="Car Forward|Distance(cm) %distance|Wheel Diameter(mm) %diameter"
//% weight=88
export function StpCarMove(distance: number, diameter: number): void {
    if (!initialized) {
        initPCA9685()
    }
    // Run time = revolutions needed * 10240ms; distance(cm)*10 converts to mm.
    let delay = 10240 * 10 * distance / 3 / diameter; // use 3 instead of pi
    setStepper(1, delay > 0);
    setStepper(2, delay > 0);
    delay = Math.abs(delay);
    basic.pause(delay);
    MotorStopAll()
}

/**
 * Stepper Car turn by degree
 * @param turn Degree to turn; eg: 90, 180, 360
 * @param diameter diameter of wheel in mm; eg: 48
 * @param track track width of car; eg: 125
 */
//% blockId=magicbit_stpcar_turn block="Car Turn|Degree %turn|Wheel Diameter(mm) %diameter|Track(mm) %track"
//% weight=87
//% blockGap=50
export function StpCarTurn(turn: number, diameter: number, track: number): void {
    if (!initialized) {
        initPCA9685()
    }
    // Spin in place: the two steppers run in opposite directions.
    let delay = 10240 * turn * track / 360 / diameter;
    setStepper(1, delay < 0);
    setStepper(2, delay > 0);
    delay = Math.abs(delay);
    basic.pause(delay);
    MotorStopAll()
}

//% blockId=magicbit_motor_run block="Motor|%index|speed %speed"
//% weight=85
//% speed.min=-255 speed.max=255
//% name.fieldEditor="gridpicker" name.fieldOptions.columns=4
export function MotorRun(index: Motors, speed: number): void {
    if (!initialized) {
        initPCA9685()
    }
    speed = speed * 16; // map 255 to 4096
    if (speed >= 4096) {
        speed = 4095
    }
    if (speed <= -4096) {
        speed = -4095
    }
    if (index > 4 || index <= 0)
        return
    // Each motor uses a channel pair; drive one side, ground the other,
    // swapping sides for reverse.
    let pp = (index - 1) * 2
    let pn = (index - 1) * 2 + 1
    if (speed >= 0) {
        setPwm(pp, 0, speed)
        setPwm(pn, 0, 0)
    } else {
        setPwm(pp, 0, 0)
        setPwm(pn, 0, -speed)
    }
}

/**
 * Execute two motors at the same time
 * @param motor1 First Motor; eg: M1, M2
 * @param speed1 [-255-255] speed of motor; eg: 150, -150
 * @param motor2 Second Motor; eg: M3, M4
 * @param speed2 [-255-255] speed of motor; eg: 150, -150
 */
//% blockId=magicbit_motor_dual block="Motor|%motor1|speed %speed1|%motor2|speed %speed2"
//% weight=84
//% speed1.min=-255 speed1.max=255
//% speed2.min=-255 speed2.max=255
//% inlineInputMode=inline
//% name.fieldEditor="gridpicker" name.fieldOptions.columns=4
export function MotorRunDual(motor1: Motors, speed1: number, motor2: Motors, speed2: number): void {
    MotorRun(motor1, speed1);
    MotorRun(motor2, speed2);
}

/**
 * Execute single motors with delay
 * @param index Motor Index; eg: M1, M2, M3, M4
 * @param speed [-255-255] speed of motor; eg: 150, -150
 * @param delay seconds delay to stop; eg: 1
 */
//% blockId=magicbit_motor_rundelay block="Motor|%index|speed %speed|delay %delay|s"
//% weight=81
//% speed.min=-255 speed.max=255
//% name.fieldEditor="gridpicker" name.fieldOptions.columns=4
export function MotorRunDelay(index: Motors, speed: number, delay: number): void {
    MotorRun(index, speed);
    basic.pause(delay * 1000);
    MotorRun(index, 0);
}

//% blockId=magicbit_stop block="Motor Stop|%index|"
//% weight=80
export function MotorStop(index: Motors): void {
    MotorRun(index, 0);
}

//% blockId=magicbit_stop_all block="Motor Stop All"
//% weight=79
//% blockGap=50
export function MotorStopAll(): void {
    if (!initialized) {
        initPCA9685()
    }
    for (let idx = 1; idx <= 4; idx++) {
        stopMotor(idx);
    }
}

//% blockId="motorbit_rus04" block="On-board Ultrasonic part %index show color %rgb effect %effect"
//% weight=78
export function motorbit_rus04(index: RgbUltrasonics, rgb: RgbColors, effect: ColorEffect): void {
    sensors.rus04_rgb(DigitalPin.P16, 4, index, rgb, effect);
}

//% blockId=Ultrasonic_reading_distance block="On-board Ultrasonic reading distance"
//% weight=77
export function Ultrasonic_reading_distance(): number {
    return sensors.Ultrasonic(DigitalPin.P2);
}

//% blockId=Setting_the_on_board_lights block="Setting the on-board lights %index color %rgb Effect %effect"
//% weight=76
export function Setting_the_on_board_lights(offset: Offset,rgb: RgbColors, effect: ColorEffect): void {
    sensors.rus04_rgb(DigitalPin.P16, offset, 0, rgb, effect);
}

/**
 * button pushed.
 */
//% blockId=onPressEvent
//% block="on |%btn| button pressed" shim=IrRemote::onPressEvent group="micro:bit(v1)"
export function OnPressEvent(btn: RemoteButton, body: () => void): void {
    // shim: implemented natively in IrRemote::onPressEvent; the TS body
    // is only used by the simulator.
    return;
}

/**
 * initialises local variables
 * @param pin describe parameter here, eg: IrPins.P5
 */
//% blockId=IrRemote_init
//% block="connect ir receiver to %pin" shim=IrRemote::IrRemote_init group="micro:bit(v1)"
export function IrRemote_init(pin: IrPins): void {
    // shim: implemented natively in IrRemote::IrRemote_init.
    return;
}

export class Packeta {
    public mye: string;
    public myparam: number;
}

let irstate:string;
let state:number;

/**
 * Read IR sensor value V2.
 */
//% advanced=true shim=maqueenIRV2::irCode
function irCode(): number {
    return 0;
}

//% weight=5
//% group="micro:bit(v2)"
//% blockId=IR_readv2 block="read IR key value"
export function IR_readV2(): string {
    // Translate the internal key id from valuotokeyConversion() into the
    // label printed on the remote.
    let val = valuotokeyConversion();
    let str;
    switch (val) {
        case 11: str = 'A'; break;
        case 12: str = 'B'; break;
        case 13: str = 'C'; break;
        case 14: str = 'D'; break;
        case 21: str = 'UP'; break;
        case 66: str = '+'; break;
        case 24: str = 'LEFT'; break;
        case 55: str = 'OK'; break;
        case 22: str = 'RIGHT'; break;
        case 0: str = '0'; break;
        case 23: str = 'DOWN'; break;
        case 99: str = '-'; break;
        case 1: str = '1'; break;
        case 2: str = '2'; break;
        case 3: str = '3'; break;
        case 4: str = '4'; break;
        case 5: str = '5'; break;
        case 6: str = '6'; break;
        case 7: str = '7'; break;
        case 8: str = '8'; break;
        case 9: str = '9'; break;
        default: str = '-1';
    }
    return str;
}

//% weight=2
//% group="micro:bit(v2)"
//% blockId=IR_callbackUserv2 block="on IR received"
//% draggableParameters
export function IR_callbackUserV2(cb: (message: string) => void) {
    // Arm the polling loop below and register the user callback behind
    // event (source=11, value=22).
    state = 1;
    control.onEvent(11, 22, function() {
        cb(irstate)
    })
}

// Map raw NEC-style IR scan codes to internal key ids (consumed by IR_readV2).
function valuotokeyConversion(): number {
    let irdata: number;
    switch (irCode()) {
        case 0xba45: irdata = 11; break;
        case 0xb946: irdata = 12; break;
        case 0xb847: irdata = 13; break;
        case 0xbb44: irdata = 14; break;
        case 0xbf40: irdata = 21; break;
        case 0xbc43: irdata = 66; break;
        case 0xf807: irdata = 24; break;
        case 0xea15: irdata = 55; break;
        case 0xf609: irdata = 22; break;
        case 0xe916: irdata = 0; break;
        case 0xe619: irdata = 23; break;
        case 0xf20d: irdata = 99; break;
        case 0xf30c: irdata = 1; break;
        case 0xe718: irdata = 2; break;
        case 0xa15e: irdata = 3; break;
        case 0xf708: irdata = 4; break;
        case 0xe31c: irdata = 5; break;
        case 0xa55a: irdata = 6; break;
        case 0xbd42: irdata = 7; break;
        case 0xad52: irdata = 8; break;
        case 0xb54a: irdata = 9; break;
        default: irdata = -1;
    }
    return irdata;
}

// Background poll: while armed by IR_callbackUserV2, read the IR key every
// 20ms and raise the callback event when a key is present.
basic.forever(() => {
    if(state == 1){
        irstate = IR_readV2();
        if(irstate != '-1'){
            control.raiseEvent(11, 22)
        }
    }
    basic.pause(20);
})
}
liyiligang/mxui
web/src/base/filter.ts
<reponame>liyiligang/mxui
/*
 * Copyright 2021 liyiligang.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {globals} from "./globals";
import {routerName, routerPath} from "../router";
import i18n from '../base/i18n'
import {LocationQueryRaw, RouteLocationNormalizedLoaded} from "vue-router";
import {reactive} from "vue";

// Shared filter-tag state for the list views (node / func / report / notify /
// resource), plus helpers to turn the tags into router query parameters.
export module filter {
    // One filter tag. `sign` is the query-parameter key the tag contributes
    // to; `value` (when set) overrides the tag's display name in the query.
    export interface FilterTagInfo {
        sign:string
        type?:string
        value?:string
    }

    // Per-view filter state: view -> (item name -> (tag name -> tag info)).
    // Reactive so Vue components re-render when tags change.
    export let filterData = {
        nodeMap: reactive(new Map<string, Map<string, FilterTagInfo>>()),
        nodeFuncMap: reactive(new Map<string, Map<string, FilterTagInfo>>()),
        nodeReportMap: reactive(new Map<string, Map<string, FilterTagInfo>>()),
        nodeNotifyMap: reactive(new Map<string, Map<string, FilterTagInfo>>()),
        nodeResourceMap: reactive(new Map<string, Map<string, FilterTagInfo>>())
    }

    // Resolve the filter map for a router view name; unknown names get a
    // fresh empty map (writes to it are then effectively discarded).
    export function getFilterDataWithRouterName (name:string):Map<string, Map<string, FilterTagInfo>> {
        switch (name) {
            case routerName.node:
                return filterData.nodeMap
            case routerName.nodeFunc:
                return filterData.nodeFuncMap
            case routerName.nodeReport:
                return filterData.nodeReportMap
            case routerName.nodeNotify:
                return filterData.nodeNotifyMap
            case routerName.nodeResource:
                return filterData.nodeResourceMap
            default:
                return new Map<string, Map<string, FilterTagInfo>>()
        }
    }

    // Build a router query object from the view's tags: each item contributes
    // one key (the tags' shared `sign`) mapped to the list of tag values.
    export function getFilterDataQuery (name:string):LocationQueryRaw {
        let query = {}
        for (let item of getFilterDataWithRouterName(name).values()){
            let sign = ""
            let tags = new Array<any>()
            for (let tag of item.keys()){
                let tagInfo = item.get(tag)
                if (tagInfo){
                    if (tagInfo.value != undefined){
                        tags.push(tagInfo.value)
                    }else{
                        tags.push(tag)
                    }
                    // All tags of one item share the same sign; the last one wins.
                    sign = tagInfo.sign
                }
            }
            query[sign] = tags
        }
        return query
    }

    // Add a tag to a view's item, warning on blank inputs and (optionally)
    // on duplicates. Also reveals the filter panel.
    export function addTag(filterName:string, itemName:string, tagName:string, filterTagInfo:FilterTagInfo, repeatTips:boolean){
        let filterMap = getFilterDataWithRouterName(filterName)
        if (itemName.trim() == "" || tagName.trim() == "" || filterTagInfo.sign.trim() == ""){
            globals.viewWarn(i18n.global.t('filter.invalidTag'))
            return
        }
        if (!filterMap.has(itemName)){
            filterMap.set(itemName, new Map<string, FilterTagInfo>())
        }
        let item = filterMap.get(itemName)
        if (!item){
            return
        }
        if (repeatTips && item.has(tagName)){
            globals.viewWarn(tagName + i18n.global.t('filter.existTag', {name: itemName}))
            return
        }
        item.set(tagName, filterTagInfo)
        globals.globalsData.tempSetting.setting.dataFilterView = true
    }

    // Remove every tag of an item from a view's filter.
    export function clearTags(filterName, itemName){
        getFilterDataWithRouterName(filterName).delete(itemName)
    }

    // Remove one tag; drop the whole item once its last tag is gone.
    export function clearTag(filterName, itemName, tagName){
        let filterMap = getFilterDataWithRouterName(filterName)
        let item = filterMap.get(itemName)
        if (item){
            item.delete(tagName)
            if (item.size <= 0){
                filterMap.delete(itemName)
            }
        }
    }

    // Navigate to the node view pre-filtered to a single node ID.
    export function toNodeWithID(id:number, route:RouteLocationNormalizedLoaded){
        filter.clearTags(routerName.node, i18n.global.t('filter.tagName.ID'))
        filter.addTag(routerName.node, i18n.global.t('filter.tagName.ID'), String(id), {sign:"id"}, false)
        routerPath.toPath(routerName.node, {initPageNum:true, withPageSize:true}, route)
    }

    // Navigate to the node-func view pre-filtered to one node's ID.
    export function toNodeFuncWithNodeID(id:number, route:RouteLocationNormalizedLoaded){
        filter.clearTags(routerName.nodeFunc, i18n.global.t('filter.tagName.nodeID'))
        filter.addTag(routerName.nodeFunc, i18n.global.t('filter.tagName.nodeID'), String(id), {sign:"nodeID"}, false)
        routerPath.toPath(routerName.nodeFunc, {initPageNum:true, withPageSize:true}, route)
    }

    // Navigate to the node-report view pre-filtered to one node's ID.
    export function toNodeReportWithNodeID(id:number, route:RouteLocationNormalizedLoaded){
        filter.clearTags(routerName.nodeReport, i18n.global.t('filter.tagName.nodeID'))
        filter.addTag(routerName.nodeReport, i18n.global.t('filter.tagName.nodeID'), String(id), {sign:"nodeID"}, false)
        routerPath.toPath(routerName.nodeReport, {initPageNum:true, withPageSize:true}, route)
    }
}
liyiligang/mxui
web/vite.config.ts
import vue from '@vitejs/plugin-vue' module.exports = { plugins:[vue()], build:{ outDir:"../bin/web" }, optimizeDeps: { include: ["protobufjs/minimal", "element-plus/lib/locale/lang/zh-cn", "dayjs/locale/zh-cn", "echarts/core", "echarts/charts", "echarts/renderers", "echarts/components"] }, server: { host:"localhost", proxy: { '/api': { target: 'http://localhost:806', changeOrigin: true, ws:true, rewrite: (path) => path.replace(/^\/api/, '') }, } } }
liyiligang/mxui
web/src/base/refresh.ts
/*
 * Copyright 2021 liyiligang.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {watch} from "vue";
import {globals} from "./globals";

// Periodic-refresh registry. "Global" entries follow the app-wide
// auto-refresh switch and interval from settings; "user" entries carry their
// own interval. Each entry is keyed by a caller-supplied uid.
export module refresh {
    // One registered refresh: the active interval handle (null when not
    // scheduled), the callback, and the arguments to invoke it with.
    interface AutoRefreshHandle {
        timeout: NodeJS.Timeout|null
        call: (...args: any[]) => void
        parameter:any[]
    }

    let globalAutoRefreshMap:Map<number, AutoRefreshHandle> = new Map<number, AutoRefreshHandle>()
    let userAutoRefreshMap:Map<number, AutoRefreshHandle> = new Map<number, AutoRefreshHandle>()

    // Re-schedule every global entry whenever the settings that drive them
    // change. Interval changes fire the callbacks immediately; toggling the
    // auto-refresh switch only re-schedules.
    export function watchGlobalAutoRefresh() {
        updateAllGlobalAutoRefresh(false)
        watch(() => globals.globalsData.managerSetting.setting.autoUpdateInterval, () => {
            updateAllGlobalAutoRefresh(true)
        })
        watch(() => globals.globalsData.tempSetting.setting.autoRefresh, () => {
            updateAllGlobalAutoRefresh(false)
        })
    }

    // Register (or replace) a global refresh. The callback runs once right
    // away, then on the settings-driven interval if auto-refresh is enabled.
    export function addGlobalAutoRefresh(uid:number|undefined, callback: (...args: any[]) => void, ...args: any[]) {
        if (callback == null){
            return
        }
        if (uid == undefined){
            return
        }
        removeGlobalAutoRefresh(uid)
        callback(...args)
        let handle:AutoRefreshHandle = {timeout:null, call:callback, parameter:args}
        globalAutoRefreshMap.set(uid, handle)
        // Interval 0 means "manual only": do not schedule.
        if (globals.globalsData.tempSetting.setting.autoRefresh &&
            globals.globalsData.managerSetting.setting.autoUpdateInterval != 0){
            handle.timeout = setInterval(callback, globals.globalsData.managerSetting.setting.autoUpdateInterval*1000, ...args)
        }
    }

    // Re-schedule all global entries (optionally firing each immediately).
    export function updateAllGlobalAutoRefresh(immediately:boolean) {
        globalAutoRefreshMap.forEach(function(value, key){
            updateGlobalAutoRefresh(key, value, immediately)
        });
    }

    // Cancel and re-create one global entry's interval from current settings.
    export function updateGlobalAutoRefresh(uid:number|undefined, handle:AutoRefreshHandle, immediately:boolean) {
        if (uid == undefined){
            return
        }
        if (handle == undefined){
            return
        }
        clearHandleTimeout(handle)
        if (globals.globalsData.tempSetting.setting.autoRefresh &&
            globals.globalsData.managerSetting.setting.autoUpdateInterval != 0){
            if (immediately){
                handle.call(...handle.parameter)
            }
            let t = setInterval(handle.call, globals.globalsData.managerSetting.setting.autoUpdateInterval*1000, ...handle.parameter)
            handle.timeout = t
        }
    }

    // Cancel and forget a global entry.
    export function removeGlobalAutoRefresh(uid:number|undefined) {
        if (uid == undefined){
            return
        }
        let handle = globalAutoRefreshMap.get(uid)
        if (handle == undefined){
            return
        }
        clearHandleTimeout(handle)
        globalAutoRefreshMap.delete(uid)
    }

    // Register (or replace) a user refresh with its own interval in seconds.
    // The callback runs once right away; time 0 means "manual only".
    export function addUserAutoRefresh(uid:number|undefined, time:number, callback: (...args: any[]) => void, ...args: any[]) {
        if (callback == null){
            return
        }
        if (uid == undefined){
            return
        }
        removeUserAutoRefresh(uid)
        callback(...args)
        let handle:AutoRefreshHandle = {timeout:null, call:callback, parameter:args}
        userAutoRefreshMap.set(uid, handle)
        if (time != 0){
            handle.timeout = setInterval(callback, time*1000, ...args)
        }
    }

    // Change a user entry's interval (optionally firing it immediately).
    export function updateUserAutoRefresh(uid:number|undefined, time:number, immediately:boolean) {
        if (uid == undefined){
            return
        }
        let handle = userAutoRefreshMap.get(uid)
        if (handle == undefined){
            return
        }
        clearHandleTimeout(handle)
        if (time != 0){
            if (immediately){
                handle.call(...handle.parameter)
            }
            let t = setInterval(handle.call, time*1000, ...handle.parameter)
            handle.timeout = t
        }
    }

    // Fire a user entry's callback once, outside its schedule.
    export function execUserRefresh(uid:number|undefined) {
        if (uid == undefined){
            return
        }
        let handle = userAutoRefreshMap.get(uid)
        if (handle == undefined){
            return
        }
        handle.call(...handle.parameter)
    }

    // Cancel and forget a user entry.
    export function removeUserAutoRefresh(uid:number|undefined) {
        if (uid == undefined){
            return
        }
        let handle = userAutoRefreshMap.get(uid)
        if (handle == undefined){
            return
        }
        clearHandleTimeout(handle)
        userAutoRefreshMap.delete(uid)
    }

    // Stop a handle's interval if one is running.
    export function clearHandleTimeout(handle:AutoRefreshHandle) {
        if (handle.timeout != null){
            clearInterval(handle.timeout)
            handle.timeout = null
        }
    }
}
liyiligang/mxui
web/src/base/defaultVal.ts
<filename>web/src/base/defaultVal.ts /* * Copyright 2021 liyiligang. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {protoManage} from "../proto/manage"; import i18n from '../base/i18n' export module defaultVal { export function getDefaultProtoNode():protoManage.Node { return protoManage.Node.create({ Base: protoManage.Base.create(), Name: i18n.global.t('defaultVal.unknown') }) } export function getDefaultProtoNodeFunc():protoManage.NodeFunc { return protoManage.NodeFunc.create({ Base: protoManage.Base.create(), Name: i18n.global.t('defaultVal.unknown') }) } export function getDefaultProtoNodeFuncCall():protoManage.NodeFuncCall { return protoManage.NodeFuncCall.create({ Base: protoManage.Base.create(), }) } export function getDefaultProtoNodeReport():protoManage.NodeReport{ return protoManage.NodeReport.create({ Base: protoManage.Base.create(), Name: i18n.global.t('defaultVal.unknown') }) } export function getDefaultProtoNodeReportVal():protoManage.NodeReportVal{ return protoManage.NodeReportVal.create({ Base: protoManage.Base.create() }) } export function getDefaultLanguage():string{ // @ts-ignore let lan = navigator.systemLanguage || navigator.language; if(lan.toLowerCase().indexOf('zh')!==-1){ return 'chs' }else if(lan.toLowerCase().indexOf('en')!==-1){ return 'eng' } return 'eng' } }
liyiligang/mxui
web/src/base/convert.ts
/* * Copyright 2021 liyiligang. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {protoManage} from "../proto/manage"; import i18n from '../base/i18n' export module convert { export function getColorByState(state: protoManage.State|undefined|null):string { switch (state) { case protoManage.State.StateNot: return "color-state-main" case protoManage.State.StateUnknow: return "color-state-lose" case protoManage.State.StateNormal: return "color-state-success" case protoManage.State.StateWarn: return "color-state-warning" case protoManage.State.StateError: return "color-state-danger" } return "color-state-main" } export function getColorWithResourceName(state: protoManage.State|undefined|null):string { switch (state) { case protoManage.State.StateNormal: return "success" } return "danger" } export function getColorWithResourceState(state: protoManage.State|undefined|null):string { switch (state) { case protoManage.State.StateNormal: return "color-state-success" } return "color-state-danger" } export function getTableRowColorByState(state: protoManage.State|undefined|null):string { switch (state) { case protoManage.State.StateUnknow: return "table-color-state-info" case protoManage.State.StateNormal: return "table-color-state-success" case protoManage.State.StateWarn: return "table-color-state-warning" case protoManage.State.StateError: return "table-color-state-danger" } return "" } export function getColorByManagerState(state: protoManage.State|undefined|null):string { switch 
(state) { case protoManage.State.StateNormal: return "color-state-success" } return "color-state-lose" } export function getColorByLevel(level: protoManage.Level|undefined|null):string { switch (level) { case protoManage.Level.LevelPrimary: return "color-state-main" case protoManage.Level.LevelIntermediate: return "color-state-success" case protoManage.Level.LevelSenior: return "color-state-warning" case protoManage.Level.LevelSuper: return "color-state-danger" } return "color-state-lose" } export function getNodeStateName(state: protoManage.State):string { switch (state) { case protoManage.State.StateNormal: return i18n.global.t('node.state.normal') case protoManage.State.StateWarn: return i18n.global.t('node.state.warn') case protoManage.State.StateError: return i18n.global.t('node.state.error') } return i18n.global.t('node.state.unknown') } export function getStateViewType(level: protoManage.State|undefined|null):string { switch (level) { case protoManage.State.StateUnknow: return "info" case protoManage.State.StateNormal: return "success" case protoManage.State.StateWarn: return "warning" case protoManage.State.StateError: return "danger" } return "" } export function getManagerLevelName(level: protoManage.Level|undefined|null):string { switch (level) { case protoManage.Level.LevelPrimary: return i18n.global.t('manager.level.primary') case protoManage.Level.LevelIntermediate: return i18n.global.t('manager.level.intermediate') case protoManage.Level.LevelSenior: return i18n.global.t('manager.level.senior') case protoManage.Level.LevelSuper: return i18n.global.t('manager.level.super') } return i18n.global.t('manager.level.unknown') } export function getLevelViewType(level: protoManage.Level|undefined|null):string { switch (level) { case protoManage.Level.LevelPrimary: return "" case protoManage.Level.LevelIntermediate: return "success" case protoManage.Level.LevelSenior: return "warning" case protoManage.Level.LevelSuper: return "danger" } return "info" } export 
function getNodeFuncCallStateName(state: protoManage.State|undefined|null):string { switch (state) { case protoManage.State.StateUnknow: return i18n.global.t('nodeFuncCall.state.timeout') case protoManage.State.StateNormal: return i18n.global.t('nodeFuncCall.state.normal') case protoManage.State.StateWarn: return i18n.global.t('nodeFuncCall.state.warn') case protoManage.State.StateError: return i18n.global.t('nodeFuncCall.state.error') } return i18n.global.t('nodeFuncCall.state.unknown') } export function getNodeFuncCallStateIcon(state: protoManage.State|undefined|null):string { switch (state) { case protoManage.State.StateUnknow: return "el-icon-remove" case protoManage.State.StateNormal: return "el-icon-success" case protoManage.State.StateWarn: return "el-icon-warning" case protoManage.State.StateError: return "el-icon-error" } return "el-icon-question" } export function getNodeFuncParameterTypeName():string { return i18n.global.t('nodeFunc.parameterTypeName.form') } export function getNodeFuncReturnTypeName(type: protoManage.NodeFuncReturnType|undefined|null):string { switch (type) { case protoManage.NodeFuncReturnType.NotReturn: return i18n.global.t('nodeFunc.returnTypeName.notReturn') case protoManage.NodeFuncReturnType.Error: return i18n.global.t('nodeFunc.returnTypeName.error') case protoManage.NodeFuncReturnType.Text: return i18n.global.t('nodeFunc.returnTypeName.text') case protoManage.NodeFuncReturnType.Json: return i18n.global.t('nodeFunc.returnTypeName.json') case protoManage.NodeFuncReturnType.Link: return i18n.global.t('nodeFunc.returnTypeName.link') case protoManage.NodeFuncReturnType.Image: return i18n.global.t('nodeFunc.returnTypeName.image') case protoManage.NodeFuncReturnType.Media: return i18n.global.t('nodeFunc.returnTypeName.media') case protoManage.NodeFuncReturnType.File: return i18n.global.t('nodeFunc.returnTypeName.file') case protoManage.NodeFuncReturnType.Table: return i18n.global.t('nodeFunc.returnTypeName.table') case 
protoManage.NodeFuncReturnType.Charts: return i18n.global.t('nodeFunc.returnTypeName.charts') } return i18n.global.t('nodeFunc.returnTypeName.unknown') } export function renderSize(fileSize:number):string{ if(null==fileSize){ return "0 Bytes"; } let unitArr = ["B","KB","MB","GB","TB","PB","EB","ZB","YB"]; let index=0; let srcSize = parseFloat(String(fileSize)); index=Math.floor(Math.log(srcSize)/Math.log(1024)); let size =srcSize/Math.pow(1024,index); let v = size.toFixed(2); return v+unitArr[index]; } export function getNodeReportIntervalStr(interval: number):string { if (interval <= 0) { return i18n.global.t('time.manual') } let conf = [{max:1000, name:i18n.global.t('time.ms')}, {max:60, name:i18n.global.t('time.s')}, {max:60, name:i18n.global.t('time.min')}, {max:24, name:i18n.global.t('time.hour')}, {max:30, name:i18n.global.t('time.day')}] let calc = function (index:number, val:number):string { if (index >= conf.length){ return "" } let f = conf[index] if (index == conf.length - 1){ return val + f.name } if (val >= f.max){ let c = parseInt(String(val / f.max)) let y = val % f.max let ys = "" if (y > 0) { ys = y + f.name } return calc(index + 1, c) + ys } return val + f.name } return calc(0, interval) + "/"+i18n.global.t('time.once') } export function isGrayByState(state: protoManage.State):boolean { return state < protoManage.State.StateNormal || state > protoManage.State.StateError } export function isGrayByLevel(state: protoManage.State):boolean { return state < protoManage.State.StateUnknow || state > protoManage.State.StateError } export function uint8ArrayToString(data: Uint8Array):string { let dataString = ""; for (let i = 0; i < data.length; i++) { dataString += String.fromCharCode(data[i]); } return dataString } export function stringToUint8Array(str: string):Uint8Array { let arr:Array<number> = []; for (let i = 0, j = str.length; i < j; ++i) { arr.push(str.charCodeAt(i)); } let tmpUint8Array = new Uint8Array(arr); return tmpUint8Array } export 
function timeStampToFormatDate (timeStamp:number|undefined|null):string { if (timeStamp == 0 || timeStamp==undefined){ return "0000-00-00 00:00:00" } function two(s:number){ return s<10?"0"+s:s; } let date = new Date(timeStamp) let year = date.getFullYear(); let month = date.getMonth() + 1; let day = date.getDate(); let hour = date.getHours(); let minute = date.getMinutes(); let second = date.getSeconds(); return year + "-" + two(month) + "-" + two(day) + " " + two(hour) + ":" + two(minute) + ":" + two(second); } export function dateStringToTimeStamp(dateStr:string):number { return new Date(dateStr).getTime() } export function timeStampToDateString(timeStamp:number):string { return new Date(timeStamp).toLocaleString() } export function dataToArray(data:any):any[] { if (data == undefined) { return data } return Array.isArray(data) ? data : [data] } export function dataToTimeArray(query:any):any[] { let senderTimeArray = convert.dataToArray(query) let protoSenderTime = new Array<protoManage.Time>() if (senderTimeArray) { for (let item of senderTimeArray) { let senderTime = String(item).split("-") let time = protoManage.Time.create({BeginTime: Number(senderTime[0]), EndTime: Number(senderTime[1])}) protoSenderTime.push(time) } } return protoSenderTime } }
liyiligang/mxui
web/src/components/tableInfiniteScroll/tableInfiniteScroll.ts
/*
 * Copyright 2021 liyiligang.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Wrapper that makes element-plus's infinite-scroll directive usable on an
 * el-table (including inside an el-dialog) by re-targeting it at the table's
 * internal scrolling wrapper.
 */
import { ElInfiniteScroll } from "element-plus";

const elScope = 'ElInfiniteScroll'; // scope name
const msgTitle = `[elTableInfiniteScroll]: `; // message title
const elTableScrollWrapperClass = '.el-table__body-wrapper';

export default {
    install: (app, options) => {},
    mounted(el, binding, vnode, oldVnode) {
        // Find the scrolling wrapper inside the table element.
        const scrollElem = el.querySelector(elTableScrollWrapperClass);
        // Bail out with an error if the wrapper element is missing.
        if (!scrollElem) {
            throw `${msgTitle}找不到 ${elTableScrollWrapperClass} 容器`;
        }
        // Make the wrapper scrollable.
        scrollElem.style.overflowY = 'auto';
        // Defer until after the DOM has rendered.
        setTimeout(() => {
            if (!el.style.height) {
                // Without an explicit table height the wrapper never scrolls,
                // so the directive would keep loading forever; default 400px.
                scrollElem.style.height = '400px';
                console.warn(
                    `${msgTitle}请尽量设置 el-table 的高度,可以设置为 auto/100%(自适应高度),未设置会取 400px 的默认值(不然会导致一直加载)`
                );
            }
            asyncElOptions(vnode, el, scrollElem);
            // Bind the real infinite-scroll directive to the wrapper.
            if (ElInfiniteScroll.mounted == undefined){
                throw `${msgTitle} mounted is undefined`;
            }
            ElInfiniteScroll.mounted(scrollElem, binding, vnode, oldVnode);
            // Keep the wrapper's directive state on el so unmounted can
            // clean up the event listeners.
            el[elScope] = scrollElem[elScope];
        }, 0);
    },
    updated(el, binding, vnode) {
        asyncElOptions(vnode, el, el.querySelector(elTableScrollWrapperClass));
    },
    unmounted(el, binding, vnode, oldVnode){
        // if (ElInfiniteScroll.unmounted == undefined){
        //     throw `${msgTitle} unmounted is undefined`;
        // }
        // ElInfiniteScroll.unmounted(el, binding, vnode, oldVnode)
    }
};

/**
 * Copy the el-infinite-scroll configuration attributes from the directive's
 * host element onto the actual scrolling element.
 * @param sourceVNode
 * @param sourceElem
 * @param targetElem
 */
function asyncElOptions(sourceVNode, sourceElem, targetElem) {
    let value;
    ['disabled', 'delay', 'immediate'].forEach((name) => {
        name = 'infinite-scroll-' + name;
        value = sourceElem.getAttribute(name);
        if (value !== null) {
            targetElem.setAttribute(name, value);
        }
    });
    // fix: on Windows/Chrome, scrollTop + clientHeight can disagree with
    // scrollHeight by a fraction, so force a minimum trigger distance of 1.
    const name = 'infinite-scroll-distance';
    value = sourceElem.getAttribute(name);
    targetElem.setAttribute(name, value < 1 ? 1 : value);
}
liyiligang/mxui
web/src/main.ts
/* * Copyright 2021 liyiligang. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { createApp } from 'vue' import App from './App.vue' import './index.css' import router from './router' import ElementPlus from 'element-plus' import 'element-plus/dist/index.css' import VueForm from '@lljj/vue3-form-element'; import elTableInfiniteScroll from "./components/tableInfiniteScroll"; import UploadFile from "./components/vueForm/widgets/UploadFile.vue"; import JsonViewer from "vue3-json-viewer" import i18n from './base/i18n' import "echarts"; import ECharts from 'vue-echarts' const app = createApp(App) app.config.unwrapInjectedRef = true app.use(router) .use(elTableInfiniteScroll) .use(ElementPlus) .use(JsonViewer) .use(i18n) .component('v-chart', ECharts) .component('VueForm', VueForm) .component('UploadFile', UploadFile) .mount('#app')
liyiligang/mxui
web/src/base/websocket.ts
/*
 * Copyright 2021 liyiligang.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// WebSocket client for the management backend: decodes protobuf-framed
// binary messages, dispatches them by protoManage.Order, auto-reconnects
// on unexpected closes, and keeps the link alive with a periodic
// "heartbeat" text frame.

import {ElMessage, ElNotification} from "element-plus";
import {protoManage} from "../proto/manage";
import {globals} from "./globals";
import i18n from './i18n'

// Active socket, or null until wsConnect() has run.
export let ws:WebSocket|null = null
// Number of reconnect attempts since the last successful open; reset in onOpen.
export let reconnectCnt = 0
// Set by wsClose(); suppresses reconnection and error popups for deliberate closes.
export let isClosed = false
// Close code the server sends for an intentional shutdown (no auto-reconnect).
export let websocketCloseByServer = 4000
// Vertical offset (px) applied to ElNotification popups.
export let notifyOffset = 80
// Handle returned by setInterval for the heartbeat timer; null when idle.
let heartbeatInterval:any = null

export module websocket {

    /**
     * Open a websocket to `addr` and wire up the event handlers.
     * The nested handler functions close over `addr` so that
     * wsReconnect() can redial the same endpoint.
     */
    export function wsConnect(addr:string) {
        try {
            if ('WebSocket' in window) {
                ws = new WebSocket(addr)
            } else if ('MozWebSocket' in window) {
                // Legacy Firefox exposed MozWebSocket; the standard constructor
                // is used either way. NOTE(review): presumably kept for very old
                // browsers — the branch is equivalent to the one above.
                ws = new WebSocket(addr)
            } else {
                globals.viewWarn(i18n.global.t('websocket.notSupport'))
            }
            if (ws != null){
                ws.onopen = onOpen
                ws.onclose = onClose
                ws.onerror = onError
                ws.onmessage = onMessage
            }
        } catch (e) {
            globals.viewError(i18n.global.t('websocket.initError') + e)
        }

        // Schedule a redial after the configured delay, counting attempts so the
        // user sees how many retries have happened.
        function wsReconnect() {
            reconnectCnt++
            setTimeout(function () {
                globals.viewWarn(i18n.global.t('websocket.reconnect', {msg:reconnectCnt}));
                wsConnect(addr);
            }, globals.globalsConfig.wsConfig.wsReconnectTime);
        }

        // On (re)open: announce success if this was a reconnect, then start heartbeats.
        function onOpen(ev: Event) {
            if (reconnectCnt != 0){
                globals.viewSuccess(i18n.global.t('websocket.reconnectSuccess'));
                reconnectCnt = 0
            }
            sendHeartbeat(true)
        }

        // On close: stop heartbeats; show a disconnect error only for the first
        // unexpected close (not on every failed retry, not after wsClose()).
        // Reconnect unless the server closed us on purpose (code 4000) or the
        // client closed deliberately; a server-initiated close forces re-login.
        function onClose(ev: CloseEvent) {
            sendHeartbeat(false)
            if (reconnectCnt == 0 && !isClosed) {
                globals.viewError(i18n.global.t('websocket.disConnect')+ev.reason + '(' + ev.code + ')');
            }
            console.warn("websocket is closed: ", ev);
            if (ev.code != websocketCloseByServer && !isClosed){
                wsReconnect()
            }else{
                if (!isClosed){
                    globals.reLogin()
                }
            }
        }

        function onError(ev: Event) {
            console.error("websocket error: ", ev);
        }

        // Messages arrive as binary Blobs; read into an ArrayBuffer, decode the
        // protobuf envelope and hand it to the dispatcher.
        function onMessage(ev: MessageEvent) {
            let arrayBuffer;
            let fileReader = new FileReader();
            fileReader.onload = function() {
                arrayBuffer = this.result;
                let uint8Buffer= new Uint8Array(arrayBuffer);
                let wsMessage = protoManage.Message.decode(uint8Buffer)
                receiver(wsMessage)
            };
            fileReader.readAsArrayBuffer(ev.data);
        }
    }

    /**
     * Deliberately close the connection. Setting isClosed first tells
     * onClose not to reconnect or show errors. Note: isClosed is never
     * reset, so a later wsConnect() will not auto-reconnect — TODO confirm
     * this is intended.
     */
    export function wsClose() {
        isClosed = true
        ws?.close()
    }

    // Start (open=true) or stop (open=false) the 20-second "heartbeat" frame.
    function sendHeartbeat(open:boolean){
        if (open) {
            heartbeatInterval = setInterval(()=>{
                ws?.send("heartbeat")
            },20*1000)
        }else{
            if (heartbeatInterval){
                clearInterval(heartbeatInterval)
            }
        }
    }

    // Dispatch a decoded envelope by its Order code; unknown codes surface an error.
    function receiver(msg:protoManage.Message){
        globals.globalsData.wsMessage.order = msg.order
        switch (msg.order) {
            case protoManage.Order.NodeNotifyAdd:
                nodeNotify(msg.message)
                break
            case protoManage.Order.NodeFuncCallAns:
                nodeFuncCallAns(msg.message)
                break
            case protoManage.Order.ManagerUpdate:
                managerUpdate(msg.message)
                break
            case protoManage.Order.ManagerDel:
                managerDel(msg.message)
                break
            default:
                globals.viewError(i18n.global.t('websocket.orderError') + msg.order)
                break
        }
    }

    // Store a function-call answer in global state for reactive consumers.
    function nodeFuncCallAns(data:Uint8Array){
        globals.globalsData.wsMessage.message.nodeFuncCallAns = protoManage.AnsNodeFuncCall.decode(data)
    }

    // Manager record changed server-side: a permission-level change forces
    // re-login; otherwise refresh the cached manager info.
    function managerUpdate(data:Uint8Array){
        let response = protoManage.Manager.decode(data)
        if (response.Level != globals.globalsData.manager.info.Level){
            globals.reLogin()
            globals.viewWarn(i18n.global.t('websocket.levelChanged', {msg:response.Name}))
        }
        globals.updateManagerInfo(response)
    }

    // Current account was deleted by an administrator: log out and notify.
    function managerDel(data:Uint8Array){
        let response = protoManage.Manager.decode(data)
        globals.reLogin()
        globals.viewWarn(i18n.global.t('websocket.deleteNotify', {msg:response.Name}))
    }

    // Route a node notification to the popup style matching its State.
    function nodeNotify(data:Uint8Array){
        let msg = protoManage.NodeNotify.decode(data)
        switch (msg.State) {
            case protoManage.State.StateNormal:
                notifySuccess(msg)
                break
            case protoManage.State.StateWarn:
                notifyWarn(msg)
                break
            case protoManage.State.StateError:
                notifyError(msg)
                break
            case protoManage.State.StateUnknow:
                notifyInfo(msg)
                break
            default:
                notifyInfo(msg)
                break
        }
    }

    // The notify* helpers below use a corner ElNotification (with sender title)
    // for node-originated messages and a plain ElMessage toast otherwise.

    function notifyInfo(msg:protoManage.NodeNotify){
        if (msg.SenderType == protoManage.NotifySenderType.NotifySenderTypeNode){
            ElNotification({ title: msg.SenderName, message: msg.Message, type: 'info', offset: notifyOffset })
        }else{
            ElMessage.info(msg.Message)
        }
    }

    function notifySuccess(msg:protoManage.NodeNotify){
        if (msg.SenderType == protoManage.NotifySenderType.NotifySenderTypeNode){
            ElNotification({ title: msg.SenderName, message: msg.Message, type: 'success', offset: notifyOffset })
        }else{
            ElMessage.success(msg.Message)
        }
    }

    function notifyWarn(msg:protoManage.NodeNotify){
        if (msg.SenderType == protoManage.NotifySenderType.NotifySenderTypeNode){
            ElNotification({ title: msg.SenderName, message: msg.Message, type: 'warning', offset: notifyOffset })
        }else{
            ElMessage.warning(msg.Message)
        }
    }

    function notifyError(msg:protoManage.NodeNotify){
        if (msg.SenderType == protoManage.NotifySenderType.NotifySenderTypeNode){
            ElNotification({ title: msg.SenderName, message: msg.Message, type: 'error', offset: notifyOffset })
        }else{
            ElMessage.error(msg.Message)
        }
    }
}
liyiligang/mxui
web/src/router.ts
/* * Copyright 2021 liyiligang. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {createRouter, createWebHistory, LocationQueryRaw, RouteLocationNormalizedLoaded, useRoute} from "vue-router"; import Login from "./views/Login.vue"; import Home from "./views/Home.vue"; import Node from "./views/node/Node.vue"; import NodeFunc from "./views/node/NodeFunc.vue"; import NodeReport from "./views/node/NodeReport.vue"; import NodeNotify from "./views/node/NodeNotify.vue"; import NodeResource from "./views/node/NodeResource.vue"; import NodeTest from "./views/node/NodeTest.vue"; import NotFound from "./views/NotFound.vue"; import {globals} from "./base/globals"; import {filter} from "./base/filter"; import merge from "webpack-merge"; export const routerName = { login:"login", node:"node", nodeFunc:"nodeFunc", nodeReport:"nodeReport", nodeNotify:"nodeNotify", nodeResource:"nodeResource", nodeTest:"nodeTest" } export interface routerPathInitConfig { initPageNum? :boolean withPageNum? :boolean initPageSize? :boolean withPageSize? :boolean disableFilter? :boolean disableAutoRefresh? 
:boolean customer?:object } const router = createRouter({ history:createWebHistory(), routes: [ {path: '/', name:"login", component: Login}, { path: '/home', name:"home", component: Home, children: [ {path: routerName.node, name: routerName.node, component: Node}, {path: routerName.nodeFunc, name: routerName.nodeFunc, component: NodeFunc}, {path: routerName.nodeReport, name: routerName.nodeReport, component: NodeReport}, {path: routerName.nodeNotify, name: routerName.nodeNotify, component: NodeNotify}, {path: routerName.nodeResource, name: routerName.nodeResource, component: NodeResource}, {path: routerName.nodeTest, name: routerName.nodeTest, component: NodeTest}, ] }, {path: "/notFound", name: "notFound", component: NotFound} ] }) router.beforeEach((to, from, next) => { if (to.matched.length ===0) { next('/notFound') } else { next(); } }); export module routerPath { export function getQuery(name:string, config?:routerPathInitConfig, route?:RouteLocationNormalizedLoaded):LocationQueryRaw{ let query:LocationQueryRaw = {} if (config) { if (config.initPageNum) {query.pageNum = globals.globalsConfig.pageConfig.initNum} if (config.withPageNum && route) { if (route.query.pageNum != undefined) { query.pageNum = route.query.pageNum }else { query.pageNum = globals.globalsConfig.pageConfig.initNum } } if (config.initPageSize) { query.pageSize = globals.globalsConfig.pageConfig.initSize } if (config.withPageSize && route) { if (route.query.pageSize != undefined) { query.pageSize = route.query.pageSize }else { query.pageSize = globals.globalsConfig.pageConfig.initSize } } if (!config.disableFilter && globals.globalsData.tempSetting.setting.dataFilterView){ query.filter = "true" query = merge<any>(query, filter.getFilterDataQuery(name)) } if (!config.disableAutoRefresh && globals.globalsData.tempSetting.setting.autoRefresh){ query.autoRefresh = "true" } } return query } export function getPath(name:string, config?:routerPathInitConfig, 
route?:RouteLocationNormalizedLoaded):string{ let query:LocationQueryRaw = getQuery(name, config, route) let str = name + "?" for (let item in query) { str += item + "=" + query[item] + "&" } if (config && config.customer){ for (let item in config.customer) { str += item + "=" + config.customer[item] + "&" } } if (str[str.length -1] == "&") { str=str.slice(0, str.length-1) } return str } export function toPath(name:string, config?:routerPathInitConfig, route?:RouteLocationNormalizedLoaded){ router.push({ name: name, query:getQuery(name, config, route) }).then() } } export default router
liyiligang/mxui
web/src/base/request.ts
/* * Copyright 2021 liyiligang. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {protoManage} from "../proto/manage" import {globals} from "./globals"; import axios from "axios"; import {convert} from "./convert"; import i18n from '../base/i18n' export module request { export function reqFindSystemInitInfo(req:protoManage.ReqSystemInitInfo):Promise<protoManage.AnsSystemInitInfo> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqSystemInitInfo.encode(req).finish() request.httpRequest("/system/getInitInfo", msg) .then((response) => { let ans = protoManage.AnsSystemInitInfo.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerLogin(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/login", msg) .then((response) => { let ans = protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerRegister(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/register", msg) .then((response) => { let ans = protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerAdd(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let 
msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/add", msg) .then((response) => { let ans = protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerNickNameList():Promise<protoManage.AnsManagerList> { return new Promise((resolve, reject)=>{ let req = protoManage.ReqManagerList.create({}) let msg = protoManage.ReqManagerList.encode(req).finish() request.httpRequest("/manager/findNickName", msg) .then((response) => { let ans = protoManage.AnsManagerList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerList():Promise<protoManage.AnsManagerList> { return new Promise((resolve, reject)=>{ let req = protoManage.ReqManagerList.create({}) let msg = protoManage.ReqManagerList.encode(req).finish() request.httpRequest("/manager/find", msg) .then((response) => { let ans = protoManage.AnsManagerList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerByID(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/findByID", msg) .then((response) => { let ans = protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerUpdate(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/update", msg) .then((response) => { let ans = protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerUpdatePasswd(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/updatePasswd", msg) .then((response) => { let ans = 
protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerUpdateSetting(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/updateSetting", msg) .then((response) => { let ans = protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqManagerDel(req:protoManage.Manager):Promise<protoManage.Manager> { return new Promise((resolve, reject)=>{ let msg = protoManage.Manager.encode(req).finish() request.httpRequest("/manager/del", msg) .then((response) => { let ans = protoManage.Manager.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqTopLinkList():Promise<protoManage.AnsTopLinkList> { return new Promise((resolve, reject)=>{ let req = protoManage.ReqTopLinkList.create({}) let msg = protoManage.ReqTopLinkList.encode(req).finish() request.httpRequest("/topLink/find", msg) .then((response) => { let ans = protoManage.AnsTopLinkList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqTopLinkByID(req:protoManage.TopLink):Promise<protoManage.TopLink> { return new Promise((resolve, reject)=>{ let msg = protoManage.TopLink.encode(req).finish() request.httpRequest("/topLink/findByID", msg) .then((response) => { let ans = protoManage.TopLink.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqTopLinkAdd(req:protoManage.TopLink):Promise<protoManage.TopLink> { return new Promise((resolve, reject)=>{ let msg = protoManage.TopLink.encode(req).finish() request.httpRequest("/topLink/add", msg) .then((response) => { let ans = protoManage.TopLink.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqTopLinkDel(req:protoManage.TopLink):Promise<protoManage.TopLink> { return new Promise((resolve, 
reject)=>{ let msg = protoManage.TopLink.encode(req).finish() request.httpRequest("/topLink/del", msg) .then((response) => { let ans = protoManage.TopLink.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqTopLinkUpdate(req:protoManage.TopLink):Promise<protoManage.TopLink> { return new Promise((resolve, reject)=>{ let msg = protoManage.TopLink.encode(req).finish() request.httpRequest("/topLink/update", msg) .then((response) => { let ans = protoManage.TopLink.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeList(req:protoManage.ReqNodeList):Promise<protoManage.AnsNodeList> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeList.encode(req).finish() request.httpRequest("/node/find", msg) .then((response) => { let ans = protoManage.AnsNodeList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeByID(id: number):Promise<protoManage.Node> { return new Promise((resolve, reject)=>{ let req = protoManage.Node.create({Base:protoManage.Base.create({ID:id})}) let msg = protoManage.Node.encode(req).finish() request.httpRequest("/node/findByID", msg) .then((response) => { let ans = protoManage.Node.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeDel(req:protoManage.Node):Promise<protoManage.Node> { return new Promise((resolve, reject)=>{ let msg = protoManage.Node.encode(req).finish() request.httpRequest("/node/del", msg) .then((response) => { let ans = protoManage.Node.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeFuncList(req:protoManage.ReqNodeFuncList):Promise<protoManage.AnsNodeFuncList> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeFuncList.encode(req).finish() request.httpRequest("/nodeFunc/find", msg) .then((response) => { let ans = protoManage.AnsNodeFuncList.decode(response) resolve(ans) }).catch(error 
=> { reject(error) }) }) } export function reqNodeFuncDel(req:protoManage.NodeFunc):Promise<protoManage.NodeFunc> { return new Promise((resolve, reject)=>{ let msg = protoManage.NodeFunc.encode(req).finish() request.httpRequest("/nodeFunc/del", msg) .then((response) => { let ans = protoManage.NodeFunc.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqCallNodeFunc(req:protoManage.ReqNodeFuncCall):Promise<protoManage.Base> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeFuncCall.encode(req).finish() request.httpRequest("/nodeFuncCall/call", msg) .then((response) => { let ans = protoManage.Base.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeFuncCallList(req:protoManage.ReqNodeFuncCallList):Promise<protoManage.AnsNodeFuncCallList> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeFuncCallList.encode(req).finish() request.httpRequest("/nodeFuncCall/find", msg) .then((response) => { let ans = protoManage.AnsNodeFuncCallList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeFuncCallByID(req:protoManage.NodeFuncCall):Promise<protoManage.NodeFuncCall> { return new Promise((resolve, reject)=>{ let msg = protoManage.NodeFuncCall.encode(req).finish() request.httpRequest("/nodeFuncCall/findByID", msg) .then((response) => { let ans = protoManage.NodeFuncCall.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeFuncCallParameterByID(req:protoManage.NodeFuncCall):Promise<protoManage.NodeFuncCall> { return new Promise((resolve, reject)=>{ let msg = protoManage.NodeFuncCall.encode(req).finish() request.httpRequest("/nodeFuncCall/findParameterByID", msg) .then((response) => { let ans = protoManage.NodeFuncCall.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function 
reqNodeFuncCallReturnValByID(req:protoManage.NodeFuncCall):Promise<protoManage.NodeFuncCall> { return new Promise((resolve, reject)=>{ let msg = protoManage.NodeFuncCall.encode(req).finish() request.httpRequest("/nodeFuncCall/findReturnValByID", msg) .then((response) => { let ans = protoManage.NodeFuncCall.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeReportList(req:protoManage.ReqNodeReportList):Promise<protoManage.AnsNodeReportList> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeReportList.encode(req).finish() request.httpRequest("/nodeReport/find", msg) .then((response) => { let ans = protoManage.AnsNodeReportList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeReportDel(req:protoManage.NodeReport):Promise<protoManage.NodeReport> { return new Promise((resolve, reject)=>{ let msg = protoManage.NodeReport.encode(req).finish() request.httpRequest("/nodeReport/del", msg) .then((response) => { let ans = protoManage.NodeReport.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeReportValList(req:protoManage.ReqNodeReportValList):Promise<protoManage.AnsNodeReportValList> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeReportValList.encode(req).finish() request.httpRequest("/nodeReportVal/find", msg) .then((response) => { let ans = protoManage.AnsNodeReportValList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeNotifyList(req:protoManage.ReqNodeNotifyList):Promise<protoManage.AnsNodeNotifyList> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeNotifyList.encode(req).finish() request.httpRequest("/nodeNotify/find", msg) .then((response) => { let ans = protoManage.AnsNodeNotifyList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function 
reqNodeResourceCheck(req:protoManage.NodeResource):Promise<protoManage.NodeResource> { return new Promise((resolve, reject)=>{ let msg = protoManage.NodeResource.encode(req).finish() request.httpRequest("/nodeResource/check", msg) .then((response) => { let ans = protoManage.NodeResource.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeResourceList(req:protoManage.ReqNodeResourceList):Promise<protoManage.AnsNodeResourceList> { return new Promise((resolve, reject)=>{ let msg = protoManage.ReqNodeResourceList.encode(req).finish() request.httpRequest("/nodeResource/find", msg) .then((response) => { let ans = protoManage.AnsNodeResourceList.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeResourceUpload(req:protoManage.NodeResource, file:Blob, onUploadProgressCall:(e:any)=>void):Promise<protoManage.NodeResource> { return new Promise((resolve, reject)=>{ reqNodeResourceCheck(req).then((ans:protoManage.NodeResource) => { if (ans.Base?.ID != 0){ resolve(ans) }else { let msg = protoManage.NodeResource.encode(req).finish() httpRequestUpLoad("/nodeResource/upload", file, msg, onUploadProgressCall) .then((response:protoManage.NodeResource) => { resolve(response) }).catch(error => { reject(error) }) } }).catch(error => { reject(error) }) }) } export function reqNodeResourceDownLoad(req:protoManage.NodeResource) { httpRequestDownLoad("/nodeResource/download/" + req.Name, Number(req.Base?.ID)) } export function reqNodeResourceDel(req:protoManage.NodeResource):Promise<protoManage.NodeResource> { return new Promise((resolve, reject)=>{ let msg = protoManage.NodeResource.encode(req).finish() request.httpRequest("/nodeResource/del", msg) .then((response) => { let ans = protoManage.NodeResource.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function reqNodeTest(req:protoManage.ReqNodeTest):Promise<protoManage.AnsNodeTest> { return new Promise((resolve, reject)=>{ 
let msg = protoManage.ReqNodeTest.encode(req).finish() request.httpRequest("/nodeTest/test", msg) .then((response) => { let ans = protoManage.AnsNodeTest.decode(response) resolve(ans) }).catch(error => { reject(error) }) }) } export function httpRequest(path:string, data:Uint8Array):Promise<Uint8Array> { return new Promise((resolve, reject)=>{ let blob = new Blob([data], {type: 'buffer'}); axios({ responseType: 'arraybuffer', method:'post', url: globals.getHttpHost() + path, data: blob, headers:{ 'token':globals.globalsData.manager.info.Token }, timeout: globals.globalsConfig.httpConfig.requestTimeout, }).then(response => { if (response.status == 200) { resolve(new Uint8Array(<Uint8Array>response.data)) }else { httpError(response.status, <Uint8Array>response.data) reject(response) } }).catch(error => { if (error.response != undefined){ httpError(error.response.status, error.response.data) }else{ globals.viewError(i18n.global.t('request.error.fail', { msg: error})); } reject(error) }) }) } export function httpRequestUpLoad(path:string, file:Blob, data:Uint8Array, onUploadProgressCall:(e:any)=>void):Promise<protoManage.NodeResource> { let formData = new FormData(); let str = convert.uint8ArrayToString(data) formData.append("file", file); formData.append("data", str); return new Promise((resolve, reject)=>{ axios({ responseType: 'arraybuffer', method:'post', url: globals.getHttpHost() + path, data: formData, headers:{ 'Content-type':'multipart/form-data', 'token':globals.globalsData.manager.info.Token }, timeout: globals.globalsConfig.httpConfig.requestTimeout, onUploadProgress: onUploadProgressCall }).then(response => { if (response.status == 200) { let ans = protoManage.NodeResource.decode(new Uint8Array(<Uint8Array>response.data)) resolve(ans) }else { httpError(response.status, <Uint8Array>response.data) reject(response) } }).catch(error => { if (error.response != undefined){ httpError(error.response.status, error.response.data) }else{ 
globals.viewError(i18n.global.t('request.error.fail', { msg: error})); } reject(error) }) }) } export function httpRequestDownLoad(path:string, id:number) { let url = globals.getHttpHost() + path + "?token="+globals.globalsData.manager.info.Token + "&id="+id window.open(url) // let downloadLink = document.createElement("a"); // downloadLink.href = url; // downloadLink.download = name; // document.body.appendChild(downloadLink); // downloadLink.click(); // document.body.removeChild(downloadLink); } export function httpError(code:number, data:Uint8Array) { let str = new TextDecoder().decode(data) switch (code) { case protoManage.HttpError.HttpErrorGetHeader: globals.viewError(i18n.global.t('request.error.header', { msg: str})); break case protoManage.HttpError.HttpErrorGetBody: globals.viewError(i18n.global.t('request.error.body', { msg: str})); break case protoManage.HttpError.HttpErrorGetFile: globals.viewError(i18n.global.t('request.error.file', { msg: str})); break case protoManage.HttpError.HttpErrorCheckFile: globals.viewError(i18n.global.t('request.error.checkFile', { msg: str})); break case protoManage.HttpError.HttpErrorMarshal: globals.viewError(i18n.global.t('request.error.marshal', { msg: str})); break case protoManage.HttpError.HttpErrorUnmarshal: globals.viewError(i18n.global.t('request.error.unmarshal', { msg: str})); break case protoManage.HttpError.HttpErrorRegister: globals.viewError(i18n.global.t('request.error.register', { msg: str})); break case protoManage.HttpError.HttpErrorLoginWithAccount: globals.viewError(i18n.global.t('request.error.loginWithAccount', { msg: str})); break case protoManage.HttpError.HttpErrorPasswordWithAccount: globals.viewError(i18n.global.t('request.error.passwordWithAccount', { msg: str})); break case protoManage.HttpError.HttpErrorLoginWithToken: globals.viewError(i18n.global.t('request.error.loginWithToken', { msg: str})); globals.reLogin() break case protoManage.HttpError.HttpErrorLevelLow: 
globals.viewError(i18n.global.t('request.error.levelLow', { msg: str})); break case protoManage.HttpError.HttpErrorRequest: globals.viewError(i18n.global.t('request.error.request', { msg: str})); break default: globals.viewError(i18n.global.t('request.error.fail', { msg: str})); break } } }
liyiligang/mxui
web/src/base/i18n.ts
<gh_stars>100-1000 /* * Copyright 2021 liyiligang. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { createI18n } from 'vue-i18n/dist/vue-i18n.cjs.js' const i18n = createI18n({ locale: 'eng', fallbackLocale: 'eng', messages: { chs: { manager: { level: { primary: '初级', intermediate: '中级', senior: '高级', super: '管理员', unknown : '未知' } }, node: { state: { not:'未知', normal : '在线', warn : '异常', error : '停机', unknown : '离线' }, card:{ state:'状态', nodeFunc:'方法', nodeReport:'报告' } }, nodeFunc:{ card:{ level: '权限', call: '方法', node: '节点' }, parameterTypeName: { form : '表单', unknown : '未知' }, returnTypeName: { notReturn: '无', error: '错误', text: '文本', json: '对象', link: '链接', image: '图片', media: '媒体', file: '文件', table : '表格', charts : '图表', unknown : '未知' } }, nodeFuncCall:{ state: { normal : '成功', warn : '异常', error : '失败', timeout : '超时', unknown : '未知' }, card:{ call: '请求方法', history: '历史记录' }, table:{ id:'编号', caller:'请求者', state:'状态', parameter:'参数', returnVal:'返回值', date:'日期' } }, nodeReport:{ card:{ level: '权限', call: '方法', interval: '频率', node: '节点', viewReport:'查看报告' } }, nodeReportVal:{ table:{ id:'编号', date:'日期' } }, nodeNotify:{ table:{ id:'编号', sender:'通知者', sendType:'通知源', message:'消息', date:'日期' }, senderType:{ user: "用户", node: "节点", unknown : '未知' }, state: { not : '信息', unknown : '未知', normal : '成功', warn : '警告', error : '错误' }, }, nodeResource:{ table:{ id:'编号', uploader:'上传者', uploaderType:'上传源', name:'资源名', size:'资源大小', downLoadCnt:'下载次数', state:'状态', date:'上传日期' 
}, uploaderType:{ user: "用户", node: "节点", unknown : '未知' }, state:{ normal:'正常', invalid:'已失效' } }, defaultVal:{ unknown : '未知' }, time: { manual: '手动', ms : '毫秒', s : '秒', min : '分', hour : '时', day : '天', once :'次' }, filter:{ invalidTag: '无效的标签', existTag: ' 已添加在<{name}>中', start:'过滤器已激活', stop:'过滤器未激活', tagName: { ID: 'ID', nodeID: '节点ID', }, tips:{ select: '请选择', submit: '提交', startTime: '起始时间', endTime: '结束时间', timeError:'起始时间不能超过结束时间', clearTags: '清空{msg}标签', }, node:{ name:'节点名', state:'状态', time:'更新时间', }, nodeFunc:{ nodeID:'节点ID', nodeName:'节点名', name:'节点方法名', level:'权限', time:'更新时间', }, nodeReport:{ nodeID:'节点ID', nodeName:'节点名', name:'节点报告名', level:'权限', time:'更新时间', }, nodeNotify:{ message:'通知消息', sender:'通知者', senderType:'通知源', state:'状态', time:'通知时间', }, nodeResource:{ name:'资源名', uploader:'上传者', uploaderType:'上传源', state:'状态', time:'上传时间', } }, request:{ error:{ fail: '请求失败 ({msg})', header: '读取信息错误 ({msg})', body: '读取数据错误 ({msg})', file: '读取文件数据错误 ({msg})', checkFile: '文件信息校验失败 ({msg})', marshal: '数据编码失败 ({msg})', unmarshal: '数据解码失败 ({msg})', register: '注册失败 ({msg})', loginWithAccount: '登录失败 ({msg})', passwordWithAccount: '<PASSWORD>})', loginWithToken: '登录信息校验失败 ({msg})', levelLow: '权限不足 ({msg})', request: '请求错误 ({msg})', } }, websocket:{ notSupport: '当前浏览器不支持websocket协议, 部分功能将无法使用', initError:'websocket初始化错误: ', reconnect:'正在尝试重新连接服务器......{msg}次', reconnectSuccess:'网络连接成功', disConnect:'网络连接已断开: ', orderError:'错误的websocket指令: ', deleteNotify:'您的帐号 {msg} 已被管理员注销', levelChanged:'您的帐号 {msg} 发生权限变更, 请重新登录' }, confirm:{ delete:'此操作将永久删除相关数据, 是否继续?', warn: '警告', ok:'确定', cancel:'取消' }, setting:{ system:'系统设置', user:'用户设置', topLink:'顶栏设置', systemSet:{ user:'用户:', level:'权限:', fixPage:'固定分页:', language:'语言选择:', modifyPassword:'修改密码:', modifyPasswordButton:'修改', modifyPasswordDialog:'修改密码', loginOut:'退出登录:', loginOutButton:'退出', autoRefresh:'数据同步(秒/次):', }, topMenuSet:{ id:'外链ID', name:'名称', link:'链接', operate:'操作', deleteSuccess:'删除成功', 
createSuccess:'创建成功', updateSuccess:'更新成功' }, userSet:{ id:'用户ID', userName:'用户名', nickName:'昵称', level:'权限', operate:'操作', addUser:'新增帐号', deleteSuccess:'删除成功', updateSuccess:'更新成功' } }, file:{ downloadTips:'点击下载文件', uploadTips:'点击上传', exceedTips:'上传文件数量超出最大限制', failTips:'上传文件失败', forbidDelHistoryFile:'禁止删除历史文件' }, autoRefresh:{ start:'自动同步已开启', stop:'自动同步已停止' }, json:{ formatFail:'json格式化失败: ' }, level:{ selectTips:'请选择权限等级' }, passwordReset:{ originalPassword:'<PASSWORD>', newPassword:'<PASSWORD>', confirmPassword:'<PASSWORD>', passwordInconsistent:'两次输入密码不一致', passwordModifySuccess:'密码修改成功, 请重新登录', modifyButton: '修改' }, sideBar:{ node:'节点', nodeFunc:'方法', nodeReport:'报告', nodeResource:'资源', nodeNotify:'通知', nodeTest:'测试', errorKey:'错误的侧边栏菜单选项: ' }, dialog:{ frame:{ setting:'设置(ctrl+q)', sync:'同步(ctrl+space)', update:'刷新(~)', fullScreen:'全屏(ctrl+enter)', close:'关闭(esc)' }, nodeFuncCall:{ form: '表单', json: 'JSON', emptyParameter:'此方法沒有设置参数', emptyReturnVal:'此次调用方法沒有返回值', resetButton:'重置', submitButton:'提交', jsonCheckFail:'参数校验失败' }, nodeReportVal:{ waitResult:'请等待结果返回...', setting:'设置', resultSet:'结果集(条):', sync:'数据同步(秒/次):', okButton:'确定' } }, home:{ parseConfigFail:'用户配置解析失败, 将使用默认配置' }, login:{ userName:'用户名', password:'密码', login:'登录', logining:'登录中...', autoLogin:'自动登录', register:'注册帐号', createManager:'创建管理员', loginSuccess:'登录成功' }, register:{ nickName:'昵称', userName:'用户名', password:'密码', level:'权限', confirmPassword:'<PASSWORD>', registerButton:'注册', registerSuccess:'注册成功', passwordInconsistent:'两次输入密码不一致' }, empty:{ emptyData:'暂无数据' }, notFound:'抱歉, 您访问的页面不存在', notFoundToHome:'返回至首页', notFoundToUp:'返回上一级' }, eng: { manager: { level: { primary: 'Primary', intermediate: 'Intermediate', senior: 'Advanced', super: 'Administrators', unknown : 'Unknown' } }, node: { state: { not:'Unknown', normal : 'Online', warn : 'Abnormal', error : 'Shutdown', unknown : 'Offline' }, card:{ state:'State', nodeFunc:'Method', nodeReport:'Report' } }, nodeFunc:{ card:{ level: 
'Level', call: 'Method', node: 'Node' }, parameterTypeName: { form : 'Form', unknown : 'Unknown' }, returnTypeName: { notReturn: 'Nothing', error: 'Error', text: 'Text', json: 'Object', link: 'Link', image: 'Picture', media: 'Media', file: 'File', table : 'Table', charts : 'Chart', unknown : 'Unknown' } }, nodeFuncCall:{ state: { normal : 'Success', warn : 'Abnormal', error : 'Fail', timeout : 'Timeout', unknown : 'Unknown' }, card:{ call: 'Request', history: 'History' }, table:{ id:'ID', caller:'Requester', state:'State', parameter:'Parameter', returnVal:'ReturnVal', date:'Date' } }, nodeReport:{ card:{ level: 'Level', call: 'Method', interval: 'Rate', node: 'Node', viewReport:'ViewReport' } }, nodeReportVal:{ table:{ id:'ID', date:'Date' } }, nodeNotify:{ table:{ id:'ID', sender:'Sender', sendType:'Source', message:'Message', date:'Date' }, senderType:{ user: "User", node: "Node", unknown : 'Unknown' }, state: { not : 'Info', unknown : 'Unknown', normal : 'Success', warn : 'Warn', error : 'Error' }, }, nodeResource:{ table:{ id:'ID', uploader:'Uploader', uploaderType:'Source', name:'Name', size:'Size', downLoadCnt:'DownLoads', state:'State', date:'Date' }, uploaderType:{ user: "User", node: "Node", unknown : 'Unknown' }, state:{ normal:'Valid', invalid:'Invalid' } }, defaultVal:{ unknown : 'Unknown' }, time: { manual: 'manual', ms : 'ms', s : 's', min : 'min', hour : 'hour', day : 'day', once :'t' }, filter:{ invalidTag: 'Invalid tag', existTag: ' Added in<{name}>', start:'Filter activated', stop:'Filter not active', tagName: { ID: 'ID', nodeID: 'Node ID', }, tips:{ select: 'Please select', submit: 'Submit', startTime: 'Start time', endTime: 'End time', timeError:'The start time cannot exceed the end time', clearTags: 'Clear {msg} tags', }, node:{ name:'Node name', state:'State', time:'Update time', }, nodeFunc:{ nodeID:'Node ID', nodeName:'Node name', name:'Node method name', level:'Level', time:'Update time', }, nodeReport:{ nodeID:'Node ID', nodeName:'Node 
name', name:'Node report name', level:'Level', time:'Update time', }, nodeNotify:{ message:'Message', sender:'Sender', senderType:'Send source', state:'State', time:'Send time', }, nodeResource:{ name:'Resource name', uploader:'Uploader', uploaderType:'Upload source', state:'State', time:'Update time', } }, request:{ error:{ fail: 'Request failure ({msg})', header: 'Error reading information ({msg})', body: 'Error reading data ({msg})', file: 'Error reading file data ({msg})', checkFile: 'File information verification failed ({msg})', marshal: 'Data encoding failed ({msg})', unmarshal: 'Data decoding failed ({msg})', register: 'Register failed ({msg})', loginWithAccount: 'Login failed ({msg})', passwordWithAccount: '<PASSWORD>})', loginWithToken: 'Login information verification failed ({msg})', levelLow: 'Insufficient permissions ({msg})', request: 'Request error ({msg})', } }, websocket:{ notSupport: 'The current browser does not support websocket protocol, and some functions will not be available', initError:'Websocket initialization error: ', reconnect:'Attempting to reconnect to the server......{msg} time', reconnectSuccess:'Network connection succeeded', disConnect:'The network connection has been disconnected: ', orderError:'Bad websocket instruction: ', deleteNotify:'Your account {msg} has been cancelled by the administrator', levelChanged:'Your account {msg} has changed permissions. Please log in again' }, confirm:{ delete:'This operation will permanently delete relevant data. 
Do you want to continue?', warn: 'Warn', ok:'Confirm', cancel:'Cancel' }, setting:{ system:'System set', user:'User set', topLink:'Top bar set', systemSet:{ user:'User:', level:'Level:', fixPage:'Fixed paging:', language:'Choose language:', modifyPassword:'Modify password:', modifyPasswordButton:'Modify', modifyPasswordDialog:'Modify Password', loginOut:'Logout:', loginOutButton:'Quit', autoRefresh:'Data sync(s/t):', }, topMenuSet:{ id:'ID', name:'Name', link:'Link', operate:'Operate', deleteSuccess:'Delete success', createSuccess:'Create success', updateSuccess:'Update success' }, userSet:{ id:'ID', userName:'Name', nickName:'Nickname', level:'Level', operate:'Operate', addUser:'Add user', deleteSuccess:'Delete success', updateSuccess:'Update success' } }, file:{ downloadTips:'Click to download the file', uploadTips:'Click upload', exceedTips:'The number of uploaded files exceeds the maximum limit', failTips:'Failed to upload file', forbidDelHistoryFile:'Prohibit deleting history files' }, autoRefresh:{ start:'Auto sync on', stop:'Auto sync off' }, json:{ formatFail:'JSON formatting failed: ' }, level:{ selectTips:'Please select permission level' }, passwordReset:{ originalPassword:'<PASSWORD>', newPassword:'<PASSWORD>', confirmPassword:'<PASSWORD>', passwordInconsistent:'<PASSWORD>', passwordModifySuccess:'Password changed successfully, please login again', modifyButton: 'Modify' }, sideBar:{ node:'Node', nodeFunc:'Method', nodeReport:'Report', nodeResource:'Resource', nodeNotify:'Notify', nodeTest:'Test', errorKey:'Wrong sidebar menu option: ' }, dialog:{ frame:{ setting:'Set up(ctrl+q)', sync:'Synchronization(ctrl+space)', update:'Refresh(~)', fullScreen:'Full screen(ctrl+enter)', close:'Close(esc)' }, nodeFuncCall:{ form: 'Form', json: 'JSON', emptyParameter:'This method has no parameters set', emptyReturnVal:'The method called this time has no return value', resetButton:'Reset', submitButton:'Submit', jsonCheckFail:'Parameter verification failed' }, 
nodeReportVal:{ waitResult:'Please wait for the result to return...', setting:'Set up', resultSet:'Result set(p):', sync:'Data sync(s/t):', okButton:'Confirm' } }, home:{ parseConfigFail:'User configuration resolution failed. The default configuration will be used' }, login:{ userName:'User name', password:'Password', login:'Login', logining:'Logging...', autoLogin:'Auto login', register:'Register account', createManager:'Create administrator', loginSuccess:'Login succeeded' }, register:{ nickName:'Nick name', userName:'User name', password:'Password', level:' level', confirmPassword:'<PASSWORD>', registerButton:'Register', registerSuccess:'Register succeeded', passwordInconsistent:'The two <PASSWORD> are <PASSWORD>' }, empty:{ emptyData:'No data' }, notFound:'Sorry, the page you visited does not exist', notFoundToHome:'Return to login page', notFoundToUp:'Return to previous page' }, }, }) export default i18n
liyiligang/mxui
web/src/proto/manage.d.ts
import * as $protobuf from "protobufjs"; /** Namespace protoManage. */ export namespace protoManage { /** Order enum. */ enum Order { Unknow = 0, ManagerUpdate = 101, ManagerDel = 102, NodeFuncCallReq = 608, NodeFuncCallAns = 609, NodeReportUpdateVal = 704, NodeNotifyAdd = 801, NodeNotifyError = 803 } /** State enum. */ enum State { StateNot = 0, StateUnknow = 1, StateNormal = 2, StateWarn = 3, StateError = 4 } /** Level enum. */ enum Level { LevelNot = 0, LevelPrimary = 1, LevelIntermediate = 2, LevelSenior = 3, LevelSuper = 4 } /** NodeFuncReturnType enum. */ enum NodeFuncReturnType { Unknown = 0, NotReturn = 1, Error = 2, Text = 3, Json = 4, Link = 5, Image = 6, Media = 7, File = 8, Table = 9, Charts = 10 } /** NodeReportType enum. */ enum NodeReportType { NodeReportTypeUnknown = 0, NodeReportTypeTable = 1, NodeReportTypeLine = 2 } /** NotifySenderType enum. */ enum NotifySenderType { NotifySenderTypeUnknow = 0, NotifySenderTypeUser = 1, NotifySenderTypeNode = 2 } /** NodeResourceType enum. */ enum NodeResourceType { NodeResourceTypeUnknow = 0, NodeResourceTypeCache = 1 } /** HttpError enum. */ enum HttpError { HttpErrorNull = 0, HttpErrorGetHeader = 601, HttpErrorGetBody = 602, HttpErrorGetFile = 603, HttpErrorCheckFile = 604, HttpErrorMarshal = 605, HttpErrorUnmarshal = 606, HttpErrorRegister = 607, HttpErrorLoginWithAccount = 608, HttpErrorPasswordWithAccount = <PASSWORD>, HttpErrorLoginWithToken = <PASSWORD>, HttpErrorLevelLow = 611, HttpErrorRequest = 612 } /** Properties of a Message. */ interface IMessage { /** Message order */ order?: (protoManage.Order|null); /** Message message */ message?: (Uint8Array|null); } /** Represents a Message. */ class Message implements IMessage { /** * Constructs a new Message. * @param [properties] Properties to set */ constructor(properties?: protoManage.IMessage); /** Message order. */ public order: protoManage.Order; /** Message message. 
*/ public message: Uint8Array; /** * Creates a new Message instance using the specified properties. * @param [properties] Properties to set * @returns Message instance */ public static create(properties?: protoManage.IMessage): protoManage.Message; /** * Encodes the specified Message message. Does not implicitly {@link protoManage.Message.verify|verify} messages. * @param message Message message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IMessage, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified Message message, length delimited. Does not implicitly {@link protoManage.Message.verify|verify} messages. * @param message Message message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IMessage, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a Message message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns Message * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.Message; /** * Decodes a Message message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns Message * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.Message; /** * Verifies a Message message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a Message message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns Message */ public static fromObject(object: { [k: string]: any }): protoManage.Message; /** * Creates a plain object from a Message message. Also converts values to other types if specified. * @param message Message * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.Message, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this Message to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a HttpMessage. */ interface IHttpMessage { /** HttpMessage order */ order?: (protoManage.Order|null); /** HttpMessage message */ message?: (Uint8Array|null); /** HttpMessage token */ token?: (string|null); } /** Represents a HttpMessage. */ class HttpMessage implements IHttpMessage { /** * Constructs a new HttpMessage. * @param [properties] Properties to set */ constructor(properties?: protoManage.IHttpMessage); /** HttpMessage order. */ public order: protoManage.Order; /** HttpMessage message. */ public message: Uint8Array; /** HttpMessage token. */ public token: string; /** * Creates a new HttpMessage instance using the specified properties. * @param [properties] Properties to set * @returns HttpMessage instance */ public static create(properties?: protoManage.IHttpMessage): protoManage.HttpMessage; /** * Encodes the specified HttpMessage message. Does not implicitly {@link protoManage.HttpMessage.verify|verify} messages. 
* @param message HttpMessage message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IHttpMessage, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified HttpMessage message, length delimited. Does not implicitly {@link protoManage.HttpMessage.verify|verify} messages. * @param message HttpMessage message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IHttpMessage, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a HttpMessage message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns HttpMessage * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.HttpMessage; /** * Decodes a HttpMessage message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns HttpMessage * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.HttpMessage; /** * Verifies a HttpMessage message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a HttpMessage message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns HttpMessage */ public static fromObject(object: { [k: string]: any }): protoManage.HttpMessage; /** * Creates a plain object from a HttpMessage message. 
Also converts values to other types if specified. * @param message HttpMessage * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.HttpMessage, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this HttpMessage to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Represents a RpcEngine */ class RpcEngine extends $protobuf.rpc.Service { /** * Constructs a new RpcEngine service. * @param rpcImpl RPC implementation * @param [requestDelimited=false] Whether requests are length-delimited * @param [responseDelimited=false] Whether responses are length-delimited */ constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); /** * Creates new RpcEngine service using the specified rpc implementation. * @param rpcImpl RPC implementation * @param [requestDelimited=false] Whether requests are length-delimited * @param [responseDelimited=false] Whether responses are length-delimited * @returns RPC service. Useful where requests and/or responses are streamed. */ public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): RpcEngine; /** * Calls RegisterNode. * @param request Node message or plain object * @param callback Node-style callback called with the error, if any, and Node */ public registerNode(request: protoManage.INode, callback: protoManage.RpcEngine.RegisterNodeCallback): void; /** * Calls RegisterNode. * @param request Node message or plain object * @returns Promise */ public registerNode(request: protoManage.INode): Promise<protoManage.Node>; /** * Calls RegisterNodeFunc. * @param request NodeFunc message or plain object * @param callback Node-style callback called with the error, if any, and NodeFunc */ public registerNodeFunc(request: protoManage.INodeFunc, callback: protoManage.RpcEngine.RegisterNodeFuncCallback): void; /** * Calls RegisterNodeFunc. 
* @param request NodeFunc message or plain object * @returns Promise */ public registerNodeFunc(request: protoManage.INodeFunc): Promise<protoManage.NodeFunc>; /** * Calls RegisterNodeReport. * @param request NodeReport message or plain object * @param callback Node-style callback called with the error, if any, and NodeReport */ public registerNodeReport(request: protoManage.INodeReport, callback: protoManage.RpcEngine.RegisterNodeReportCallback): void; /** * Calls RegisterNodeReport. * @param request NodeReport message or plain object * @returns Promise */ public registerNodeReport(request: protoManage.INodeReport): Promise<protoManage.NodeReport>; /** * Calls CheckNodeResource. * @param request NodeResource message or plain object * @param callback Node-style callback called with the error, if any, and NodeResource */ public checkNodeResource(request: protoManage.INodeResource, callback: protoManage.RpcEngine.CheckNodeResourceCallback): void; /** * Calls CheckNodeResource. * @param request NodeResource message or plain object * @returns Promise */ public checkNodeResource(request: protoManage.INodeResource): Promise<protoManage.NodeResource>; /** * Calls UploadNodeResource. * @param request ReqNodeResourceUpload message or plain object * @param callback Node-style callback called with the error, if any, and AnsNodeResourceUpload */ public uploadNodeResource(request: protoManage.IReqNodeResourceUpload, callback: protoManage.RpcEngine.UploadNodeResourceCallback): void; /** * Calls UploadNodeResource. * @param request ReqNodeResourceUpload message or plain object * @returns Promise */ public uploadNodeResource(request: protoManage.IReqNodeResourceUpload): Promise<protoManage.AnsNodeResourceUpload>; /** * Calls DownloadNodeResource. 
* @param request ReqNodeResourceDownload message or plain object * @param callback Node-style callback called with the error, if any, and AnsNodeResourceDownload */ public downloadNodeResource(request: protoManage.IReqNodeResourceDownload, callback: protoManage.RpcEngine.DownloadNodeResourceCallback): void; /** * Calls DownloadNodeResource. * @param request ReqNodeResourceDownload message or plain object * @returns Promise */ public downloadNodeResource(request: protoManage.IReqNodeResourceDownload): Promise<protoManage.AnsNodeResourceDownload>; /** * Calls RpcChannel. * @param request Message message or plain object * @param callback Node-style callback called with the error, if any, and Message */ public rpcChannel(request: protoManage.IMessage, callback: protoManage.RpcEngine.RpcChannelCallback): void; /** * Calls RpcChannel. * @param request Message message or plain object * @returns Promise */ public rpcChannel(request: protoManage.IMessage): Promise<protoManage.Message>; } namespace RpcEngine { /** * Callback as used by {@link protoManage.RpcEngine#registerNode}. * @param error Error, if any * @param [response] Node */ type RegisterNodeCallback = (error: (Error|null), response?: protoManage.Node) => void; /** * Callback as used by {@link protoManage.RpcEngine#registerNodeFunc}. * @param error Error, if any * @param [response] NodeFunc */ type RegisterNodeFuncCallback = (error: (Error|null), response?: protoManage.NodeFunc) => void; /** * Callback as used by {@link protoManage.RpcEngine#registerNodeReport}. * @param error Error, if any * @param [response] NodeReport */ type RegisterNodeReportCallback = (error: (Error|null), response?: protoManage.NodeReport) => void; /** * Callback as used by {@link protoManage.RpcEngine#checkNodeResource}. 
* @param error Error, if any * @param [response] NodeResource */ type CheckNodeResourceCallback = (error: (Error|null), response?: protoManage.NodeResource) => void; /** * Callback as used by {@link protoManage.RpcEngine#uploadNodeResource}. * @param error Error, if any * @param [response] AnsNodeResourceUpload */ type UploadNodeResourceCallback = (error: (Error|null), response?: protoManage.AnsNodeResourceUpload) => void; /** * Callback as used by {@link protoManage.RpcEngine#downloadNodeResource}. * @param error Error, if any * @param [response] AnsNodeResourceDownload */ type DownloadNodeResourceCallback = (error: (Error|null), response?: protoManage.AnsNodeResourceDownload) => void; /** * Callback as used by {@link protoManage.RpcEngine#rpcChannel}. * @param error Error, if any * @param [response] Message */ type RpcChannelCallback = (error: (Error|null), response?: protoManage.Message) => void; } /** Properties of a Base. */ interface IBase { /** Base ID */ ID?: (number|null); /** Base UpdateTime */ UpdateTime?: (number|null); } /** Represents a Base. */ class Base implements IBase { /** * Constructs a new Base. * @param [properties] Properties to set */ constructor(properties?: protoManage.IBase); /** Base ID. */ public ID: number; /** Base UpdateTime. */ public UpdateTime: number; /** * Creates a new Base instance using the specified properties. * @param [properties] Properties to set * @returns Base instance */ public static create(properties?: protoManage.IBase): protoManage.Base; /** * Encodes the specified Base message. Does not implicitly {@link protoManage.Base.verify|verify} messages. * @param message Base message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IBase, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified Base message, length delimited. Does not implicitly {@link protoManage.Base.verify|verify} messages. 
* @param message Base message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IBase, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a Base message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns Base * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.Base; /** * Decodes a Base message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns Base * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.Base; /** * Verifies a Base message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a Base message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns Base */ public static fromObject(object: { [k: string]: any }): protoManage.Base; /** * Creates a plain object from a Base message. Also converts values to other types if specified. * @param message Base * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.Base, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this Base to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a Page. */ interface IPage { /** Page Count */ Count?: (number|null); /** Page Num */ Num?: (number|null); } /** Represents a Page. 
*/ class Page implements IPage { /** * Constructs a new Page. * @param [properties] Properties to set */ constructor(properties?: protoManage.IPage); /** Page Count. */ public Count: number; /** Page Num. */ public Num: number; /** * Creates a new Page instance using the specified properties. * @param [properties] Properties to set * @returns Page instance */ public static create(properties?: protoManage.IPage): protoManage.Page; /** * Encodes the specified Page message. Does not implicitly {@link protoManage.Page.verify|verify} messages. * @param message Page message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IPage, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified Page message, length delimited. Does not implicitly {@link protoManage.Page.verify|verify} messages. * @param message Page message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IPage, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a Page message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns Page * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.Page; /** * Decodes a Page message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns Page * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.Page; /** * Verifies a Page message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a Page message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns Page */ public static fromObject(object: { [k: string]: any }): protoManage.Page; /** * Creates a plain object from a Page message. Also converts values to other types if specified. * @param message Page * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.Page, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this Page to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a Time. */ interface ITime { /** Time BeginTime */ BeginTime?: (number|null); /** Time EndTime */ EndTime?: (number|null); } /** Represents a Time. */ class Time implements ITime { /** * Constructs a new Time. * @param [properties] Properties to set */ constructor(properties?: protoManage.ITime); /** Time BeginTime. */ public BeginTime: number; /** Time EndTime. */ public EndTime: number; /** * Creates a new Time instance using the specified properties. * @param [properties] Properties to set * @returns Time instance */ public static create(properties?: protoManage.ITime): protoManage.Time; /** * Encodes the specified Time message. Does not implicitly {@link protoManage.Time.verify|verify} messages. * @param message Time message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.ITime, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified Time message, length delimited. Does not implicitly {@link protoManage.Time.verify|verify} messages. 
* @param message Time message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.ITime, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a Time message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns Time * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.Time; /** * Decodes a Time message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns Time * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.Time; /** * Verifies a Time message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a Time message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns Time */ public static fromObject(object: { [k: string]: any }): protoManage.Time; /** * Creates a plain object from a Time message. Also converts values to other types if specified. * @param message Time * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.Time, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this Time to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a Manager. 
*/ interface IManager { /** Manager Base */ Base?: (protoManage.IBase|null); /** Manager Name */ Name?: (string|null); /** Manager Password */ Password?: (string|null); /** Manager NickName */ NickName?: (string|null); /** Manager Token */ Token?: (string|null); /** Manager Setting */ Setting?: (string|null); /** Manager Level */ Level?: (protoManage.Level|null); /** Manager State */ State?: (protoManage.State|null); } /** Represents a Manager. */ class Manager implements IManager { /** * Constructs a new Manager. * @param [properties] Properties to set */ constructor(properties?: protoManage.IManager); /** Manager Base. */ public Base?: (protoManage.IBase|null); /** Manager Name. */ public Name: string; /** Manager Password. */ public Password: string; /** Manager NickName. */ public NickName: string; /** Manager Token. */ public Token: string; /** Manager Setting. */ public Setting: string; /** Manager Level. */ public Level: protoManage.Level; /** Manager State. */ public State: protoManage.State; /** * Creates a new Manager instance using the specified properties. * @param [properties] Properties to set * @returns Manager instance */ public static create(properties?: protoManage.IManager): protoManage.Manager; /** * Encodes the specified Manager message. Does not implicitly {@link protoManage.Manager.verify|verify} messages. * @param message Manager message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IManager, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified Manager message, length delimited. Does not implicitly {@link protoManage.Manager.verify|verify} messages. * @param message Manager message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IManager, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a Manager message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns Manager * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.Manager; /** * Decodes a Manager message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns Manager * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.Manager; /** * Verifies a Manager message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a Manager message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns Manager */ public static fromObject(object: { [k: string]: any }): protoManage.Manager; /** * Creates a plain object from a Manager message. Also converts values to other types if specified. * @param message Manager * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.Manager, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this Manager to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a TopLink. */ interface ITopLink { /** TopLink Base */ Base?: (protoManage.IBase|null); /** TopLink Name */ Name?: (string|null); /** TopLink Url */ Url?: (string|null); /** TopLink State */ State?: (protoManage.State|null); } /** Represents a TopLink. */ class TopLink implements ITopLink { /** * Constructs a new TopLink. 
* @param [properties] Properties to set */ constructor(properties?: protoManage.ITopLink); /** TopLink Base. */ public Base?: (protoManage.IBase|null); /** TopLink Name. */ public Name: string; /** TopLink Url. */ public Url: string; /** TopLink State. */ public State: protoManage.State; /** * Creates a new TopLink instance using the specified properties. * @param [properties] Properties to set * @returns TopLink instance */ public static create(properties?: protoManage.ITopLink): protoManage.TopLink; /** * Encodes the specified TopLink message. Does not implicitly {@link protoManage.TopLink.verify|verify} messages. * @param message TopLink message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.ITopLink, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified TopLink message, length delimited. Does not implicitly {@link protoManage.TopLink.verify|verify} messages. * @param message TopLink message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.ITopLink, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a TopLink message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns TopLink * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.TopLink; /** * Decodes a TopLink message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from * @returns TopLink * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.TopLink; /** * Verifies a TopLink message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a TopLink message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns TopLink */ public static fromObject(object: { [k: string]: any }): protoManage.TopLink; /** * Creates a plain object from a TopLink message. Also converts values to other types if specified. * @param message TopLink * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.TopLink, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this TopLink to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a Node. */ interface INode { /** Node Base */ Base?: (protoManage.IBase|null); /** Node Name */ Name?: (string|null); /** Node State */ State?: (protoManage.State|null); } /** Represents a Node. */ class Node implements INode { /** * Constructs a new Node. * @param [properties] Properties to set */ constructor(properties?: protoManage.INode); /** Node Base. */ public Base?: (protoManage.IBase|null); /** Node Name. */ public Name: string; /** Node State. */ public State: protoManage.State; /** * Creates a new Node instance using the specified properties. * @param [properties] Properties to set * @returns Node instance */ public static create(properties?: protoManage.INode): protoManage.Node; /** * Encodes the specified Node message. Does not implicitly {@link protoManage.Node.verify|verify} messages. 
* @param message Node message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.INode, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified Node message, length delimited. Does not implicitly {@link protoManage.Node.verify|verify} messages. * @param message Node message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.INode, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a Node message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns Node * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.Node; /** * Decodes a Node message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns Node * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.Node; /** * Verifies a Node message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a Node message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns Node */ public static fromObject(object: { [k: string]: any }): protoManage.Node; /** * Creates a plain object from a Node message. Also converts values to other types if specified. 
* @param message Node * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.Node, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this Node to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a NodeFunc. */ interface INodeFunc { /** NodeFunc Base */ Base?: (protoManage.IBase|null); /** NodeFunc NodeID */ NodeID?: (number|null); /** NodeFunc Name */ Name?: (string|null); /** NodeFunc Func */ Func?: (string|null); /** NodeFunc Schema */ Schema?: (string|null); /** NodeFunc Level */ Level?: (protoManage.Level|null); /** NodeFunc State */ State?: (protoManage.State|null); } /** Represents a NodeFunc. */ class NodeFunc implements INodeFunc { /** * Constructs a new NodeFunc. * @param [properties] Properties to set */ constructor(properties?: protoManage.INodeFunc); /** NodeFunc Base. */ public Base?: (protoManage.IBase|null); /** NodeFunc NodeID. */ public NodeID: number; /** NodeFunc Name. */ public Name: string; /** NodeFunc Func. */ public Func: string; /** NodeFunc Schema. */ public Schema: string; /** NodeFunc Level. */ public Level: protoManage.Level; /** NodeFunc State. */ public State: protoManage.State; /** * Creates a new NodeFunc instance using the specified properties. * @param [properties] Properties to set * @returns NodeFunc instance */ public static create(properties?: protoManage.INodeFunc): protoManage.NodeFunc; /** * Encodes the specified NodeFunc message. Does not implicitly {@link protoManage.NodeFunc.verify|verify} messages. * @param message NodeFunc message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.INodeFunc, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified NodeFunc message, length delimited. Does not implicitly {@link protoManage.NodeFunc.verify|verify} messages. 
* @param message NodeFunc message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.INodeFunc, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a NodeFunc message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns NodeFunc * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.NodeFunc; /** * Decodes a NodeFunc message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns NodeFunc * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.NodeFunc; /** * Verifies a NodeFunc message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a NodeFunc message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns NodeFunc */ public static fromObject(object: { [k: string]: any }): protoManage.NodeFunc; /** * Creates a plain object from a NodeFunc message. Also converts values to other types if specified. * @param message NodeFunc * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.NodeFunc, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this NodeFunc to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a NodeFuncCall. 
*/ interface INodeFuncCall { /** NodeFuncCall Base */ Base?: (protoManage.IBase|null); /** NodeFuncCall RequesterID */ RequesterID?: (number|null); /** NodeFuncCall RequesterName */ RequesterName?: (string|null); /** NodeFuncCall FuncID */ FuncID?: (number|null); /** NodeFuncCall Parameter */ Parameter?: (string|null); /** NodeFuncCall ReturnVal */ ReturnVal?: (string|null); /** NodeFuncCall ReturnType */ ReturnType?: (protoManage.NodeFuncReturnType|null); /** NodeFuncCall State */ State?: (protoManage.State|null); } /** Represents a NodeFuncCall. */ class NodeFuncCall implements INodeFuncCall { /** * Constructs a new NodeFuncCall. * @param [properties] Properties to set */ constructor(properties?: protoManage.INodeFuncCall); /** NodeFuncCall Base. */ public Base?: (protoManage.IBase|null); /** NodeFuncCall RequesterID. */ public RequesterID: number; /** NodeFuncCall RequesterName. */ public RequesterName: string; /** NodeFuncCall FuncID. */ public FuncID: number; /** NodeFuncCall Parameter. */ public Parameter: string; /** NodeFuncCall ReturnVal. */ public ReturnVal: string; /** NodeFuncCall ReturnType. */ public ReturnType: protoManage.NodeFuncReturnType; /** NodeFuncCall State. */ public State: protoManage.State; /** * Creates a new NodeFuncCall instance using the specified properties. * @param [properties] Properties to set * @returns NodeFuncCall instance */ public static create(properties?: protoManage.INodeFuncCall): protoManage.NodeFuncCall; /** * Encodes the specified NodeFuncCall message. Does not implicitly {@link protoManage.NodeFuncCall.verify|verify} messages. * @param message NodeFuncCall message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.INodeFuncCall, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified NodeFuncCall message, length delimited. Does not implicitly {@link protoManage.NodeFuncCall.verify|verify} messages. 
* @param message NodeFuncCall message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.INodeFuncCall, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a NodeFuncCall message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns NodeFuncCall * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.NodeFuncCall; /** * Decodes a NodeFuncCall message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns NodeFuncCall * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.NodeFuncCall; /** * Verifies a NodeFuncCall message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a NodeFuncCall message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns NodeFuncCall */ public static fromObject(object: { [k: string]: any }): protoManage.NodeFuncCall; /** * Creates a plain object from a NodeFuncCall message. Also converts values to other types if specified. * @param message NodeFuncCall * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.NodeFuncCall, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this NodeFuncCall to JSON. 
* @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a NodeReport. */ interface INodeReport { /** NodeReport Base */ Base?: (protoManage.IBase|null); /** NodeReport NodeID */ NodeID?: (number|null); /** NodeReport Name */ Name?: (string|null); /** NodeReport Type */ Type?: (protoManage.NodeReportType|null); /** NodeReport Func */ Func?: (string|null); /** NodeReport Schema */ Schema?: (string|null); /** NodeReport Interval */ Interval?: (number|null); /** NodeReport Level */ Level?: (protoManage.Level|null); /** NodeReport State */ State?: (protoManage.State|null); } /** Represents a NodeReport. */ class NodeReport implements INodeReport { /** * Constructs a new NodeReport. * @param [properties] Properties to set */ constructor(properties?: protoManage.INodeReport); /** NodeReport Base. */ public Base?: (protoManage.IBase|null); /** NodeReport NodeID. */ public NodeID: number; /** NodeReport Name. */ public Name: string; /** NodeReport Type. */ public Type: protoManage.NodeReportType; /** NodeReport Func. */ public Func: string; /** NodeReport Schema. */ public Schema: string; /** NodeReport Interval. */ public Interval: number; /** NodeReport Level. */ public Level: protoManage.Level; /** NodeReport State. */ public State: protoManage.State; /** * Creates a new NodeReport instance using the specified properties. * @param [properties] Properties to set * @returns NodeReport instance */ public static create(properties?: protoManage.INodeReport): protoManage.NodeReport; /** * Encodes the specified NodeReport message. Does not implicitly {@link protoManage.NodeReport.verify|verify} messages. * @param message NodeReport message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.INodeReport, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified NodeReport message, length delimited. 
Does not implicitly {@link protoManage.NodeReport.verify|verify} messages. * @param message NodeReport message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.INodeReport, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a NodeReport message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns NodeReport * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.NodeReport; /** * Decodes a NodeReport message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns NodeReport * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.NodeReport; /** * Verifies a NodeReport message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a NodeReport message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns NodeReport */ public static fromObject(object: { [k: string]: any }): protoManage.NodeReport; /** * Creates a plain object from a NodeReport message. Also converts values to other types if specified. * @param message NodeReport * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.NodeReport, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this NodeReport to JSON. 
* @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a NodeReportVal. */ interface INodeReportVal { /** NodeReportVal Base */ Base?: (protoManage.IBase|null); /** NodeReportVal ReportID */ ReportID?: (number|null); /** NodeReportVal Value */ Value?: (string|null); /** NodeReportVal State */ State?: (protoManage.State|null); } /** Represents a NodeReportVal. */ class NodeReportVal implements INodeReportVal { /** * Constructs a new NodeReportVal. * @param [properties] Properties to set */ constructor(properties?: protoManage.INodeReportVal); /** NodeReportVal Base. */ public Base?: (protoManage.IBase|null); /** NodeReportVal ReportID. */ public ReportID: number; /** NodeReportVal Value. */ public Value: string; /** NodeReportVal State. */ public State: protoManage.State; /** * Creates a new NodeReportVal instance using the specified properties. * @param [properties] Properties to set * @returns NodeReportVal instance */ public static create(properties?: protoManage.INodeReportVal): protoManage.NodeReportVal; /** * Encodes the specified NodeReportVal message. Does not implicitly {@link protoManage.NodeReportVal.verify|verify} messages. * @param message NodeReportVal message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.INodeReportVal, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified NodeReportVal message, length delimited. Does not implicitly {@link protoManage.NodeReportVal.verify|verify} messages. * @param message NodeReportVal message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.INodeReportVal, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a NodeReportVal message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns NodeReportVal * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.NodeReportVal; /** * Decodes a NodeReportVal message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns NodeReportVal * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.NodeReportVal; /** * Verifies a NodeReportVal message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a NodeReportVal message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns NodeReportVal */ public static fromObject(object: { [k: string]: any }): protoManage.NodeReportVal; /** * Creates a plain object from a NodeReportVal message. Also converts values to other types if specified. * @param message NodeReportVal * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.NodeReportVal, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this NodeReportVal to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a NodeNotify. 
*/ interface INodeNotify { /** NodeNotify Base */ Base?: (protoManage.IBase|null); /** NodeNotify SenderID */ SenderID?: (number|null); /** NodeNotify SenderName */ SenderName?: (string|null); /** NodeNotify SenderType */ SenderType?: (protoManage.NotifySenderType|null); /** NodeNotify Message */ Message?: (string|null); /** NodeNotify State */ State?: (protoManage.State|null); /** NodeNotify showPop */ showPop?: (boolean|null); } /** Represents a NodeNotify. */ class NodeNotify implements INodeNotify { /** * Constructs a new NodeNotify. * @param [properties] Properties to set */ constructor(properties?: protoManage.INodeNotify); /** NodeNotify Base. */ public Base?: (protoManage.IBase|null); /** NodeNotify SenderID. */ public SenderID: number; /** NodeNotify SenderName. */ public SenderName: string; /** NodeNotify SenderType. */ public SenderType: protoManage.NotifySenderType; /** NodeNotify Message. */ public Message: string; /** NodeNotify State. */ public State: protoManage.State; /** NodeNotify showPop. */ public showPop: boolean; /** * Creates a new NodeNotify instance using the specified properties. * @param [properties] Properties to set * @returns NodeNotify instance */ public static create(properties?: protoManage.INodeNotify): protoManage.NodeNotify; /** * Encodes the specified NodeNotify message. Does not implicitly {@link protoManage.NodeNotify.verify|verify} messages. * @param message NodeNotify message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.INodeNotify, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified NodeNotify message, length delimited. Does not implicitly {@link protoManage.NodeNotify.verify|verify} messages. 
* @param message NodeNotify message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.INodeNotify, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a NodeNotify message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns NodeNotify * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.NodeNotify; /** * Decodes a NodeNotify message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns NodeNotify * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.NodeNotify; /** * Verifies a NodeNotify message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a NodeNotify message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns NodeNotify */ public static fromObject(object: { [k: string]: any }): protoManage.NodeNotify; /** * Creates a plain object from a NodeNotify message. Also converts values to other types if specified. * @param message NodeNotify * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.NodeNotify, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this NodeNotify to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a NodeResource. 
*/ interface INodeResource { /** NodeResource Base */ Base?: (protoManage.IBase|null); /** NodeResource Name */ Name?: (string|null); /** NodeResource Md5 */ Md5?: (string|null); /** NodeResource Sizes */ Sizes?: (number|null); /** NodeResource Type */ Type?: (protoManage.NodeResourceType|null); /** NodeResource UploaderID */ UploaderID?: (number|null); /** NodeResource UploaderName */ UploaderName?: (string|null); /** NodeResource UploaderType */ UploaderType?: (protoManage.NotifySenderType|null); /** NodeResource UploadTime */ UploadTime?: (number|null); /** NodeResource DownLoadCnt */ DownLoadCnt?: (number|null); /** NodeResource State */ State?: (protoManage.State|null); } /** Represents a NodeResource. */ class NodeResource implements INodeResource { /** * Constructs a new NodeResource. * @param [properties] Properties to set */ constructor(properties?: protoManage.INodeResource); /** NodeResource Base. */ public Base?: (protoManage.IBase|null); /** NodeResource Name. */ public Name: string; /** NodeResource Md5. */ public Md5: string; /** NodeResource Sizes. */ public Sizes: number; /** NodeResource Type. */ public Type: protoManage.NodeResourceType; /** NodeResource UploaderID. */ public UploaderID: number; /** NodeResource UploaderName. */ public UploaderName: string; /** NodeResource UploaderType. */ public UploaderType: protoManage.NotifySenderType; /** NodeResource UploadTime. */ public UploadTime: number; /** NodeResource DownLoadCnt. */ public DownLoadCnt: number; /** NodeResource State. */ public State: protoManage.State; /** * Creates a new NodeResource instance using the specified properties. * @param [properties] Properties to set * @returns NodeResource instance */ public static create(properties?: protoManage.INodeResource): protoManage.NodeResource; /** * Encodes the specified NodeResource message. Does not implicitly {@link protoManage.NodeResource.verify|verify} messages. 
* @param message NodeResource message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.INodeResource, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified NodeResource message, length delimited. Does not implicitly {@link protoManage.NodeResource.verify|verify} messages. * @param message NodeResource message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.INodeResource, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a NodeResource message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns NodeResource * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.NodeResource; /** * Decodes a NodeResource message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns NodeResource * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.NodeResource; /** * Verifies a NodeResource message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a NodeResource message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns NodeResource */ public static fromObject(object: { [k: string]: any }): protoManage.NodeResource; /** * Creates a plain object from a NodeResource message. 
Also converts values to other types if specified. * @param message NodeResource * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.NodeResource, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this NodeResource to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a ReqSystemInitInfo. */ interface IReqSystemInitInfo { } /** Represents a ReqSystemInitInfo. */ class ReqSystemInitInfo implements IReqSystemInitInfo { /** * Constructs a new ReqSystemInitInfo. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqSystemInitInfo); /** * Creates a new ReqSystemInitInfo instance using the specified properties. * @param [properties] Properties to set * @returns ReqSystemInitInfo instance */ public static create(properties?: protoManage.IReqSystemInitInfo): protoManage.ReqSystemInitInfo; /** * Encodes the specified ReqSystemInitInfo message. Does not implicitly {@link protoManage.ReqSystemInitInfo.verify|verify} messages. * @param message ReqSystemInitInfo message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqSystemInitInfo, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqSystemInitInfo message, length delimited. Does not implicitly {@link protoManage.ReqSystemInitInfo.verify|verify} messages. * @param message ReqSystemInitInfo message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqSystemInitInfo, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqSystemInitInfo message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqSystemInitInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqSystemInitInfo; /** * Decodes a ReqSystemInitInfo message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqSystemInitInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqSystemInitInfo; /** * Verifies a ReqSystemInitInfo message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqSystemInitInfo message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns ReqSystemInitInfo */ public static fromObject(object: { [k: string]: any }): protoManage.ReqSystemInitInfo; /** * Creates a plain object from a ReqSystemInitInfo message. Also converts values to other types if specified. * @param message ReqSystemInitInfo * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqSystemInitInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqSystemInitInfo to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsSystemInitInfo. */ interface IAnsSystemInitInfo { /** AnsSystemInitInfo systemInit */ systemInit?: (boolean|null); /** AnsSystemInitInfo openRegister */ openRegister?: (boolean|null); } /** Represents an AnsSystemInitInfo. 
 */
// NOTE(review): these declarations appear to be protobuf.js (pbts) generated output —
// prefer regenerating from the .proto source over hand-editing; edits here will be lost.
class AnsSystemInitInfo implements IAnsSystemInitInfo {

    /**
     * Constructs a new AnsSystemInitInfo.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IAnsSystemInitInfo);

    /** AnsSystemInitInfo systemInit. */
    public systemInit: boolean;

    /** AnsSystemInitInfo openRegister. */
    public openRegister: boolean;

    /**
     * Creates a new AnsSystemInitInfo instance using the specified properties.
     * @param [properties] Properties to set
     * @returns AnsSystemInitInfo instance
     */
    public static create(properties?: protoManage.IAnsSystemInitInfo): protoManage.AnsSystemInitInfo;

    /**
     * Encodes the specified AnsSystemInitInfo message. Does not implicitly {@link protoManage.AnsSystemInitInfo.verify|verify} messages.
     * @param message AnsSystemInitInfo message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IAnsSystemInitInfo, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified AnsSystemInitInfo message, length delimited. Does not implicitly {@link protoManage.AnsSystemInitInfo.verify|verify} messages.
     * @param message AnsSystemInitInfo message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IAnsSystemInitInfo, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes an AnsSystemInitInfo message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns AnsSystemInitInfo
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsSystemInitInfo;

    /**
     * Decodes an AnsSystemInitInfo message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns AnsSystemInitInfo
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsSystemInitInfo;

    /**
     * Verifies an AnsSystemInitInfo message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates an AnsSystemInitInfo message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns AnsSystemInitInfo
     */
    public static fromObject(object: { [k: string]: any }): protoManage.AnsSystemInitInfo;

    /**
     * Creates a plain object from an AnsSystemInitInfo message. Also converts values to other types if specified.
     * @param message AnsSystemInitInfo
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.AnsSystemInitInfo, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this AnsSystemInitInfo to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of a ReqTopLinkList. */
interface IReqTopLinkList {
}

/** Represents a ReqTopLinkList. */
class ReqTopLinkList implements IReqTopLinkList {

    /**
     * Constructs a new ReqTopLinkList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IReqTopLinkList);

    /**
     * Creates a new ReqTopLinkList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns ReqTopLinkList instance
     */
    public static create(properties?: protoManage.IReqTopLinkList): protoManage.ReqTopLinkList;

    /**
     * Encodes the specified ReqTopLinkList message. Does not implicitly {@link protoManage.ReqTopLinkList.verify|verify} messages.
     * @param message ReqTopLinkList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IReqTopLinkList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified ReqTopLinkList message, length delimited. Does not implicitly {@link protoManage.ReqTopLinkList.verify|verify} messages.
     * @param message ReqTopLinkList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IReqTopLinkList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes a ReqTopLinkList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns ReqTopLinkList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqTopLinkList;

    /**
     * Decodes a ReqTopLinkList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns ReqTopLinkList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqTopLinkList;

    /**
     * Verifies a ReqTopLinkList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates a ReqTopLinkList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns ReqTopLinkList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.ReqTopLinkList;

    /**
     * Creates a plain object from a ReqTopLinkList message. Also converts values to other types if specified.
     * @param message ReqTopLinkList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.ReqTopLinkList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this ReqTopLinkList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of an AnsTopLinkList. */
interface IAnsTopLinkList {

    /** AnsTopLinkList TopLinkList */
    TopLinkList?: (protoManage.ITopLink[]|null);
}

/** Represents an AnsTopLinkList. */
class AnsTopLinkList implements IAnsTopLinkList {

    /**
     * Constructs a new AnsTopLinkList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IAnsTopLinkList);

    /** AnsTopLinkList TopLinkList. */
    public TopLinkList: protoManage.ITopLink[];

    /**
     * Creates a new AnsTopLinkList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns AnsTopLinkList instance
     */
    public static create(properties?: protoManage.IAnsTopLinkList): protoManage.AnsTopLinkList;

    /**
     * Encodes the specified AnsTopLinkList message. Does not implicitly {@link protoManage.AnsTopLinkList.verify|verify} messages.
     * @param message AnsTopLinkList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IAnsTopLinkList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified AnsTopLinkList message, length delimited. Does not implicitly {@link protoManage.AnsTopLinkList.verify|verify} messages.
     * @param message AnsTopLinkList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IAnsTopLinkList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes an AnsTopLinkList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns AnsTopLinkList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsTopLinkList;

    /**
     * Decodes an AnsTopLinkList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns AnsTopLinkList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsTopLinkList;

    /**
     * Verifies an AnsTopLinkList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates an AnsTopLinkList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns AnsTopLinkList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.AnsTopLinkList;

    /**
     * Creates a plain object from an AnsTopLinkList message. Also converts values to other types if specified.
     * @param message AnsTopLinkList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.AnsTopLinkList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this AnsTopLinkList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of a ReqManagerList. */
interface IReqManagerList {
}

/** Represents a ReqManagerList. */
class ReqManagerList implements IReqManagerList {

    /**
     * Constructs a new ReqManagerList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IReqManagerList);

    /**
     * Creates a new ReqManagerList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns ReqManagerList instance
     */
    public static create(properties?: protoManage.IReqManagerList): protoManage.ReqManagerList;

    /**
     * Encodes the specified ReqManagerList message. Does not implicitly {@link protoManage.ReqManagerList.verify|verify} messages.
     * @param message ReqManagerList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IReqManagerList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified ReqManagerList message, length delimited. Does not implicitly {@link protoManage.ReqManagerList.verify|verify} messages.
     * @param message ReqManagerList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IReqManagerList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes a ReqManagerList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns ReqManagerList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqManagerList;

    /**
     * Decodes a ReqManagerList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns ReqManagerList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqManagerList;

    /**
     * Verifies a ReqManagerList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates a ReqManagerList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns ReqManagerList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.ReqManagerList;

    /**
     * Creates a plain object from a ReqManagerList message. Also converts values to other types if specified.
     * @param message ReqManagerList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.ReqManagerList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this ReqManagerList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

// NOTE(review): generated protobuf.js declarations — keep in sync with the .proto, do not hand-edit.
/** Properties of an AnsManagerList. */
interface IAnsManagerList {

    /** AnsManagerList ManagerList */
    ManagerList?: (protoManage.IManager[]|null);
}

/** Represents an AnsManagerList. */
class AnsManagerList implements IAnsManagerList {

    /**
     * Constructs a new AnsManagerList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IAnsManagerList);

    /** AnsManagerList ManagerList. */
    public ManagerList: protoManage.IManager[];

    /**
     * Creates a new AnsManagerList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns AnsManagerList instance
     */
    public static create(properties?: protoManage.IAnsManagerList): protoManage.AnsManagerList;

    /**
     * Encodes the specified AnsManagerList message. Does not implicitly {@link protoManage.AnsManagerList.verify|verify} messages.
     * @param message AnsManagerList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IAnsManagerList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified AnsManagerList message, length delimited. Does not implicitly {@link protoManage.AnsManagerList.verify|verify} messages.
     * @param message AnsManagerList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IAnsManagerList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes an AnsManagerList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns AnsManagerList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsManagerList;

    /**
     * Decodes an AnsManagerList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns AnsManagerList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsManagerList;

    /**
     * Verifies an AnsManagerList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates an AnsManagerList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns AnsManagerList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.AnsManagerList;

    /**
     * Creates a plain object from an AnsManagerList message. Also converts values to other types if specified.
     * @param message AnsManagerList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.AnsManagerList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this AnsManagerList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of a ReqNodeList. */
interface IReqNodeList {

    /** ReqNodeList ID */
    ID?: (number[]|null);

    /** ReqNodeList Name */
    Name?: (string[]|null);

    /** ReqNodeList State */
    State?: (protoManage.State[]|null);

    /** ReqNodeList UpdateTime */
    UpdateTime?: (protoManage.ITime[]|null);

    /** ReqNodeList Page */
    Page?: (protoManage.IPage|null);
}

/** Represents a ReqNodeList. */
class ReqNodeList implements IReqNodeList {

    /**
     * Constructs a new ReqNodeList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IReqNodeList);

    /** ReqNodeList ID. */
    public ID: number[];

    /** ReqNodeList Name. */
    public Name: string[];

    /** ReqNodeList State. */
    public State: protoManage.State[];

    /** ReqNodeList UpdateTime. */
    public UpdateTime: protoManage.ITime[];

    /** ReqNodeList Page. */
    public Page?: (protoManage.IPage|null);

    /**
     * Creates a new ReqNodeList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns ReqNodeList instance
     */
    public static create(properties?: protoManage.IReqNodeList): protoManage.ReqNodeList;

    /**
     * Encodes the specified ReqNodeList message. Does not implicitly {@link protoManage.ReqNodeList.verify|verify} messages.
     * @param message ReqNodeList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IReqNodeList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified ReqNodeList message, length delimited. Does not implicitly {@link protoManage.ReqNodeList.verify|verify} messages.
     * @param message ReqNodeList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IReqNodeList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes a ReqNodeList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns ReqNodeList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeList;

    /**
     * Decodes a ReqNodeList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns ReqNodeList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeList;

    /**
     * Verifies a ReqNodeList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates a ReqNodeList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns ReqNodeList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeList;

    /**
     * Creates a plain object from a ReqNodeList message. Also converts values to other types if specified.
     * @param message ReqNodeList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.ReqNodeList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this ReqNodeList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of an AnsNodeList. */
interface IAnsNodeList {

    /** AnsNodeList Length */
    Length?: (number|null);

    /** AnsNodeList NodeList */
    NodeList?: (protoManage.INode[]|null);
}

/** Represents an AnsNodeList. */
class AnsNodeList implements IAnsNodeList {

    /**
     * Constructs a new AnsNodeList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IAnsNodeList);

    /** AnsNodeList Length. */
    public Length: number;

    /** AnsNodeList NodeList. */
    public NodeList: protoManage.INode[];

    /**
     * Creates a new AnsNodeList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns AnsNodeList instance
     */
    public static create(properties?: protoManage.IAnsNodeList): protoManage.AnsNodeList;

    /**
     * Encodes the specified AnsNodeList message. Does not implicitly {@link protoManage.AnsNodeList.verify|verify} messages.
     * @param message AnsNodeList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IAnsNodeList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified AnsNodeList message, length delimited. Does not implicitly {@link protoManage.AnsNodeList.verify|verify} messages.
     * @param message AnsNodeList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IAnsNodeList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes an AnsNodeList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns AnsNodeList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeList;

    /**
     * Decodes an AnsNodeList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns AnsNodeList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeList;

    /**
     * Verifies an AnsNodeList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates an AnsNodeList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns AnsNodeList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeList;

    /**
     * Creates a plain object from an AnsNodeList message. Also converts values to other types if specified.
     * @param message AnsNodeList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.AnsNodeList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this AnsNodeList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of a ReqNodeFuncList.
 */
// NOTE(review): generated protobuf.js declarations — keep in sync with the .proto, do not hand-edit.
interface IReqNodeFuncList {

    /** ReqNodeFuncList ID */
    ID?: (number[]|null);

    /** ReqNodeFuncList Name */
    Name?: (string[]|null);

    /** ReqNodeFuncList Level */
    Level?: (protoManage.Level[]|null);

    /** ReqNodeFuncList LevelMax */
    LevelMax?: (protoManage.Level|null);

    /** ReqNodeFuncList NodeID */
    NodeID?: (number[]|null);

    /** ReqNodeFuncList NodeName */
    NodeName?: (string[]|null);

    /** ReqNodeFuncList UpdateTime */
    UpdateTime?: (protoManage.ITime[]|null);

    /** ReqNodeFuncList Page */
    Page?: (protoManage.IPage|null);
}

/** Represents a ReqNodeFuncList. */
class ReqNodeFuncList implements IReqNodeFuncList {

    /**
     * Constructs a new ReqNodeFuncList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IReqNodeFuncList);

    /** ReqNodeFuncList ID. */
    public ID: number[];

    /** ReqNodeFuncList Name. */
    public Name: string[];

    /** ReqNodeFuncList Level. */
    public Level: protoManage.Level[];

    /** ReqNodeFuncList LevelMax. */
    public LevelMax: protoManage.Level;

    /** ReqNodeFuncList NodeID. */
    public NodeID: number[];

    /** ReqNodeFuncList NodeName. */
    public NodeName: string[];

    /** ReqNodeFuncList UpdateTime. */
    public UpdateTime: protoManage.ITime[];

    /** ReqNodeFuncList Page. */
    public Page?: (protoManage.IPage|null);

    /**
     * Creates a new ReqNodeFuncList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns ReqNodeFuncList instance
     */
    public static create(properties?: protoManage.IReqNodeFuncList): protoManage.ReqNodeFuncList;

    /**
     * Encodes the specified ReqNodeFuncList message. Does not implicitly {@link protoManage.ReqNodeFuncList.verify|verify} messages.
     * @param message ReqNodeFuncList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IReqNodeFuncList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified ReqNodeFuncList message, length delimited. Does not implicitly {@link protoManage.ReqNodeFuncList.verify|verify} messages.
     * @param message ReqNodeFuncList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IReqNodeFuncList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes a ReqNodeFuncList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns ReqNodeFuncList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeFuncList;

    /**
     * Decodes a ReqNodeFuncList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns ReqNodeFuncList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeFuncList;

    /**
     * Verifies a ReqNodeFuncList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates a ReqNodeFuncList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns ReqNodeFuncList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeFuncList;

    /**
     * Creates a plain object from a ReqNodeFuncList message. Also converts values to other types if specified.
     * @param message ReqNodeFuncList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.ReqNodeFuncList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this ReqNodeFuncList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of an AnsNodeFuncList. */
interface IAnsNodeFuncList {

    /** AnsNodeFuncList Length */
    Length?: (number|null);

    /** AnsNodeFuncList NodeFuncList */
    NodeFuncList?: (protoManage.INodeFunc[]|null);

    /** AnsNodeFuncList NodeList */
    NodeList?: (protoManage.INode[]|null);
}

/** Represents an AnsNodeFuncList. */
class AnsNodeFuncList implements IAnsNodeFuncList {

    /**
     * Constructs a new AnsNodeFuncList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IAnsNodeFuncList);

    /** AnsNodeFuncList Length. */
    public Length: number;

    /** AnsNodeFuncList NodeFuncList. */
    public NodeFuncList: protoManage.INodeFunc[];

    /** AnsNodeFuncList NodeList. */
    public NodeList: protoManage.INode[];

    /**
     * Creates a new AnsNodeFuncList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns AnsNodeFuncList instance
     */
    public static create(properties?: protoManage.IAnsNodeFuncList): protoManage.AnsNodeFuncList;

    /**
     * Encodes the specified AnsNodeFuncList message. Does not implicitly {@link protoManage.AnsNodeFuncList.verify|verify} messages.
     * @param message AnsNodeFuncList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IAnsNodeFuncList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified AnsNodeFuncList message, length delimited. Does not implicitly {@link protoManage.AnsNodeFuncList.verify|verify} messages.
     * @param message AnsNodeFuncList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IAnsNodeFuncList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes an AnsNodeFuncList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns AnsNodeFuncList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeFuncList;

    /**
     * Decodes an AnsNodeFuncList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns AnsNodeFuncList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeFuncList;

    /**
     * Verifies an AnsNodeFuncList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates an AnsNodeFuncList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns AnsNodeFuncList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeFuncList;

    /**
     * Creates a plain object from an AnsNodeFuncList message. Also converts values to other types if specified.
     * @param message AnsNodeFuncList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.AnsNodeFuncList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this AnsNodeFuncList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

// NOTE(review): generated protobuf.js declarations — keep in sync with the .proto, do not hand-edit.
/** Properties of a ReqNodeReportList. */
interface IReqNodeReportList {

    /** ReqNodeReportList ID */
    ID?: (number[]|null);

    /** ReqNodeReportList Name */
    Name?: (string[]|null);

    /** ReqNodeReportList Level */
    Level?: (protoManage.Level[]|null);

    /** ReqNodeReportList LevelMax */
    LevelMax?: (protoManage.Level|null);

    /** ReqNodeReportList NodeID */
    NodeID?: (number[]|null);

    /** ReqNodeReportList NodeName */
    NodeName?: (string[]|null);

    /** ReqNodeReportList UpdateTime */
    UpdateTime?: (protoManage.ITime[]|null);

    /** ReqNodeReportList Page */
    Page?: (protoManage.IPage|null);
}

/** Represents a ReqNodeReportList. */
class ReqNodeReportList implements IReqNodeReportList {

    /**
     * Constructs a new ReqNodeReportList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IReqNodeReportList);

    /** ReqNodeReportList ID. */
    public ID: number[];

    /** ReqNodeReportList Name. */
    public Name: string[];

    /** ReqNodeReportList Level. */
    public Level: protoManage.Level[];

    /** ReqNodeReportList LevelMax. */
    public LevelMax: protoManage.Level;

    /** ReqNodeReportList NodeID. */
    public NodeID: number[];

    /** ReqNodeReportList NodeName. */
    public NodeName: string[];

    /** ReqNodeReportList UpdateTime. */
    public UpdateTime: protoManage.ITime[];

    /** ReqNodeReportList Page. */
    public Page?: (protoManage.IPage|null);

    /**
     * Creates a new ReqNodeReportList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns ReqNodeReportList instance
     */
    public static create(properties?: protoManage.IReqNodeReportList): protoManage.ReqNodeReportList;

    /**
     * Encodes the specified ReqNodeReportList message. Does not implicitly {@link protoManage.ReqNodeReportList.verify|verify} messages.
     * @param message ReqNodeReportList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IReqNodeReportList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified ReqNodeReportList message, length delimited. Does not implicitly {@link protoManage.ReqNodeReportList.verify|verify} messages.
     * @param message ReqNodeReportList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IReqNodeReportList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes a ReqNodeReportList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns ReqNodeReportList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeReportList;

    /**
     * Decodes a ReqNodeReportList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns ReqNodeReportList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeReportList;

    /**
     * Verifies a ReqNodeReportList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates a ReqNodeReportList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns ReqNodeReportList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeReportList;

    /**
     * Creates a plain object from a ReqNodeReportList message. Also converts values to other types if specified.
     * @param message ReqNodeReportList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.ReqNodeReportList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this ReqNodeReportList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of an AnsNodeReportList. */
interface IAnsNodeReportList {

    /** AnsNodeReportList Length */
    Length?: (number|null);

    /** AnsNodeReportList NodeReportList */
    NodeReportList?: (protoManage.INodeReport[]|null);

    /** AnsNodeReportList NodeList */
    NodeList?: (protoManage.INode[]|null);
}

/** Represents an AnsNodeReportList. */
class AnsNodeReportList implements IAnsNodeReportList {

    /**
     * Constructs a new AnsNodeReportList.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IAnsNodeReportList);

    /** AnsNodeReportList Length. */
    public Length: number;

    /** AnsNodeReportList NodeReportList. */
    public NodeReportList: protoManage.INodeReport[];

    /** AnsNodeReportList NodeList. */
    public NodeList: protoManage.INode[];

    /**
     * Creates a new AnsNodeReportList instance using the specified properties.
     * @param [properties] Properties to set
     * @returns AnsNodeReportList instance
     */
    public static create(properties?: protoManage.IAnsNodeReportList): protoManage.AnsNodeReportList;

    /**
     * Encodes the specified AnsNodeReportList message. Does not implicitly {@link protoManage.AnsNodeReportList.verify|verify} messages.
     * @param message AnsNodeReportList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IAnsNodeReportList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified AnsNodeReportList message, length delimited. Does not implicitly {@link protoManage.AnsNodeReportList.verify|verify} messages.
     * @param message AnsNodeReportList message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encodeDelimited(message: protoManage.IAnsNodeReportList, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Decodes an AnsNodeReportList message from the specified reader or buffer.
     * @param reader Reader or buffer to decode from
     * @param [length] Message length if known beforehand
     * @returns AnsNodeReportList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeReportList;

    /**
     * Decodes an AnsNodeReportList message from the specified reader or buffer, length delimited.
     * @param reader Reader or buffer to decode from
     * @returns AnsNodeReportList
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeReportList;

    /**
     * Verifies an AnsNodeReportList message.
     * @param message Plain object to verify
     * @returns `null` if valid, otherwise the reason why it is not
     */
    public static verify(message: { [k: string]: any }): (string|null);

    /**
     * Creates an AnsNodeReportList message from a plain object. Also converts values to their respective internal types.
     * @param object Plain object
     * @returns AnsNodeReportList
     */
    public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeReportList;

    /**
     * Creates a plain object from an AnsNodeReportList message. Also converts values to other types if specified.
     * @param message AnsNodeReportList
     * @param [options] Conversion options
     * @returns Plain object
     */
    public static toObject(message: protoManage.AnsNodeReportList, options?: $protobuf.IConversionOptions): { [k: string]: any };

    /**
     * Converts this AnsNodeReportList to JSON.
     * @returns JSON object
     */
    public toJSON(): { [k: string]: any };
}

/** Properties of a ReqNodeFuncCall. */
interface IReqNodeFuncCall {

    /** ReqNodeFuncCall NodeFuncCall */
    NodeFuncCall?: (protoManage.INodeFuncCall|null);
}

/** Represents a ReqNodeFuncCall. */
class ReqNodeFuncCall implements IReqNodeFuncCall {

    /**
     * Constructs a new ReqNodeFuncCall.
     * @param [properties] Properties to set
     */
    constructor(properties?: protoManage.IReqNodeFuncCall);

    /** ReqNodeFuncCall NodeFuncCall. */
    public NodeFuncCall?: (protoManage.INodeFuncCall|null);

    /**
     * Creates a new ReqNodeFuncCall instance using the specified properties.
     * @param [properties] Properties to set
     * @returns ReqNodeFuncCall instance
     */
    public static create(properties?: protoManage.IReqNodeFuncCall): protoManage.ReqNodeFuncCall;

    /**
     * Encodes the specified ReqNodeFuncCall message. Does not implicitly {@link protoManage.ReqNodeFuncCall.verify|verify} messages.
     * @param message ReqNodeFuncCall message or plain object to encode
     * @param [writer] Writer to encode to
     * @returns Writer
     */
    public static encode(message: protoManage.IReqNodeFuncCall, writer?: $protobuf.Writer): $protobuf.Writer;

    /**
     * Encodes the specified ReqNodeFuncCall message, length delimited. Does not implicitly {@link protoManage.ReqNodeFuncCall.verify|verify} messages.
* @param message ReqNodeFuncCall message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeFuncCall, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeFuncCall message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeFuncCall * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeFuncCall; /** * Decodes a ReqNodeFuncCall message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqNodeFuncCall * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeFuncCall; /** * Verifies a ReqNodeFuncCall message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeFuncCall message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns ReqNodeFuncCall */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeFuncCall; /** * Creates a plain object from a ReqNodeFuncCall message. Also converts values to other types if specified. * @param message ReqNodeFuncCall * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeFuncCall, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeFuncCall to JSON. 
* @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeFuncCall. */ interface IAnsNodeFuncCall { /** AnsNodeFuncCall NodeFuncCall */ NodeFuncCall?: (protoManage.INodeFuncCall|null); /** AnsNodeFuncCall Error */ Error?: (string|null); } /** Represents an AnsNodeFuncCall. */ class AnsNodeFuncCall implements IAnsNodeFuncCall { /** * Constructs a new AnsNodeFuncCall. * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeFuncCall); /** AnsNodeFuncCall NodeFuncCall. */ public NodeFuncCall?: (protoManage.INodeFuncCall|null); /** AnsNodeFuncCall Error. */ public Error: string; /** * Creates a new AnsNodeFuncCall instance using the specified properties. * @param [properties] Properties to set * @returns AnsNodeFuncCall instance */ public static create(properties?: protoManage.IAnsNodeFuncCall): protoManage.AnsNodeFuncCall; /** * Encodes the specified AnsNodeFuncCall message. Does not implicitly {@link protoManage.AnsNodeFuncCall.verify|verify} messages. * @param message AnsNodeFuncCall message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeFuncCall, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeFuncCall message, length delimited. Does not implicitly {@link protoManage.AnsNodeFuncCall.verify|verify} messages. * @param message AnsNodeFuncCall message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeFuncCall, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeFuncCall message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeFuncCall * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeFuncCall; /** * Decodes an AnsNodeFuncCall message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeFuncCall * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeFuncCall; /** * Verifies an AnsNodeFuncCall message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeFuncCall message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeFuncCall */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeFuncCall; /** * Creates a plain object from an AnsNodeFuncCall message. Also converts values to other types if specified. * @param message AnsNodeFuncCall * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeFuncCall, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeFuncCall to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a ReqNodeFuncCallList. */ interface IReqNodeFuncCallList { /** ReqNodeFuncCallList FuncID */ FuncID?: (number|null); /** ReqNodeFuncCallList Page */ Page?: (protoManage.IPage|null); } /** Represents a ReqNodeFuncCallList. 
*/ class ReqNodeFuncCallList implements IReqNodeFuncCallList { /** * Constructs a new ReqNodeFuncCallList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqNodeFuncCallList); /** ReqNodeFuncCallList FuncID. */ public FuncID: number; /** ReqNodeFuncCallList Page. */ public Page?: (protoManage.IPage|null); /** * Creates a new ReqNodeFuncCallList instance using the specified properties. * @param [properties] Properties to set * @returns ReqNodeFuncCallList instance */ public static create(properties?: protoManage.IReqNodeFuncCallList): protoManage.ReqNodeFuncCallList; /** * Encodes the specified ReqNodeFuncCallList message. Does not implicitly {@link protoManage.ReqNodeFuncCallList.verify|verify} messages. * @param message ReqNodeFuncCallList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqNodeFuncCallList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqNodeFuncCallList message, length delimited. Does not implicitly {@link protoManage.ReqNodeFuncCallList.verify|verify} messages. * @param message ReqNodeFuncCallList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeFuncCallList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeFuncCallList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeFuncCallList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeFuncCallList; /** * Decodes a ReqNodeFuncCallList message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from * @returns ReqNodeFuncCallList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeFuncCallList; /** * Verifies a ReqNodeFuncCallList message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeFuncCallList message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns ReqNodeFuncCallList */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeFuncCallList; /** * Creates a plain object from a ReqNodeFuncCallList message. Also converts values to other types if specified. * @param message ReqNodeFuncCallList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeFuncCallList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeFuncCallList to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeFuncCallList. */ interface IAnsNodeFuncCallList { /** AnsNodeFuncCallList NodeFuncCallList */ NodeFuncCallList?: (protoManage.INodeFuncCall[]|null); } /** Represents an AnsNodeFuncCallList. */ class AnsNodeFuncCallList implements IAnsNodeFuncCallList { /** * Constructs a new AnsNodeFuncCallList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeFuncCallList); /** AnsNodeFuncCallList NodeFuncCallList. */ public NodeFuncCallList: protoManage.INodeFuncCall[]; /** * Creates a new AnsNodeFuncCallList instance using the specified properties. 
* @param [properties] Properties to set * @returns AnsNodeFuncCallList instance */ public static create(properties?: protoManage.IAnsNodeFuncCallList): protoManage.AnsNodeFuncCallList; /** * Encodes the specified AnsNodeFuncCallList message. Does not implicitly {@link protoManage.AnsNodeFuncCallList.verify|verify} messages. * @param message AnsNodeFuncCallList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeFuncCallList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeFuncCallList message, length delimited. Does not implicitly {@link protoManage.AnsNodeFuncCallList.verify|verify} messages. * @param message AnsNodeFuncCallList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeFuncCallList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeFuncCallList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeFuncCallList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeFuncCallList; /** * Decodes an AnsNodeFuncCallList message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeFuncCallList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeFuncCallList; /** * Verifies an AnsNodeFuncCallList message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeFuncCallList message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeFuncCallList */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeFuncCallList; /** * Creates a plain object from an AnsNodeFuncCallList message. Also converts values to other types if specified. * @param message AnsNodeFuncCallList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeFuncCallList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeFuncCallList to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a ReqNodeReportValList. */ interface IReqNodeReportValList { /** ReqNodeReportValList ID */ ID?: (number|null); /** ReqNodeReportValList ReportID */ ReportID?: (number|null); /** ReqNodeReportValList Page */ Page?: (protoManage.IPage|null); } /** Represents a ReqNodeReportValList. */ class ReqNodeReportValList implements IReqNodeReportValList { /** * Constructs a new ReqNodeReportValList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqNodeReportValList); /** ReqNodeReportValList ID. */ public ID: number; /** ReqNodeReportValList ReportID. */ public ReportID: number; /** ReqNodeReportValList Page. */ public Page?: (protoManage.IPage|null); /** * Creates a new ReqNodeReportValList instance using the specified properties. * @param [properties] Properties to set * @returns ReqNodeReportValList instance */ public static create(properties?: protoManage.IReqNodeReportValList): protoManage.ReqNodeReportValList; /** * Encodes the specified ReqNodeReportValList message. 
Does not implicitly {@link protoManage.ReqNodeReportValList.verify|verify} messages. * @param message ReqNodeReportValList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqNodeReportValList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqNodeReportValList message, length delimited. Does not implicitly {@link protoManage.ReqNodeReportValList.verify|verify} messages. * @param message ReqNodeReportValList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeReportValList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeReportValList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeReportValList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeReportValList; /** * Decodes a ReqNodeReportValList message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqNodeReportValList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeReportValList; /** * Verifies a ReqNodeReportValList message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeReportValList message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object * @returns ReqNodeReportValList */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeReportValList; /** * Creates a plain object from a ReqNodeReportValList message. Also converts values to other types if specified. * @param message ReqNodeReportValList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeReportValList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeReportValList to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeReportValList. */ interface IAnsNodeReportValList { /** AnsNodeReportValList NodeReportValList */ NodeReportValList?: (protoManage.INodeReportVal[]|null); } /** Represents an AnsNodeReportValList. */ class AnsNodeReportValList implements IAnsNodeReportValList { /** * Constructs a new AnsNodeReportValList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeReportValList); /** AnsNodeReportValList NodeReportValList. */ public NodeReportValList: protoManage.INodeReportVal[]; /** * Creates a new AnsNodeReportValList instance using the specified properties. * @param [properties] Properties to set * @returns AnsNodeReportValList instance */ public static create(properties?: protoManage.IAnsNodeReportValList): protoManage.AnsNodeReportValList; /** * Encodes the specified AnsNodeReportValList message. Does not implicitly {@link protoManage.AnsNodeReportValList.verify|verify} messages. * @param message AnsNodeReportValList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeReportValList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeReportValList message, length delimited. Does not implicitly {@link protoManage.AnsNodeReportValList.verify|verify} messages. 
* @param message AnsNodeReportValList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeReportValList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeReportValList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeReportValList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeReportValList; /** * Decodes an AnsNodeReportValList message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeReportValList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeReportValList; /** * Verifies an AnsNodeReportValList message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeReportValList message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeReportValList */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeReportValList; /** * Creates a plain object from an AnsNodeReportValList message. Also converts values to other types if specified. 
* @param message AnsNodeReportValList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeReportValList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeReportValList to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a ReqNodeNotifyList. */ interface IReqNodeNotifyList { /** ReqNodeNotifyList Message */ Message?: (string[]|null); /** ReqNodeNotifyList State */ State?: (protoManage.State[]|null); /** ReqNodeNotifyList SenderName */ SenderName?: (string[]|null); /** ReqNodeNotifyList SenderType */ SenderType?: (protoManage.NotifySenderType[]|null); /** ReqNodeNotifyList SenderTime */ SenderTime?: (protoManage.ITime[]|null); /** ReqNodeNotifyList Page */ Page?: (protoManage.IPage|null); } /** Represents a ReqNodeNotifyList. */ class ReqNodeNotifyList implements IReqNodeNotifyList { /** * Constructs a new ReqNodeNotifyList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqNodeNotifyList); /** ReqNodeNotifyList Message. */ public Message: string[]; /** ReqNodeNotifyList State. */ public State: protoManage.State[]; /** ReqNodeNotifyList SenderName. */ public SenderName: string[]; /** ReqNodeNotifyList SenderType. */ public SenderType: protoManage.NotifySenderType[]; /** ReqNodeNotifyList SenderTime. */ public SenderTime: protoManage.ITime[]; /** ReqNodeNotifyList Page. */ public Page?: (protoManage.IPage|null); /** * Creates a new ReqNodeNotifyList instance using the specified properties. * @param [properties] Properties to set * @returns ReqNodeNotifyList instance */ public static create(properties?: protoManage.IReqNodeNotifyList): protoManage.ReqNodeNotifyList; /** * Encodes the specified ReqNodeNotifyList message. Does not implicitly {@link protoManage.ReqNodeNotifyList.verify|verify} messages. 
* @param message ReqNodeNotifyList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqNodeNotifyList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqNodeNotifyList message, length delimited. Does not implicitly {@link protoManage.ReqNodeNotifyList.verify|verify} messages. * @param message ReqNodeNotifyList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeNotifyList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeNotifyList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeNotifyList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeNotifyList; /** * Decodes a ReqNodeNotifyList message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqNodeNotifyList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeNotifyList; /** * Verifies a ReqNodeNotifyList message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeNotifyList message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object * @returns ReqNodeNotifyList */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeNotifyList; /** * Creates a plain object from a ReqNodeNotifyList message. Also converts values to other types if specified. * @param message ReqNodeNotifyList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeNotifyList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeNotifyList to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeNotifyList. */ interface IAnsNodeNotifyList { /** AnsNodeNotifyList Length */ Length?: (number|null); /** AnsNodeNotifyList NodeNotifyList */ NodeNotifyList?: (protoManage.INodeNotify[]|null); } /** Represents an AnsNodeNotifyList. */ class AnsNodeNotifyList implements IAnsNodeNotifyList { /** * Constructs a new AnsNodeNotifyList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeNotifyList); /** AnsNodeNotifyList Length. */ public Length: number; /** AnsNodeNotifyList NodeNotifyList. */ public NodeNotifyList: protoManage.INodeNotify[]; /** * Creates a new AnsNodeNotifyList instance using the specified properties. * @param [properties] Properties to set * @returns AnsNodeNotifyList instance */ public static create(properties?: protoManage.IAnsNodeNotifyList): protoManage.AnsNodeNotifyList; /** * Encodes the specified AnsNodeNotifyList message. Does not implicitly {@link protoManage.AnsNodeNotifyList.verify|verify} messages. * @param message AnsNodeNotifyList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeNotifyList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeNotifyList message, length delimited. Does not implicitly {@link protoManage.AnsNodeNotifyList.verify|verify} messages. 
* @param message AnsNodeNotifyList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeNotifyList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeNotifyList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeNotifyList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeNotifyList; /** * Decodes an AnsNodeNotifyList message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeNotifyList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeNotifyList; /** * Verifies an AnsNodeNotifyList message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeNotifyList message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeNotifyList */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeNotifyList; /** * Creates a plain object from an AnsNodeNotifyList message. Also converts values to other types if specified. * @param message AnsNodeNotifyList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeNotifyList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeNotifyList to JSON. 
* @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a ReqNodeResourceList. */ interface IReqNodeResourceList { /** ReqNodeResourceList Name */ Name?: (string[]|null); /** ReqNodeResourceList State */ State?: (protoManage.State[]|null); /** ReqNodeResourceList UploaderName */ UploaderName?: (string[]|null); /** ReqNodeResourceList UploaderType */ UploaderType?: (protoManage.NotifySenderType[]|null); /** ReqNodeResourceList UploadTime */ UploadTime?: (protoManage.ITime[]|null); /** ReqNodeResourceList Page */ Page?: (protoManage.IPage|null); } /** Represents a ReqNodeResourceList. */ class ReqNodeResourceList implements IReqNodeResourceList { /** * Constructs a new ReqNodeResourceList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqNodeResourceList); /** ReqNodeResourceList Name. */ public Name: string[]; /** ReqNodeResourceList State. */ public State: protoManage.State[]; /** ReqNodeResourceList UploaderName. */ public UploaderName: string[]; /** ReqNodeResourceList UploaderType. */ public UploaderType: protoManage.NotifySenderType[]; /** ReqNodeResourceList UploadTime. */ public UploadTime: protoManage.ITime[]; /** ReqNodeResourceList Page. */ public Page?: (protoManage.IPage|null); /** * Creates a new ReqNodeResourceList instance using the specified properties. * @param [properties] Properties to set * @returns ReqNodeResourceList instance */ public static create(properties?: protoManage.IReqNodeResourceList): protoManage.ReqNodeResourceList; /** * Encodes the specified ReqNodeResourceList message. Does not implicitly {@link protoManage.ReqNodeResourceList.verify|verify} messages. * @param message ReqNodeResourceList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqNodeResourceList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqNodeResourceList message, length delimited. 
Does not implicitly {@link protoManage.ReqNodeResourceList.verify|verify} messages. * @param message ReqNodeResourceList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeResourceList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeResourceList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeResourceList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeResourceList; /** * Decodes a ReqNodeResourceList message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqNodeResourceList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeResourceList; /** * Verifies a ReqNodeResourceList message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeResourceList message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns ReqNodeResourceList */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeResourceList; /** * Creates a plain object from a ReqNodeResourceList message. Also converts values to other types if specified. 
* @param message ReqNodeResourceList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeResourceList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeResourceList to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeResourceList. */ interface IAnsNodeResourceList { /** AnsNodeResourceList Length */ Length?: (number|null); /** AnsNodeResourceList NodeResourceList */ NodeResourceList?: (protoManage.INodeResource[]|null); } /** Represents an AnsNodeResourceList. */ class AnsNodeResourceList implements IAnsNodeResourceList { /** * Constructs a new AnsNodeResourceList. * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeResourceList); /** AnsNodeResourceList Length. */ public Length: number; /** AnsNodeResourceList NodeResourceList. */ public NodeResourceList: protoManage.INodeResource[]; /** * Creates a new AnsNodeResourceList instance using the specified properties. * @param [properties] Properties to set * @returns AnsNodeResourceList instance */ public static create(properties?: protoManage.IAnsNodeResourceList): protoManage.AnsNodeResourceList; /** * Encodes the specified AnsNodeResourceList message. Does not implicitly {@link protoManage.AnsNodeResourceList.verify|verify} messages. * @param message AnsNodeResourceList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeResourceList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeResourceList message, length delimited. Does not implicitly {@link protoManage.AnsNodeResourceList.verify|verify} messages. 
 * @param message AnsNodeResourceList message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeResourceList, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeResourceList message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeResourceList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeResourceList; /** * Decodes an AnsNodeResourceList message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeResourceList * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeResourceList; /** * Verifies an AnsNodeResourceList message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeResourceList message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeResourceList */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeResourceList; /** * Creates a plain object from an AnsNodeResourceList message. Also converts values to other types if specified. 
 * @param message AnsNodeResourceList * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeResourceList, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeResourceList to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /* NOTE(review): generated declaration — ReqNodeResourceUpload carries the raw resource bytes (Data: Uint8Array); the request/answer pairing with AnsNodeResourceUpload below mirrors the other Req*/Ans* messages in this namespace. */ /** Properties of a ReqNodeResourceUpload. */ interface IReqNodeResourceUpload { /** ReqNodeResourceUpload Data */ Data?: (Uint8Array|null); } /** Represents a ReqNodeResourceUpload. */ class ReqNodeResourceUpload implements IReqNodeResourceUpload { /** * Constructs a new ReqNodeResourceUpload. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqNodeResourceUpload); /** ReqNodeResourceUpload Data. */ public Data: Uint8Array; /** * Creates a new ReqNodeResourceUpload instance using the specified properties. * @param [properties] Properties to set * @returns ReqNodeResourceUpload instance */ public static create(properties?: protoManage.IReqNodeResourceUpload): protoManage.ReqNodeResourceUpload; /** * Encodes the specified ReqNodeResourceUpload message. Does not implicitly {@link protoManage.ReqNodeResourceUpload.verify|verify} messages. * @param message ReqNodeResourceUpload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqNodeResourceUpload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqNodeResourceUpload message, length delimited. Does not implicitly {@link protoManage.ReqNodeResourceUpload.verify|verify} messages. * @param message ReqNodeResourceUpload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeResourceUpload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeResourceUpload message from the specified reader or buffer. 
 * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeResourceUpload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeResourceUpload; /** * Decodes a ReqNodeResourceUpload message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqNodeResourceUpload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeResourceUpload; /** * Verifies a ReqNodeResourceUpload message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeResourceUpload message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns ReqNodeResourceUpload */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeResourceUpload; /** * Creates a plain object from a ReqNodeResourceUpload message. Also converts values to other types if specified. * @param message ReqNodeResourceUpload * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeResourceUpload, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeResourceUpload to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeResourceUpload. 
 */ interface IAnsNodeResourceUpload { /** AnsNodeResourceUpload NodeResource */ NodeResource?: (protoManage.INodeResource|null); } /* NOTE(review): generated declaration — the upload answer returns the stored resource's metadata (NodeResource), not the bytes; presumably the server-assigned record. Confirm against the .proto source. */ /** Represents an AnsNodeResourceUpload. */ class AnsNodeResourceUpload implements IAnsNodeResourceUpload { /** * Constructs a new AnsNodeResourceUpload. * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeResourceUpload); /** AnsNodeResourceUpload NodeResource. */ public NodeResource?: (protoManage.INodeResource|null); /** * Creates a new AnsNodeResourceUpload instance using the specified properties. * @param [properties] Properties to set * @returns AnsNodeResourceUpload instance */ public static create(properties?: protoManage.IAnsNodeResourceUpload): protoManage.AnsNodeResourceUpload; /** * Encodes the specified AnsNodeResourceUpload message. Does not implicitly {@link protoManage.AnsNodeResourceUpload.verify|verify} messages. * @param message AnsNodeResourceUpload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeResourceUpload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeResourceUpload message, length delimited. Does not implicitly {@link protoManage.AnsNodeResourceUpload.verify|verify} messages. * @param message AnsNodeResourceUpload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeResourceUpload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeResourceUpload message from the specified reader or buffer. 
 * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeResourceUpload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeResourceUpload; /** * Decodes an AnsNodeResourceUpload message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeResourceUpload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeResourceUpload; /** * Verifies an AnsNodeResourceUpload message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeResourceUpload message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeResourceUpload */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeResourceUpload; /** * Creates a plain object from an AnsNodeResourceUpload message. Also converts values to other types if specified. * @param message AnsNodeResourceUpload * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeResourceUpload, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeResourceUpload to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a ReqNodeResourceDownload. 
 */ interface IReqNodeResourceDownload { /** ReqNodeResourceDownload NodeResource */ NodeResource?: (protoManage.INodeResource|null); } /** Represents a ReqNodeResourceDownload. */ class ReqNodeResourceDownload implements IReqNodeResourceDownload { /** * Constructs a new ReqNodeResourceDownload. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqNodeResourceDownload); /** ReqNodeResourceDownload NodeResource. */ public NodeResource?: (protoManage.INodeResource|null); /** * Creates a new ReqNodeResourceDownload instance using the specified properties. * @param [properties] Properties to set * @returns ReqNodeResourceDownload instance */ public static create(properties?: protoManage.IReqNodeResourceDownload): protoManage.ReqNodeResourceDownload; /** * Encodes the specified ReqNodeResourceDownload message. Does not implicitly {@link protoManage.ReqNodeResourceDownload.verify|verify} messages. * @param message ReqNodeResourceDownload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqNodeResourceDownload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqNodeResourceDownload message, length delimited. Does not implicitly {@link protoManage.ReqNodeResourceDownload.verify|verify} messages. * @param message ReqNodeResourceDownload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeResourceDownload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeResourceDownload message from the specified reader or buffer. 
 * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeResourceDownload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeResourceDownload; /** * Decodes a ReqNodeResourceDownload message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqNodeResourceDownload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeResourceDownload; /** * Verifies a ReqNodeResourceDownload message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeResourceDownload message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns ReqNodeResourceDownload */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeResourceDownload; /** * Creates a plain object from a ReqNodeResourceDownload message. Also converts values to other types if specified. * @param message ReqNodeResourceDownload * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeResourceDownload, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeResourceDownload to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeResourceDownload. 
 */ interface IAnsNodeResourceDownload { /** AnsNodeResourceDownload Data */ Data?: (Uint8Array|null); } /* NOTE(review): generated declaration — the download answer is the inverse of ReqNodeResourceUpload: raw resource bytes in Data. Whole payloads travel in one message; no streaming/chunking is visible in this declaration. */ /** Represents an AnsNodeResourceDownload. */ class AnsNodeResourceDownload implements IAnsNodeResourceDownload { /** * Constructs a new AnsNodeResourceDownload. * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeResourceDownload); /** AnsNodeResourceDownload Data. */ public Data: Uint8Array; /** * Creates a new AnsNodeResourceDownload instance using the specified properties. * @param [properties] Properties to set * @returns AnsNodeResourceDownload instance */ public static create(properties?: protoManage.IAnsNodeResourceDownload): protoManage.AnsNodeResourceDownload; /** * Encodes the specified AnsNodeResourceDownload message. Does not implicitly {@link protoManage.AnsNodeResourceDownload.verify|verify} messages. * @param message AnsNodeResourceDownload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeResourceDownload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeResourceDownload message, length delimited. Does not implicitly {@link protoManage.AnsNodeResourceDownload.verify|verify} messages. * @param message AnsNodeResourceDownload message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeResourceDownload, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeResourceDownload message from the specified reader or buffer. 
 * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeResourceDownload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeResourceDownload; /** * Decodes an AnsNodeResourceDownload message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeResourceDownload * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeResourceDownload; /** * Verifies an AnsNodeResourceDownload message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeResourceDownload message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeResourceDownload */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeResourceDownload; /** * Creates a plain object from an AnsNodeResourceDownload message. Also converts values to other types if specified. * @param message AnsNodeResourceDownload * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeResourceDownload, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeResourceDownload to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of a ReqNodeTest. 
 */ interface IReqNodeTest { /** ReqNodeTest ID */ ID?: (number|null); /** ReqNodeTest Type */ Type?: (number|null); /** ReqNodeTest Message */ Message?: (string|null); /** ReqNodeTest State */ State?: (protoManage.State|null); } /* NOTE(review): generated declaration — Type is a bare number with no enum; its meaning is not visible here, so check the .proto comments before relying on specific values. AnsNodeTest below is an empty ack-style message. */ /** Represents a ReqNodeTest. */ class ReqNodeTest implements IReqNodeTest { /** * Constructs a new ReqNodeTest. * @param [properties] Properties to set */ constructor(properties?: protoManage.IReqNodeTest); /** ReqNodeTest ID. */ public ID: number; /** ReqNodeTest Type. */ public Type: number; /** ReqNodeTest Message. */ public Message: string; /** ReqNodeTest State. */ public State: protoManage.State; /** * Creates a new ReqNodeTest instance using the specified properties. * @param [properties] Properties to set * @returns ReqNodeTest instance */ public static create(properties?: protoManage.IReqNodeTest): protoManage.ReqNodeTest; /** * Encodes the specified ReqNodeTest message. Does not implicitly {@link protoManage.ReqNodeTest.verify|verify} messages. * @param message ReqNodeTest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IReqNodeTest, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified ReqNodeTest message, length delimited. Does not implicitly {@link protoManage.ReqNodeTest.verify|verify} messages. * @param message ReqNodeTest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IReqNodeTest, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReqNodeTest message from the specified reader or buffer. 
 * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns ReqNodeTest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.ReqNodeTest; /** * Decodes a ReqNodeTest message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns ReqNodeTest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.ReqNodeTest; /** * Verifies a ReqNodeTest message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates a ReqNodeTest message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns ReqNodeTest */ public static fromObject(object: { [k: string]: any }): protoManage.ReqNodeTest; /** * Creates a plain object from a ReqNodeTest message. Also converts values to other types if specified. * @param message ReqNodeTest * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.ReqNodeTest, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReqNodeTest to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } /** Properties of an AnsNodeTest. */ interface IAnsNodeTest { } /** Represents an AnsNodeTest. */ class AnsNodeTest implements IAnsNodeTest { /** * Constructs a new AnsNodeTest. 
 * @param [properties] Properties to set */ constructor(properties?: protoManage.IAnsNodeTest); /** * Creates a new AnsNodeTest instance using the specified properties. * @param [properties] Properties to set * @returns AnsNodeTest instance */ public static create(properties?: protoManage.IAnsNodeTest): protoManage.AnsNodeTest; /** * Encodes the specified AnsNodeTest message. Does not implicitly {@link protoManage.AnsNodeTest.verify|verify} messages. * @param message AnsNodeTest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encode(message: protoManage.IAnsNodeTest, writer?: $protobuf.Writer): $protobuf.Writer; /** * Encodes the specified AnsNodeTest message, length delimited. Does not implicitly {@link protoManage.AnsNodeTest.verify|verify} messages. * @param message AnsNodeTest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ public static encodeDelimited(message: protoManage.IAnsNodeTest, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes an AnsNodeTest message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand * @returns AnsNodeTest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): protoManage.AnsNodeTest; /** * Decodes an AnsNodeTest message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from * @returns AnsNodeTest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): protoManage.AnsNodeTest; /** * Verifies an AnsNodeTest message. 
 * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** * Creates an AnsNodeTest message from a plain object. Also converts values to their respective internal types. * @param object Plain object * @returns AnsNodeTest */ public static fromObject(object: { [k: string]: any }): protoManage.AnsNodeTest; /** * Creates a plain object from an AnsNodeTest message. Also converts values to other types if specified. * @param message AnsNodeTest * @param [options] Conversion options * @returns Plain object */ public static toObject(message: protoManage.AnsNodeTest, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this AnsNodeTest to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } }