repo_id
stringclasses
279 values
file_path
stringlengths
43
179
content
stringlengths
1
4.18M
__index_level_0__
int64
0
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/constants.ts
import { BN } from '@coral-xyz/anchor'; import { Buffer } from 'buffer'; import { ConfirmOptions, PublicKey } from '@solana/web3.js';
/**
 * Modulus of the field used for compressed-account hashes/addresses.
 * Values must be strictly smaller than this to be valid field elements.
 * (Presumably the BN254 scalar-field prime — confirm against the circuit.)
 */
export const FIELD_SIZE = new BN(
    '21888242871839275222246405745257275088548364400416034343698204186575808495617',
);
/** Exclusive upper bound for valid compressed-account addresses. */
export const HIGHEST_ADDRESS_PLUS_ONE = new BN(
    '452312848583266388373324160190187140051835877600158453279131187530910662655',
);
// TODO: implement properly
/** SPL noop program id (used for logging). */
export const noopProgram = 'noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV';
/** Light system program id. */
export const lightProgram = 'SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7';
export const accountCompressionProgram = // also: merkletree program
    'compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq';
export const getRegisteredProgramPda = () =>
    new PublicKey('35hkDgaAKwMCaxRz2ocSZ6NaUrtKkyNqU6c4RV3tYJRh'); // TODO: better labelling. gov authority pda
/** Derives the CPI authority PDA of the Light system program. */
export const getAccountCompressionAuthority = () =>
    PublicKey.findProgramAddressSync(
        [Buffer.from('cpi_authority')],
        new PublicKey(
            // TODO: can add check to ensure its consistent with the idl
            lightProgram,
        ),
    )[0];
/** Static accounts required by most compression instructions, as an array. */
export const defaultStaticAccounts = () => [
    new PublicKey(getRegisteredProgramPda()),
    new PublicKey(noopProgram),
    new PublicKey(accountCompressionProgram),
    new PublicKey(getAccountCompressionAuthority()),
];
/** Same accounts as {@link defaultStaticAccounts}, keyed by role. */
export const defaultStaticAccountsStruct = () => {
    return {
        registeredProgramPda: new PublicKey(getRegisteredProgramPda()),
        noopProgram: new PublicKey(noopProgram),
        accountCompressionProgram: new PublicKey(accountCompressionProgram),
        accountCompressionAuthority: new PublicKey(
            getAccountCompressionAuthority(),
        ),
        cpiSignatureAccount: null,
    };
};
/** State/address tree accounts deployed for local testing. */
export const defaultTestStateTreeAccounts = () => {
    return {
        nullifierQueue: new PublicKey(nullifierQueuePubkey),
        merkleTree: new PublicKey(merkletreePubkey),
        merkleTreeHeight: DEFAULT_MERKLE_TREE_HEIGHT,
        addressTree: new PublicKey(addressTree),
        addressQueue: new PublicKey(addressQueue),
    };
};
export const nullifierQueuePubkey =
    'nfq1NvQDJ2GEgnS8zt9prAe8rjjpAW1zFkrvZoBR148';
export const merkletreePubkey = 'smt1NamzXdq4AMqS2fS2F1i5KTYPZRhoHgWx38d8WsT';
export const addressTree = 'amt1Ayt45jfbdw5YSo7iz6WZxUmnZsQTYXy82hVwyC2';
export const addressQueue = 'aq1S9z4reTSQAdgWHGD2zDaS39sjGrAxbR31vxJ2F4F';
/** Default confirmation options used by this SDK. */
export const confirmConfig: ConfirmOptions = {
    commitment: 'confirmed',
    preflightCommitment: 'confirmed',
};
export const DEFAULT_MERKLE_TREE_HEIGHT = 26;
export const DEFAULT_MERKLE_TREE_ROOTS = 2800;
/** Threshold (per asset) at which new in-UTXOs get merged, in order to reduce UTXO pool size */
export const UTXO_MERGE_THRESHOLD = 20;
export const UTXO_MERGE_MAXIMUM = 10;
/**
 * Threshold after which the currently used transaction Merkle tree is switched
 * to the next one (95% of the tree's 2^height leaf capacity).
 */
export const TRANSACTION_MERKLE_TREE_ROLLOVER_THRESHOLD = new BN(
    Math.floor(2 ** DEFAULT_MERKLE_TREE_HEIGHT * 0.95),
);
/**
 * Fee to provide continuous funding for the state Merkle tree.
 * Once the state Merkle tree is at 95% capacity the accumulated fees
 * will be used to fund the next state Merkle tree with the same parameters.
 *
 * Is charged per output compressed account.
 */
export const STATE_MERKLE_TREE_ROLLOVER_FEE = new BN(300);
/**
 * Fee to provide continuous funding for the address queue and address Merkle tree.
 * Once the address Merkle tree is at 95% capacity the accumulated fees
 * will be used to fund the next address queue and address tree with the same parameters.
 *
 * Is charged per newly created address.
 */
export const ADDRESS_QUEUE_ROLLOVER_FEE = new BN(392);
/**
 * Is charged if the transaction nullifies at least one compressed account.
 */
export const STATE_MERKLE_TREE_NETWORK_FEE = new BN(5000);
/**
 * Is charged if the transaction creates at least one address.
 */
export const ADDRESS_TREE_NETWORK_FEE = new BN(5000);
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/index.ts
// Barrel file: re-exports the public API of stateless.js.
// NOTE(review): `export *` order matters for name shadowing — do not reorder.
export * from './actions';
export * from './idls';
export * from './instruction';
export * from './programs';
export * from './state';
export * from './utils';
export * from './wallet';
export * from './constants';
export * from './errors';
export * from './rpc-interface';
export * from './rpc';
export * from './test-helpers';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/rpc-interface.ts
// Types and superstruct schemas for the compression (photon indexer) RPC API.
import { PublicKey, MemcmpFilter, DataSlice } from '@solana/web3.js';
import {
    type as pick,
    number,
    string,
    array,
    literal,
    union,
    coerce,
    instance,
    create,
    unknown,
    any,
    nullable,
    Struct,
} from 'superstruct';
import {
    BN254,
    createBN254,
    CompressedProof,
    CompressedAccountWithMerkleContext,
    MerkleContextWithMerkleProof,
    bn,
    TokenData,
} from './state';
import { BN } from '@coral-xyz/anchor';
export interface LatestNonVotingSignatures {
    context: { slot: number };
    value: {
        items: {
            signature: string;
            slot: number;
            blockTime: number;
            error: string | null;
        }[];
    };
}
export interface GetCompressedAccountsByOwnerConfig {
    filters?: GetCompressedAccountsFilter[];
    dataSlice?: DataSlice;
    cursor?: string;
    limit?: BN;
}
export interface CompressedMintTokenHolders {
    balance: BN;
    owner: PublicKey;
}
export interface LatestNonVotingSignaturesPaginated {
    context: { slot: number };
    value: {
        items: {
            signature: string;
            slot: number;
            blockTime: number;
        }[];
        cursor: string | null;
    };
}
export interface SignatureWithMetadata {
    blockTime: number;
    signature: string;
    slot: number;
}
export interface HashWithTree {
    hash: BN254;
    tree: PublicKey;
    queue: PublicKey;
}
export interface AddressWithTree {
    address: BN254;
    tree: PublicKey;
    queue: PublicKey;
}
export interface CompressedTransaction {
    compressionInfo: {
        closedAccounts: {
            account: CompressedAccountWithMerkleContext;
            maybeTokenData: TokenData | null;
        }[];
        openedAccounts: {
            account: CompressedAccountWithMerkleContext;
            maybeTokenData: TokenData | null;
        }[];
        preTokenBalances?: {
            owner: PublicKey;
            mint: PublicKey;
            amount: BN;
        }[];
        postTokenBalances?: {
            owner: PublicKey;
            mint: PublicKey;
            amount: BN;
        }[];
    };
    transaction: any;
}
export interface HexBatchInputsForProver {
    'input-compressed-accounts': HexInputsForProver[];
}
export interface HexInputsForProver {
    root: string;
    pathIndex: number;
    pathElements: string[];
    leaf: string;
}
// TODO: Rename Compressed -> ValidityProof
export type CompressedProofWithContext = {
    compressedProof: CompressedProof;
    roots: BN[];
    rootIndices: number[];
    leafIndices: number[];
    leaves: BN[];
    merkleTrees: PublicKey[];
    nullifierQueues: PublicKey[];
};
export interface GetCompressedTokenAccountsByOwnerOrDelegateOptions {
    mint?: PublicKey;
    cursor?: string;
    limit?: BN;
}
export type TokenBalance = { balance: BN; mint: PublicKey };
/**
 * **Cursor** is a unique identifier for a page of results by which the next page can be fetched.
 *
 * **Limit** is the maximum number of results to return per page.
 */
export interface PaginatedOptions {
    cursor?: string;
    limit?: BN;
}
/**
 * Note, DataSizeFilter is currently not available.
 */
export type GetCompressedAccountsFilter = MemcmpFilter; // | DataSizeFilter;
export type GetCompressedAccountConfig = {
    encoding?: string;
};
export type GetCompressedAccountsConfig = {
    dataSlice: DataSlice;
    filters?: GetCompressedAccountsFilter[];
};
export interface ParsedTokenAccount {
    compressedAccount: CompressedAccountWithMerkleContext;
    parsed: TokenData;
}
export type WithContext<T> = {
    /** context */
    context: {
        slot: number;
    };
    /** response value */
    value: T;
};
export type WithCursor<T> = {
    /** context */
    cursor: string | null;
    /** response value */
    items: T;
};
/**
 * @internal
 * Parses a base58 string into a PublicKey.
 */
const PublicKeyFromString = coerce(
    instance(PublicKey),
    string(),
    value => new PublicKey(value),
);
/**
 * @internal
 * Parses a base58 string into its 32-byte number array.
 */
const ArrayFromString = coerce(instance(Array<number>), string(), value =>
    Array.from(new PublicKey(value).toBytes()),
);
/**
 * @internal
 * Parses a base58 string into a BN254 field element.
 */
const BN254FromString = coerce(instance(BN), string(), value => {
    return createBN254(value, 'base58');
});
const BNFromInt = coerce(instance(BN), number(), value => {
    // Check if the number is safe
    if (Number.isSafeInteger(value)) {
        return bn(value);
    } else {
        // Convert to string if the number is unsafe
        return bn(value.toString(), 10);
    }
});
/**
 * @internal
 * NOTE(review): maps '' to null even though the target struct is string() —
 * confirm consumers expect null for empty account data.
 */
const Base64EncodedCompressedAccountDataResult = coerce(
    string(),
    string(),
    value => (value === '' ? null : value),
);
/**
 * @internal
 * JSON-RPC 2.0 envelope: success (result) or failure (error) variant.
 */
export function createRpcResult<T, U>(result: Struct<T, U>) {
    return union([
        pick({
            jsonrpc: literal('2.0'),
            id: string(),
            result,
        }),
        pick({
            jsonrpc: literal('2.0'),
            id: string(),
            error: pick({
                code: unknown(),
                message: string(),
                data: nullable(any()),
            }),
        }),
    ]) as Struct<RpcResult<T>, null>;
}
/**
 * @internal
 */
const UnknownRpcResult = createRpcResult(unknown());
/**
 * @internal
 * Validates the RPC envelope first, then coerces `result` with `schema`.
 */
export function jsonRpcResult<T, U>(schema: Struct<T, U>) {
    return coerce(createRpcResult(schema), UnknownRpcResult, value => {
        if ('error' in value) {
            return value as RpcResultError;
        } else {
            return {
                ...value,
                result: create(value.result, schema),
            } as RpcResultSuccess<T>;
        }
    }) as Struct<RpcResult<T>, null>;
}
// Add this type for the context wrapper
export type WithRpcContext<T> = {
    context: {
        slot: number;
    };
    value: T;
};
/**
 * @internal
 * Like {@link jsonRpcResult} but for responses wrapped in { context, value }.
 */
export function jsonRpcResultAndContext<T, U>(value: Struct<T, U>) {
    return jsonRpcResult(
        pick({
            context: pick({
                slot: number(),
            }),
            value,
        }),
    ) as Struct<RpcResult<WithRpcContext<T>>, null>;
}
/**
 * @internal
 */
export const CompressedAccountResult = pick({
    address: nullable(ArrayFromString),
    hash: BN254FromString,
    data: nullable(
        pick({
            data: Base64EncodedCompressedAccountDataResult,
            dataHash: BN254FromString,
            discriminator: BNFromInt,
        }),
    ),
    lamports: BNFromInt,
    owner: PublicKeyFromString,
    leafIndex: number(),
    tree: PublicKeyFromString,
    seq: nullable(BNFromInt),
    slotCreated: BNFromInt,
});
export const TokenDataResult = pick({
    mint: PublicKeyFromString,
    owner: PublicKeyFromString,
    amount: BNFromInt,
    delegate: nullable(PublicKeyFromString),
    state: string(),
});
/**
 * @internal
 */
export const CompressedTokenAccountResult = pick({
    tokenData: TokenDataResult,
    account: CompressedAccountResult,
});
/**
 * @internal
 */
export const MultipleCompressedAccountsResult = pick({
    items: array(CompressedAccountResult),
});
/**
 * @internal
 */
export const CompressedAccountsByOwnerResult = pick({
    items: array(CompressedAccountResult),
    cursor: nullable(string()),
});
/**
 * @internal
 */
export const CompressedTokenAccountsByOwnerOrDelegateResult = pick({
    items: array(CompressedTokenAccountResult),
    cursor: nullable(string()),
});
/**
 * @internal
 */
export const SlotResult = number();
/**
 * @internal
 */
export const HealthResult = string();
/**
 * @internal
 */
export const LatestNonVotingSignaturesResult = pick({
    items: array(
        pick({
            signature: string(),
            slot: number(),
            blockTime: number(),
            error: nullable(string()),
        }),
    ),
});
/**
 * @internal
 */
export const LatestNonVotingSignaturesResultPaginated = pick({
    items: array(
        pick({
            signature: string(),
            slot: number(),
            blockTime: number(),
        }),
    ),
    cursor: nullable(string()),
});
/**
 * @internal
 * NOTE(review): name is missing an 'l' (Merke) — renaming would break consumers.
 */
export const MerkeProofResult = pick({
    hash: BN254FromString,
    leafIndex: number(),
    merkleTree: PublicKeyFromString,
    proof: array(BN254FromString),
    rootSeq: number(),
    root: BN254FromString,
});
/**
 * @internal
 */
export const NewAddressProofResult = pick({
    address: BN254FromString,
    nextIndex: number(),
    merkleTree: PublicKeyFromString,
    proof: array(BN254FromString), // this is: merkleProofHashedIndexedElementLeaf
    rootSeq: number(),
    root: BN254FromString,
    lowerRangeAddress: BN254FromString, // this is: leafLowerRangeValue.
    higherRangeAddress: BN254FromString, // this is: leafHigherRangeValue
    lowElementLeafIndex: number(), // this is: indexHashedIndexedElementLeaf
});
/**
 * @internal
 */
const CompressedProofResult = pick({
    a: array(number()),
    b: array(number()),
    c: array(number()),
});
/**
 * @internal
 */
export const ValidityProofResult = pick({
    compressedProof: CompressedProofResult,
    leafIndices: array(number()),
    leaves: array(BN254FromString),
    rootIndices: array(number()),
    roots: array(BN254FromString),
    merkleTrees: array(PublicKeyFromString),
    // TODO: enable nullifierQueues
    // nullifierQueues: array(PublicKeyFromString),
});
/**
 * @internal
 */
export const MultipleMerkleProofsResult = array(MerkeProofResult);
/**
 * @internal
 */
export const BalanceResult = pick({
    amount: BNFromInt,
});
export const NativeBalanceResult = BNFromInt;
export const TokenBalanceResult = pick({
    balance: BNFromInt,
    mint: PublicKeyFromString,
});
export const TokenBalanceListResult = pick({
    tokenBalances: array(TokenBalanceResult),
    cursor: nullable(string()),
});
export const TokenBalanceListResultV2 = pick({
    items: array(TokenBalanceResult),
    cursor: nullable(string()),
});
export const CompressedMintTokenHoldersResult = pick({
    cursor: nullable(string()),
    items: array(
        pick({
            balance: BNFromInt,
            owner: PublicKeyFromString,
        }),
    ),
});
export const AccountProofResult = pick({
    hash: array(number()),
    root: array(number()),
    proof: array(array(number())),
});
// Converts a date-string blockTime to milliseconds since epoch.
export const toUnixTimestamp = (blockTime: string): number => {
    return new Date(blockTime).getTime();
};
export const SignatureListResult = pick({
    items: array(
        pick({
            blockTime: number(),
            signature: string(),
            slot: number(),
        }),
    ),
});
export const SignatureListWithCursorResult = pick({
    items: array(
        pick({
            blockTime: number(),
            signature: string(),
            slot: number(),
        }),
    ),
    cursor: nullable(string()),
});
export const CompressedTransactionResult = pick({
    compressionInfo: pick({
        closedAccounts: array(
            pick({
                account: CompressedAccountResult,
                optionalTokenData: nullable(TokenDataResult),
            }),
        ),
        openedAccounts: array(
            pick({
                account: CompressedAccountResult,
                optionalTokenData: nullable(TokenDataResult),
            }),
        ),
    }),
    /// TODO: add transaction struct
    /// https://github.com/solana-labs/solana/blob/27eff8408b7223bb3c4ab70523f8a8dca3ca6645/transaction-status/src/lib.rs#L1061
    transaction: any(),
});
/** Interface every compression-aware RPC implementation must provide. */
export interface CompressionApiInterface {
    getCompressedAccount(
        address?: BN254,
        hash?: BN254,
    ): Promise<CompressedAccountWithMerkleContext | null>;
    getCompressedBalance(address?: BN254, hash?: BN254): Promise<BN | null>;
    getCompressedBalanceByOwner(owner: PublicKey): Promise<BN>;
    getCompressedAccountProof(
        hash: BN254,
    ): Promise<MerkleContextWithMerkleProof>;
    getMultipleCompressedAccounts(
        hashes: BN254[],
    ): Promise<CompressedAccountWithMerkleContext[]>;
    getMultipleCompressedAccountProofs(
        hashes: BN254[],
    ): Promise<MerkleContextWithMerkleProof[]>;
    getValidityProof(
        hashes: BN254[],
        newAddresses: BN254[],
    ): Promise<CompressedProofWithContext>;
    getValidityProofV0(
        hashes: HashWithTree[],
        newAddresses: AddressWithTree[],
    ): Promise<CompressedProofWithContext>;
    getValidityProofAndRpcContext(
        hashes: HashWithTree[],
        newAddresses: AddressWithTree[],
    ): Promise<WithContext<CompressedProofWithContext>>;
    getCompressedAccountsByOwner(
        owner: PublicKey,
        config?: GetCompressedAccountsByOwnerConfig,
    ): Promise<WithCursor<CompressedAccountWithMerkleContext[]>>;
    getCompressedMintTokenHolders(
        mint: PublicKey,
        options?: PaginatedOptions,
    ): Promise<WithContext<WithCursor<CompressedMintTokenHolders[]>>>;
    getCompressedTokenAccountsByOwner(
        publicKey: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithCursor<ParsedTokenAccount[]>>;
    getCompressedTokenAccountsByDelegate(
        delegate: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithCursor<ParsedTokenAccount[]>>;
    getCompressedTokenAccountBalance(hash: BN254): Promise<{ amount: BN }>;
    getCompressedTokenBalancesByOwner(
        publicKey: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithCursor<TokenBalance[]>>;
    getCompressedTokenBalancesByOwnerV2(
        publicKey: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithContext<WithCursor<TokenBalance[]>>>;
    getTransactionWithCompressionInfo(
        signature: string,
    ): Promise<CompressedTransaction | null>;
    getCompressionSignaturesForAccount(
        hash: BN254,
    ): Promise<SignatureWithMetadata[]>;
    getCompressionSignaturesForAddress(
        address: PublicKey,
        options?: PaginatedOptions,
    ): Promise<WithCursor<SignatureWithMetadata[]>>;
    getCompressionSignaturesForOwner(
        owner: PublicKey,
        options?: PaginatedOptions,
    ): Promise<WithCursor<SignatureWithMetadata[]>>;
    getCompressionSignaturesForTokenOwner(
        owner: PublicKey,
        options?: PaginatedOptions,
    ): Promise<WithCursor<SignatureWithMetadata[]>>;
    getLatestNonVotingSignatures(
        limit?: number,
        cursor?: string,
    ): Promise<LatestNonVotingSignatures>;
    getLatestCompressionSignatures(
        cursor?: string,
        limit?: number,
    ): Promise<LatestNonVotingSignaturesPaginated>;
    getIndexerHealth(): Promise<string>;
    getIndexerSlot(): Promise<number>;
}
// Public types for consumers
export type RpcResultSuccess<T> = {
    jsonrpc: '2.0';
    id: string;
    result: T;
};
export type RpcResultError = {
    jsonrpc: '2.0';
    id: string;
    error: {
        code: unknown;
        message: string;
        data?: any;
    };
};
export type RpcResult<T> = RpcResultSuccess<T> | RpcResultError;
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/validation.ts
import { BN } from '@coral-xyz/anchor';
import {
    CompressedAccount,
    CompressedAccountWithMerkleContext,
    bn,
} from '../state';

/**
 * Throws when `balance` is negative, i.e. the transfer would overdraw.
 *
 * @param balance remaining balance after the transfer
 */
export const validateSufficientBalance = (balance: BN) => {
    const zero = bn(0);
    if (balance.lt(zero)) {
        throw new Error('Not enough balance for transfer');
    }
};

/**
 * Ensures every provided account shares the owner of the first account.
 *
 * @param compressedAccounts accounts to check; must be non-empty
 * @throws if the list is empty or any owner differs
 */
export const validateSameOwner = (
    compressedAccounts:
        | CompressedAccount[]
        | CompressedAccountWithMerkleContext[],
) => {
    if (!compressedAccounts.length) {
        throw new Error('No accounts provided for validation');
    }
    const expectedOwner = compressedAccounts[0].owner;
    for (const account of compressedAccounts) {
        if (!account.owner.equals(expectedOwner)) {
            throw new Error('All input accounts must have the same owner');
        }
    }
};
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/pipe.ts
/**
 * Left-to-right function composition: applies `initialFunction` to the input,
 * then threads the result through each of `functions` in order.
 */
export function pipe<T, R>(
    initialFunction: (arg: T) => R,
    ...functions: ((arg: R) => R)[]
): (initialValue: T) => R {
    return (initialValue: T): R => {
        let accumulator = initialFunction(initialValue);
        for (const fn of functions) {
            accumulator = fn(accumulator);
        }
        return accumulator;
    };
}
//@ts-ignore
if (import.meta.vitest) {
    //@ts-ignore
    const { it, expect, describe } = import.meta.vitest;
    describe('pipe', () => {
        it('should return the result of applying all fns to the initial value', () => {
            const addOne = (x: number) => x + 1;
            const multiplyByTwo = (x: number) => x * 2;
            const subtractThree = (x: number) => x - 3;
            const addOneMultiplyByTwoSubtractThree = pipe(
                addOne,
                multiplyByTwo,
                subtractThree,
            );
            expect(addOneMultiplyByTwoSubtractThree(5)).toBe(9);
        });
    });
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/airdrop.ts
import {
    Commitment,
    Connection,
    PublicKey,
    TransactionConfirmationStrategy,
} from '@solana/web3.js';

/**
 * Requests a SOL airdrop to `recipientPublicKey` and waits for confirmation.
 *
 * @returns the airdrop transaction signature
 */
export async function airdropSol({
    connection,
    lamports,
    recipientPublicKey,
}: {
    connection: Connection;
    lamports: number;
    recipientPublicKey: PublicKey;
}) {
    const signature = await connection.requestAirdrop(
        recipientPublicKey,
        lamports,
    );
    await confirmTransaction(connection, signature);
    return signature;
}

/**
 * Confirms `signature` against the latest blockhash at the given commitment.
 *
 * @param connection   connection to confirm against
 * @param signature    transaction signature to confirm
 * @param confirmation commitment level (default: 'confirmed')
 */
export async function confirmTransaction(
    connection: Connection,
    signature: string,
    confirmation: Commitment = 'confirmed',
) {
    const { blockhash, lastValidBlockHeight } =
        await connection.getLatestBlockhash(confirmation);
    const strategy: TransactionConfirmationStrategy = {
        signature: signature.toString(),
        lastValidBlockHeight,
        blockhash,
    };
    return await connection.confirmTransaction(strategy, confirmation);
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/sleep.ts
// zzz
/** Returns a promise that resolves after `ms` milliseconds. */
export function sleep(ms: number): Promise<void> {
    return new Promise<void>(resolve => {
        setTimeout(() => resolve(), ms);
    });
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/calculate-compute-unit-price.ts
/**
 * @param targetLamports - Target priority fee in lamports
 * @param computeUnits - Expected compute units used by the transaction
 * @returns microLamports per compute unit (use in
 * `ComputeBudgetProgram.setComputeUnitPrice`)
 */
export function calculateComputeUnitPrice(
    targetLamports: number,
    computeUnits: number,
): number {
    // 1 lamport == 1_000_000 microLamports; round up so the target is met.
    const MICRO_LAMPORTS_PER_LAMPORT = 1_000_000;
    const microLamportsTotal = targetLamports * MICRO_LAMPORTS_PER_LAMPORT;
    return Math.ceil(microLamportsTotal / computeUnits);
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/parse-validity-proof.ts
// Parsing and compression of gnark (Groth16) validity proofs for on-chain use.
import { BN } from '@coral-xyz/anchor';
import { FIELD_SIZE } from '../constants';
import { CompressedProof } from '../state';
// Raw proof as emitted by the gnark prover (hex strings).
interface GnarkProofJson {
    ar: string[];
    bs: string[][];
    krs: string[];
}
// Uncompressed proof points: a (64B), b (128B), c (64B), big-endian.
type ProofABC = {
    a: Uint8Array;
    b: Uint8Array;
    c: Uint8Array;
};
/** Dummy proof with the correct shape (32/64/32 bytes), for tests/mocks. */
export const placeholderValidityProof = () => ({
    a: Array.from({ length: 32 }, (_, i) => i + 1),
    b: Array.from({ length: 64 }, (_, i) => i + 1),
    c: Array.from({ length: 32 }, (_, i) => i + 1),
});
/** Throws unless proof components have lengths a=32, b=64, c=32. */
export const checkValidityProofShape = (proof: CompressedProof) => {
    if (
        proof.a.length !== 32 ||
        proof.b.length !== 64 ||
        proof.c.length !== 32
    ) {
        throw new Error('ValidityProof has invalid shape');
    }
};
/**
 * Converts a gnark JSON proof into concatenated big-endian byte arrays:
 * a = ar.x||ar.y, b = bs[0][0]||bs[0][1]||bs[1][0]||bs[1][1], c = krs.x||krs.y.
 */
export function proofFromJsonStruct(json: GnarkProofJson): ProofABC {
    const proofAX = deserializeHexStringToBeBytes(json.ar[0]);
    const proofAY = deserializeHexStringToBeBytes(json.ar[1]);
    const proofA: Uint8Array = new Uint8Array([...proofAX, ...proofAY]);
    const proofBX0 = deserializeHexStringToBeBytes(json.bs[0][0]);
    const proofBX1 = deserializeHexStringToBeBytes(json.bs[0][1]);
    const proofBY0 = deserializeHexStringToBeBytes(json.bs[1][0]);
    const proofBY1 = deserializeHexStringToBeBytes(json.bs[1][1]);
    const proofB: Uint8Array = new Uint8Array([
        ...proofBX0,
        ...proofBX1,
        ...proofBY0,
        ...proofBY1,
    ]);
    const proofCX = deserializeHexStringToBeBytes(json.krs[0]);
    const proofCY = deserializeHexStringToBeBytes(json.krs[1]);
    const proofC: Uint8Array = new Uint8Array([...proofCX, ...proofCY]);
    const proofABC: ProofABC = { a: proofA, b: proofB, c: proofC };
    return proofABC;
}
// TODO: add unit test for negation
// TODO: test if LE BE issue. unit test
/**
 * Compresses an uncompressed proof to x-coordinates only, encoding the sign of
 * the (dropped) y coordinate in the top bit of the first byte.
 * Proof A is negated in the process (y -> FIELD_SIZE - y), hence the inverted
 * positivity flag below.
 */
export function negateAndCompressProof(proof: ProofABC): CompressedProof {
    const proofA = proof.a;
    const proofB = proof.b;
    const proofC = proof.c;
    const aXElement = proofA.slice(0, 32);
    const aYElement = new BN(proofA.slice(32, 64), 32, 'be');
    /// Negate
    // NOTE(review): inverted on purpose — the negated point's y is positive
    // exactly when the original y is not. Confirm against the prover spec.
    const proofAIsPositive = yElementIsPositiveG1(aYElement) ? false : true;
    /// First byte of proofA is the bitmask
    aXElement[0] = addBitmaskToByte(aXElement[0], proofAIsPositive);
    const bXElement = proofB.slice(0, 64);
    const bYElement = proofB.slice(64, 128);
    const proofBIsPositive = yElementIsPositiveG2(
        new BN(bYElement.slice(0, 32), 32, 'be'),
        new BN(bYElement.slice(32, 64), 32, 'be'),
    );
    bXElement[0] = addBitmaskToByte(bXElement[0], proofBIsPositive);
    const cXElement = proofC.slice(0, 32);
    const cYElement = proofC.slice(32, 64);
    const proofCIsPositive = yElementIsPositiveG1(new BN(cYElement, 32, 'be'));
    cXElement[0] = addBitmaskToByte(cXElement[0], proofCIsPositive);
    const compressedProof: CompressedProof = {
        a: Array.from(aXElement),
        b: Array.from(bXElement),
        c: Array.from(cXElement),
    };
    return compressedProof;
}
// Decodes a hex string (with or without 0x prefix) to 32 big-endian bytes.
function deserializeHexStringToBeBytes(hexStr: string): Uint8Array {
    // Using BN for simpler conversion from hex string to byte array
    const bn = new BN(
        hexStr.startsWith('0x') ? hexStr.substring(2) : hexStr,
        'hex',
    );
    return new Uint8Array(bn.toArray('be', 32));
}
// y is "positive" iff y <= -y (mod FIELD_SIZE), i.e. in the lower half.
function yElementIsPositiveG1(yElement: BN): boolean {
    return yElement.lte(FIELD_SIZE.sub(yElement));
}
// Lexicographic positivity for G2: compare first component to the field
// midpoint, tie-break on the second component.
function yElementIsPositiveG2(yElement1: BN, yElement2: BN): boolean {
    const fieldMidpoint = FIELD_SIZE.div(new BN(2));
    // Compare the first component of the y coordinate
    if (yElement1.lt(fieldMidpoint)) {
        return true;
    } else if (yElement1.gt(fieldMidpoint)) {
        return false;
    }
    // If the first component is equal to the midpoint, compare the second component
    return yElement2.lt(fieldMidpoint);
}
// bitmask compatible with solana altbn128 compression syscall and arkworks' implementation
// https://github.com/arkworks-rs/algebra/blob/master/ff/src/fields/models/fp/mod.rs#L580
// https://github.com/arkworks-rs/algebra/blob/master/serialize/src/flags.rs#L18
// fn u8_bitmask(value: u8, inf: bool, neg: bool) -> u8 {
//     let mut mask = 0;
//     match self {
//         inf => mask |= 1 << 6,
//         neg => mask |= 1 << 7,
//         _ => (),
//     }
//     mask
// }
// Sets the 'negative y' flag (bit 7) when yIsPositive is false.
function addBitmaskToByte(byte: number, yIsPositive: boolean): number {
    if (!yIsPositive) {
        return (byte |= 1 << 7);
    } else {
        return byte;
    }
}
//@ts-ignore
if (import.meta.vitest) {
    //@ts-ignore
    const { it, expect, describe } = import.meta.vitest;
    // Unit test for addBitmaskToByte function
    describe('addBitmaskToByte', () => {
        it('should add a bitmask to the byte if yIsPositive is false', () => {
            const byte = 0b00000000;
            const yIsPositive = false;
            const result = addBitmaskToByte(byte, yIsPositive);
            expect(result).toBe(0b10000000); // 128 in binary, which is 1 << 7
        });
        it('should not modify the byte if yIsPositive is true', () => {
            const byte = 0b00000000;
            const yIsPositive = true;
            const result = addBitmaskToByte(byte, yIsPositive);
            expect(result).toBe(0b00000000);
        });
    });
    describe('test prover server', () => {
        const TEST_JSON = {
            ar: [
                '0x22bdaa3187d8fe294925a66fa0165a11bc9e07678fa2fc72402ebfd33d521c69',
                '0x2d18ff780b69898b4cdd8d7b6ac72d077799399f0f45e52665426456f3903584',
            ],
            bs: [
                [
                    '0x138cc0962e49f76a701d2871d2799892c9782940095eb0429e979f336d2e162d',
                    '0x2fe1bfbb15cbfb83d7e00ace23e45f890604003783eaf34affa35e0d6f4822bc',
                ],
                [
                    '0x1a89264f82cc6e8ef1c696bea0b5803c28c0ba6ab61366bcb71e73a4135cae8d',
                    '0xf778d857b3df01a4100265c9d014ce02d47425f0114685356165fa5ee3f3a26',
                ],
            ],
            krs: [
                '0x176b6ae9001f66832951e2d43a98a972667447bb1781f534b70cb010270dcdd3',
                '0xb748d5fac1686db28d94c02250af7eb4f28dfdabc8983305c45bcbc6e163eeb',
            ],
        };
        const COMPRESSED_PROOF_A = [
            34, 189, 170, 49, 135, 216, 254, 41, 73, 37, 166, 111, 160, 22, 90,
            17, 188, 158, 7, 103, 143, 162, 252, 114, 64, 46, 191, 211, 61, 82,
            28, 105,
        ];
        const COMPRESSED_PROOF_B = [
            147, 140, 192, 150, 46, 73, 247, 106, 112, 29, 40, 113, 210, 121,
            152, 146, 201, 120, 41, 64, 9, 94, 176, 66, 158, 151, 159, 51, 109,
            46, 22, 45, 47, 225, 191, 187, 21, 203, 251, 131, 215, 224, 10,
            206, 35, 228, 95, 137, 6, 4, 0, 55, 131, 234, 243, 74, 255, 163,
            94, 13, 111, 72, 34, 188,
        ];
        const COMPRESSED_PROOF_C = [
            23, 107, 106, 233, 0, 31, 102, 131, 41, 81, 226, 212, 58, 152, 169,
            114, 102, 116, 71, 187, 23, 129, 245, 52, 183, 12, 176, 16, 39, 13,
            205, 211,
        ];
        it('should execute a compressed token mint', async () => {
            const proof = proofFromJsonStruct(TEST_JSON);
            const compressedProof = negateAndCompressProof(proof);
            expect(compressedProof.a).toEqual(COMPRESSED_PROOF_A);
            expect(compressedProof.b).toEqual(COMPRESSED_PROOF_B);
            expect(compressedProof.c).toEqual(COMPRESSED_PROOF_C);
        });
    });
    describe('Validity Proof Functions', () => {
        describe('placeholderValidityProof', () => {
            it('should create a validity proof with correct shape', () => {
                const validityProof = placeholderValidityProof();
                expect(validityProof.a.length).toBe(32);
                expect(validityProof.b.length).toBe(64);
                expect(validityProof.c.length).toBe(32);
            });
        });
        describe('checkValidityProofShape', () => {
            it('should not throw an error for valid proof shape', () => {
                const validProof = {
                    a: Array.from(new Uint8Array(32)),
                    b: Array.from(new Uint8Array(64)),
                    c: Array.from(new Uint8Array(32)),
                };
                expect(() => checkValidityProofShape(validProof)).not.toThrow();
            });
            it('should throw an error for an invalid proof', () => {
                const invalidProof = {
                    a: Array.from(new Uint8Array(31)), // incorrect length
                    b: Array.from(new Uint8Array(64)),
                    c: Array.from(new Uint8Array(32)),
                };
                expect(() => checkValidityProofShape(invalidProof)).toThrow(
                    'ValidityProof has invalid shape',
                );
            });
        });
    });
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/test-utils.ts
// Deterministic keypair helpers for tests (do NOT use outside tests).
import { Connection, Keypair, Signer } from '@solana/web3.js';
import { confirmTx } from '../utils/send-and-confirm';
import { Rpc } from '../rpc';
// Global counter backing getTestKeypair() when no counter is passed.
let c = 1;
export const ALICE = getTestKeypair(255);
export const BOB = getTestKeypair(254);
export const CHARLIE = getTestKeypair(253);
export const DAVE = getTestKeypair(252);
/**
 * Create a new account and airdrop lamports to it
 *
 * @param rpc       connection to use
 * @param lamports  amount of lamports to airdrop
 * @param counter   counter to use for generating the keypair.
 *                  If undefined or >255, generates random keypair.
 */
export async function newAccountWithLamports(
    rpc: Rpc,
    lamports = 1000000000,
    counter: number | undefined = undefined,
): Promise<Signer> {
    /// get random keypair
    // 256 > 255, so getTestKeypair falls through to Keypair.generate().
    if (counter === undefined || counter > 255) {
        counter = 256;
    }
    const account = getTestKeypair(counter);
    const sig = await rpc.requestAirdrop(account.publicKey, lamports);
    await confirmTx(rpc, sig);
    return account;
}
/** Connection to the default local validator endpoint. */
export function getConnection(): Connection {
    const url = 'http://127.0.0.1:8899';
    const connection = new Connection(url, 'confirmed');
    return connection;
}
/**
 * For use in tests.
 * Generate a unique keypair by passing in a counter <255. If no counter
 * is supplied, it uses and increments a global counter.
 * if counter > 255, generates random keypair
 */
export function getTestKeypair(
    counter: number | undefined = undefined,
): Keypair {
    // NOTE: `!counter` also matches 0, so counter=0 uses the global counter.
    if (!counter) {
        counter = c;
        c++;
    }
    if (counter > 255) {
        return Keypair.generate();
    }
    // Deterministic seed: counter stored in the last byte.
    const seed = new Uint8Array(32);
    seed[31] = counter; // le
    return Keypair.fromSeed(seed);
}
//@ts-ignore
if (import.meta.vitest) {
    //@ts-ignore
    const { describe, it, expect } = import.meta.vitest;
    describe('getTestKeypair', () => {
        it('should generate a keypair with a specific counter', () => {
            const keypair = getTestKeypair(10);
            const keypair2 = getTestKeypair(10);
            expect(keypair).toEqual(keypair2);
            expect(keypair).toBeInstanceOf(Keypair);
            expect(keypair.publicKey).toBeDefined();
            expect(keypair.secretKey).toBeDefined();
        });
        it('should generate random keypair if counter is greater than 255', () => {
            const testFn = () => getTestKeypair(256);
            const kp1 = testFn();
            const kp2 = testFn();
            expect(kp1).not.toEqual(kp2);
        });
        it('should increment the global counter if no counter is provided', () => {
            const initialKeypair = getTestKeypair();
            const nextKeypair = getTestKeypair();
            const nextNextKeypair = getTestKeypair();
            const nextNextNextKeypair = getTestKeypair(3);
            expect(initialKeypair).not.toEqual(nextKeypair);
            expect(nextKeypair).not.toEqual(nextNextKeypair);
            expect(nextNextKeypair).toEqual(nextNextNextKeypair);
        });
    });
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/conversion.ts
import { Buffer } from 'buffer';
import { bn, createBN254 } from '../state/BN254';
import { FIELD_SIZE } from '../constants';
import { keccak_256 } from '@noble/hashes/sha3';
import { Keypair } from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';

/**
 * Build a web3.js `Keypair` from a plain number array containing the secret
 * key bytes.
 */
export function byteArrayToKeypair(byteArray: number[]): Keypair {
    return Keypair.fromSecretKey(Uint8Array.from(byteArray));
}

/**
 * @internal
 * convert BN to hex with '0x' prefix
 */
export function toHex(bn: BN): string {
    return '0x' + bn.toString('hex');
}

/** Wrap a single value into a one-element array; pass arrays through unchanged. */
export const toArray = <T>(value: T | T[]) =>
    Array.isArray(value) ? value : [value];

/**
 * Decode a buffer into its base-10 string representation.
 * Throws if the value does not fit below the BN254 field modulus
 * (enforced by `createBN254`).
 */
export const bufToDecStr = (buf: Buffer): string => {
    return createBN254(buf).toString();
};

/** True iff the big-endian `bytes` encode a value below the BN254 field modulus. */
function isSmallerThanBn254FieldSizeBe(bytes: Buffer): boolean {
    const bigint = bn(bytes, undefined, 'be');
    return bigint.lt(FIELD_SIZE);
}

/**
 * Hash the provided `bytes` with Keccak256 and ensure the result fits in the
 * BN254 prime field by repeatedly hashing the inputs with various "bump seeds"
 * and truncating the resulting hash to 31 bytes.
 *
 * @deprecated Use `hashvToBn254FieldSizeBe` instead.
 */
export function hashToBn254FieldSizeBe(bytes: Buffer): [Buffer, number] | null {
    // TODO(vadorovsky, affects-onchain): Get rid of the bump mechanism, it
    // makes no sense. Doing the same as in the `hashvToBn254FieldSizeBe` below
    // - overwriting the most significant byte with zero - is sufficient for
    // truncation, it's also faster, doesn't force us to return `Option` and
    // care about handling an error which is practically never returned.
    //
    // The reason we can't do it now is that it would affect on-chain programs.
    // Once we can update programs, we can get rid of the seed bump (or even of
    // this function all together in favor of the `hashv` variant).
    let bumpSeed = 255;
    while (bumpSeed >= 0) {
        // Append the single-byte bump seed to the input before hashing.
        const inputWithBumpSeed = Buffer.concat([
            bytes,
            Buffer.from([bumpSeed]),
        ]);
        const hash = keccak_256(inputWithBumpSeed);
        if (hash.length !== 32) {
            throw new Error('Invalid hash length');
        }
        // Zero the most significant byte so the remaining 31 bytes fit the field.
        hash[0] = 0;
        if (isSmallerThanBn254FieldSizeBe(Buffer.from(hash))) {
            return [Buffer.from(hash), bumpSeed];
        }
        bumpSeed -= 1;
    }
    // Practically unreachable: a 31-byte value is always below the field size.
    return null;
}

/**
 * Hash the provided `bytes` with Keccak256 and ensure that the result fits in
 * the BN254 prime field by truncating the resulting hash to 31 bytes.
 *
 * @param bytes Input bytes
 *
 * @returns Hash digest
 */
export function hashvToBn254FieldSizeBe(bytes: Uint8Array[]): Uint8Array {
    const hasher = keccak_256.create();
    for (const input of bytes) {
        hasher.update(input);
    }
    const hash = hasher.digest();
    // Truncate to 31 bytes by zeroing the most significant byte.
    hash[0] = 0;
    return hash;
}

/**
 * Append each of `items` to `map` unless already present (strict `===`
 * comparison via `includes`). Mutates `map` in place.
 */
export function pushUniqueItems<T>(items: T[], map: T[]): void {
    items.forEach(item => {
        if (!map.includes(item)) {
            map.push(item);
        }
    });
}

/**
 * Recursively convert snake_case / kebab-case keys of plain objects to
 * camelCase. Arrays are mapped element-wise; any other value is returned
 * unchanged.
 */
export function toCamelCase(
    obj: Array<any> | unknown | any,
): Array<any> | unknown | any {
    if (Array.isArray(obj)) {
        return obj.map(v => toCamelCase(v));
    } else if (obj !== null && obj.constructor === Object) {
        return Object.keys(obj).reduce((result, key) => {
            const camelCaseKey = key.replace(/([-_][a-z])/gi, $1 => {
                return $1.toUpperCase().replace('-', '').replace('_', '');
            });
            result[camelCaseKey] = toCamelCase(obj[key]);
            return result;
        }, {} as any);
    }
    return obj;
}

// FIXME: check bundling and how to resolve the type error
//@ts-ignore
if (import.meta.vitest) {
    //@ts-ignore
    const { it, expect, describe } = import.meta.vitest;
    describe('toArray function', () => {
        it('should convert a single item to an array', () => {
            expect(toArray(1)).toEqual([1]);
        });
        it('should leave an array unchanged', () => {
            expect(toArray([1, 2, 3])).toEqual([1, 2, 3]);
        });
    });
    describe('isSmallerThanBn254FieldSizeBe function', () => {
        it('should return true for a small number', () => {
            const buf = Buffer.from(
                '0000000000000000000000000000000000000000000000000000000000000000',
                'hex',
            );
            expect(isSmallerThanBn254FieldSizeBe(buf)).toBe(true);
        });
        it('should return false for a large number', () => {
            const buf = Buffer.from(
                '0000000000000000000000000000000000000000000000000000000000000065',
                'hex',
            ).reverse();
            expect(isSmallerThanBn254FieldSizeBe(buf)).toBe(false);
        });
    });
    describe('hashToBn254FieldSizeBe function', () => {
        // NOTE(review): refBumpSeed is currently unused by the assertions below.
        const refBumpSeed = [252];
        const bytes = [
            131, 219, 249, 246, 221, 196, 33, 3, 114, 23, 121, 235, 18, 229,
            71, 152, 39, 87, 169, 208, 143, 101, 43, 128, 245, 59, 22, 134,
            182, 231, 116, 33,
        ];
        const refResult = [
            0, 146, 15, 187, 171, 163, 183, 93, 237, 121, 37, 231, 55, 162,
            208, 188, 244, 77, 185, 157, 93, 9, 101, 193, 220, 247, 109, 94,
            48, 212, 98, 149,
        ];
        it('should return a valid value for initial buffer', async () => {
            const result = await hashToBn254FieldSizeBe(Buffer.from(bytes));
            expect(Array.from(result![0])).toEqual(refResult);
        });
        it('should return a valid value for initial buffer', async () => {
            const buf = Buffer.from(
                '0000000000000000000000000000000000000000000000000000000000000000',
                'hex',
            );
            const result = await hashToBn254FieldSizeBe(buf);
            expect(result).not.toBeNull();
            if (result) {
                expect(result[0]).toBeInstanceOf(Buffer);
                expect(result[1]).toBe(255);
            }
        });
        it('should return a valid value for a buffer that can be hashed to a smaller value', async () => {
            const buf = Buffer.from(
                'fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe',
                'hex',
            );
            const result = await hashToBn254FieldSizeBe(buf);
            expect(result).not.toBeNull();
            if (result) {
                expect(result[1]).toBeLessThanOrEqual(255);
                expect(result[0]).toBeInstanceOf(Buffer);
                // Check if the hashed value is indeed smaller than the bn254 field size
                expect(isSmallerThanBn254FieldSizeBe(result[0])).toBe(true);
            }
        });
        it('should correctly hash the input buffer', async () => {
            const buf = Buffer.from('deadbeef', 'hex');
            const result = await hashToBn254FieldSizeBe(buf);
            expect(result).not.toBeNull();
            if (result) {
                // Since the actual hash value depends on the crypto implementation and input,
                // we cannot predict the exact output. However, we can check if the output is valid.
                expect(result[0].length).toBe(32); // Keccak-256 digest length
                expect(result[1]).toBeLessThanOrEqual(255);
                expect(isSmallerThanBn254FieldSizeBe(result[0])).toBe(true);
            }
        });
    });
    describe('pushUniqueItems function', () => {
        it('should add unique items', () => {
            const map = [1, 2, 3];
            const itemsToAdd = [3, 4, 5];
            pushUniqueItems(itemsToAdd, map);
            expect(map).toEqual([1, 2, 3, 4, 5]);
        });
        it('should ignore duplicates', () => {
            const map = [1, 2, 3];
            const itemsToAdd = [1, 2, 3];
            pushUniqueItems(itemsToAdd, map);
            expect(map).toEqual([1, 2, 3]);
        });
        it('should handle empty arrays', () => {
            const map: number[] = [];
            const itemsToAdd: number[] = [];
            pushUniqueItems(itemsToAdd, map);
            expect(map).toEqual([]);
        });
    });
    describe('bufToDecStr', () => {
        it("should convert buffer [0] to '0'", () => {
            expect(bufToDecStr(Buffer.from([0]))).toEqual('0');
        });
        it("should convert buffer [1] to '1'", () => {
            expect(bufToDecStr(Buffer.from([1]))).toEqual('1');
        });
        it("should convert buffer [1, 0] to '256'", () => {
            expect(bufToDecStr(Buffer.from([1, 0]))).toEqual('256');
        });
        it("should convert buffer [1, 1] to '257'", () => {
            expect(bufToDecStr(Buffer.from([1, 1]))).toEqual('257');
        });
        it("should convert buffer [7, 91, 205, 21] to '123456789'", () => {
            expect(bufToDecStr(Buffer.from([7, 91, 205, 21]))).toEqual(
                '123456789',
            );
        });
    });
    describe('toCamelCase', () => {
        it('should convert object keys to camelCase', () => {
            const input = { test_key: 1, 'another-testKey': 2 };
            const expected = { testKey: 1, anotherTestKey: 2 };
            expect(toCamelCase(input)).toEqual(expected);
        });
        it('should handle arrays of objects', () => {
            const input = [{ array_key: 3 }, { 'another_array-key': 4 }];
            const expected = [{ arrayKey: 3 }, { anotherArrayKey: 4 }];
            expect(toCamelCase(input)).toEqual(expected);
        });
        it('should return the input if it is neither an object nor an array', () => {
            const input = 'testString';
            expect(toCamelCase(input)).toBe(input);
        });
    });
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/send-and-confirm.ts
import {
    VersionedTransaction,
    TransactionConfirmationStrategy,
    SignatureResult,
    RpcResponseAndContext,
    Signer,
    TransactionInstruction,
    TransactionMessage,
    ConfirmOptions,
    TransactionSignature,
    PublicKey,
    AddressLookupTableAccount,
} from '@solana/web3.js';
import { Rpc } from '../rpc';

/** Blockhash context a transaction confirmation is anchored to. */
type BlockHashCtx = { blockhash: string; lastValidBlockHeight: number };

/**
 * Builds a versioned Transaction from instructions.
 *
 * @param instructions          instructions to include
 * @param payerPublicKey        fee payer public key
 * @param blockhash             blockhash to use
 * @param lookupTableAccounts   lookup table accounts to include
 *
 * @return VersionedTransaction
 */
export function buildTx(
    instructions: TransactionInstruction[],
    payerPublicKey: PublicKey,
    blockhash: string,
    lookupTableAccounts?: AddressLookupTableAccount[],
): VersionedTransaction {
    const messageV0 = new TransactionMessage({
        payerKey: payerPublicKey,
        recentBlockhash: blockhash,
        instructions,
    }).compileToV0Message(lookupTableAccounts);
    return new VersionedTransaction(messageV0);
}

/**
 * @internal
 * Shared confirmation path used by `sendAndConfirmTx` and `confirmTx`:
 * fetches a blockhash context if none was provided, confirms the signature
 * against it, then waits until the indexer has processed the confirmation
 * slot (`rpc.confirmTransactionIndexed`).
 */
async function confirmSignature(
    rpc: Rpc,
    txId: TransactionSignature,
    confirmOptions?: ConfirmOptions,
    blockHashCtx?: BlockHashCtx,
): Promise<RpcResponseAndContext<SignatureResult>> {
    const ctx = blockHashCtx ?? (await rpc.getLatestBlockhash());
    const strategy: TransactionConfirmationStrategy = {
        signature: txId,
        blockhash: ctx.blockhash,
        lastValidBlockHeight: ctx.lastValidBlockHeight,
    };
    const res = await rpc.confirmTransaction(
        strategy,
        // Explicit option wins, then the connection default, then 'confirmed'.
        confirmOptions?.commitment || rpc.commitment || 'confirmed',
    );
    await rpc.confirmTransactionIndexed(res.context.slot);
    return res;
}

/**
 * Sends a versioned transaction and confirms it.
 *
 * @param rpc            connection to use
 * @param tx             versioned transaction to send
 * @param confirmOptions confirmation options
 * @param blockHashCtx   blockhash context for confirmation
 *
 * @return TransactionSignature
 */
export async function sendAndConfirmTx(
    rpc: Rpc,
    tx: VersionedTransaction,
    confirmOptions?: ConfirmOptions,
    blockHashCtx?: BlockHashCtx,
): Promise<TransactionSignature> {
    const txId = await rpc.sendTransaction(tx, confirmOptions);
    await confirmSignature(rpc, txId, confirmOptions, blockHashCtx);
    return txId;
}

/**
 * Confirms a transaction with a given txId.
 *
 * @param rpc            connection to use
 * @param txId           transaction signature to confirm
 * @param confirmOptions confirmation options
 * @param blockHashCtx   blockhash context for confirmation
 * @return SignatureResult
 */
export async function confirmTx(
    rpc: Rpc,
    txId: string,
    confirmOptions?: ConfirmOptions,
    blockHashCtx?: BlockHashCtx,
): Promise<RpcResponseAndContext<SignatureResult>> {
    return confirmSignature(rpc, txId, confirmOptions, blockHashCtx);
}

/**
 * Builds a versioned Transaction from instructions and signs it.
 *
 * @param instructions         instructions to include in the transaction
 * @param payer                payer of the transaction
 * @param blockhash            recent blockhash to use in the transaction
 * @param additionalSigners    non-feepayer signers to include in the
 *                             transaction
 * @param lookupTableAccounts  lookup table accounts to include in the
 *                             transaction
 */
export function buildAndSignTx(
    instructions: TransactionInstruction[],
    payer: Signer,
    blockhash: string,
    additionalSigners: Signer[] = [],
    lookupTableAccounts?: AddressLookupTableAccount[],
): VersionedTransaction {
    // The payer is prepended below; listing it twice would double-sign.
    if (additionalSigners.includes(payer))
        throw new Error('payer must not be in additionalSigners');
    const allSigners = [payer, ...additionalSigners];
    const tx = buildTx(
        instructions,
        payer.publicKey,
        blockhash,
        lookupTableAccounts,
    );
    tx.sign(allSigners);
    return tx;
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/index.ts
// Barrel file for the utils module. Kept in alphabetical order; add new
// modules in sorted position.
export * from './address';
export * from './airdrop';
export * from './calculate-compute-unit-price';
export * from './conversion';
export * from './parse-validity-proof';
export * from './pipe';
export * from './send-and-confirm';
export * from './sleep';
export * from './test-utils';
export * from './validation';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/utils/address.ts
import { AccountMeta, PublicKey } from '@solana/web3.js';
import { hashToBn254FieldSizeBe, hashvToBn254FieldSizeBe } from './conversion';
import { defaultTestStateTreeAccounts } from '../constants';
import { getIndexOrAdd } from '../instruction';

/**
 * Derive a 32-byte address seed by Keccak256-hashing
 * `programId.toBytes() || ...seeds` and truncating the digest into the BN254
 * field (most significant byte zeroed by `hashvToBn254FieldSizeBe`).
 *
 * @param seeds     Byte slices to combine into the seed
 * @param programId Program that owns the derived address; prepended to seeds
 * @returns 32-byte big-endian seed
 */
export function deriveAddressSeed(
    seeds: Uint8Array[],
    programId: PublicKey,
): Uint8Array {
    const combinedSeeds: Uint8Array[] = [programId.toBytes(), ...seeds];
    const hash = hashvToBn254FieldSizeBe(combinedSeeds);
    return hash;
}

/**
 * Derive an address for a compressed account from a seed and an address Merkle
 * tree public key.
 *
 * @param seed Seed to derive the address from
 * @param addressMerkleTreePubkey Merkle tree public key. Defaults to
 *                                defaultTestStateTreeAccounts().addressTree
 * @returns Derived address
 * @throws If seed is not exactly 32 bytes, or if no in-field hash is found
 *         ('DeriveAddressError').
 */
export function deriveAddress(
    seed: Uint8Array,
    addressMerkleTreePubkey: PublicKey = defaultTestStateTreeAccounts()
        .addressTree,
): PublicKey {
    if (seed.length != 32) {
        throw new Error('Seed length is not 32 bytes.');
    }
    // Hash input is `treePubkey || seed`; uses the deprecated bump-seed hash
    // for on-chain compatibility (see hashToBn254FieldSizeBe).
    const bytes = addressMerkleTreePubkey.toBytes();
    const combined = Buffer.from([...bytes, ...seed]);
    const hash = hashToBn254FieldSizeBe(combined);
    if (hash === null) {
        throw new Error('DeriveAddressError');
    }
    const buf = hash[0];
    return new PublicKey(buf);
}

export interface NewAddressParams {
    /**
     * Seed for the compressed account. Must be seed used to derive
     * newAccountAddress
     */
    seed: Uint8Array;
    /**
     * Recent state root index of the address tree. The expiry is tied to the
     * validity proof.
     */
    addressMerkleTreeRootIndex: number;
    /**
     * Address tree pubkey. Must be base pubkey used to derive new address
     */
    addressMerkleTreePubkey: PublicKey;
    /**
     * Address space queue pubkey. Associated with the state tree.
     */
    addressQueuePubkey: PublicKey;
}

export interface NewAddressParamsPacked {
    /**
     * Seed for the compressed account. Must be seed used to derive
     * newAccountAddress
     */
    seed: number[];
    /**
     * Recent state root index of the address tree. The expiry is tied to the
     * validity proof.
     */
    addressMerkleTreeRootIndex: number;
    /**
     * Index of the address merkle tree account in the remaining accounts array
     */
    addressMerkleTreeAccountIndex: number;
    /**
     * Index of the address queue account in the remaining accounts array
     */
    addressQueueAccountIndex: number;
}

/**
 * Packs new address params for instruction data in TypeScript clients
 *
 * Resolves each tree/queue pubkey to its index in `remainingAccounts`,
 * appending pubkeys that are not present yet. The input array is not
 * mutated; the (possibly extended) copy is returned.
 *
 * @param newAddressParams  New address params
 * @param remainingAccounts Remaining accounts
 * @returns Packed new address params
 */
export function packNewAddressParams(
    newAddressParams: NewAddressParams[],
    remainingAccounts: PublicKey[],
): {
    newAddressParamsPacked: NewAddressParamsPacked[];
    remainingAccounts: PublicKey[];
} {
    const _remainingAccounts = remainingAccounts.slice();
    const newAddressParamsPacked: NewAddressParamsPacked[] =
        newAddressParams.map(x => ({
            seed: Array.from(x.seed),
            addressMerkleTreeRootIndex: x.addressMerkleTreeRootIndex,
            addressMerkleTreeAccountIndex: 0, // will be assigned later
            addressQueueAccountIndex: 0, // will be assigned later
        }));
    // Two passes: all Merkle tree indices are assigned before any queue
    // indices, which fixes the relative ordering of appended accounts.
    newAddressParams.forEach((params, i) => {
        newAddressParamsPacked[i].addressMerkleTreeAccountIndex = getIndexOrAdd(
            _remainingAccounts,
            params.addressMerkleTreePubkey,
        );
    });
    newAddressParams.forEach((params, i) => {
        newAddressParamsPacked[i].addressQueueAccountIndex = getIndexOrAdd(
            _remainingAccounts,
            params.addressQueuePubkey,
        );
    });
    return { newAddressParamsPacked, remainingAccounts: _remainingAccounts };
}

//@ts-ignore
if (import.meta.vitest) {
    //@ts-ignore
    const { it, expect, describe } = import.meta.vitest;
    const programId = new PublicKey(
        '7yucc7fL3JGbyMwg4neUaenNSdySS39hbAk89Ao3t1Hz',
    );
    describe('derive address seed', () => {
        it('should derive a valid address seed', () => {
            const seeds: Uint8Array[] = [
                new TextEncoder().encode('foo'),
                new TextEncoder().encode('bar'),
            ];
            expect(deriveAddressSeed(seeds, programId)).toStrictEqual(
                new Uint8Array([
                    0, 246, 150, 3, 192, 95, 53, 123, 56, 139, 206, 179, 253,
                    133, 115, 103, 120, 155, 251, 72, 250, 47, 117, 217, 118,
                    59, 174, 207, 49, 101, 201, 110,
                ]),
            );
        });
        it('should derive a valid address seed', () => {
            const seeds: Uint8Array[] = [
                new TextEncoder().encode('ayy'),
                new TextEncoder().encode('lmao'),
            ];
            expect(deriveAddressSeed(seeds, programId)).toStrictEqual(
                new Uint8Array([
                    0, 202, 44, 25, 221, 74, 144, 92, 69, 168, 38, 19, 206,
                    208, 29, 162, 53, 27, 120, 214, 152, 116, 15, 107, 212,
                    168, 33, 121, 187, 10, 76, 233,
                ]),
            );
        });
    });
    describe('deriveAddress function', () => {
        it('should derive a valid address from a seed and a merkle tree public key', async () => {
            const seeds: Uint8Array[] = [
                new TextEncoder().encode('foo'),
                new TextEncoder().encode('bar'),
            ];
            const seed = deriveAddressSeed(seeds, programId);
            const merkleTreePubkey = new PublicKey(
                '11111111111111111111111111111111',
            );
            const derivedAddress = deriveAddress(seed, merkleTreePubkey);
            expect(derivedAddress).toBeInstanceOf(PublicKey);
            expect(derivedAddress).toStrictEqual(
                new PublicKey('139uhyyBtEh4e1CBDJ68ooK5nCeWoncZf9HPyAfRrukA'),
            );
        });
        it('should derive a valid address from a seed and a merkle tree public key', async () => {
            const seeds: Uint8Array[] = [
                new TextEncoder().encode('ayy'),
                new TextEncoder().encode('lmao'),
            ];
            const seed = deriveAddressSeed(seeds, programId);
            const merkleTreePubkey = new PublicKey(
                '11111111111111111111111111111111',
            );
            const derivedAddress = deriveAddress(seed, merkleTreePubkey);
            expect(derivedAddress).toBeInstanceOf(PublicKey);
            expect(derivedAddress).toStrictEqual(
                new PublicKey('12bhHm6PQjbNmEn3Yu1Gq9k7XwVn2rZpzYokmLwbFazN'),
            );
        });
    });
    describe('packNewAddressParams function', () => {
        it('should pack new address params correctly', () => {
            const newAddressParams = [
                {
                    seed: new Uint8Array([1, 2, 3, 4]),
                    addressMerkleTreeRootIndex: 0,
                    addressMerkleTreePubkey: new PublicKey(
                        '11111111111111111111111111111111',
                    ),
                    addressQueuePubkey: new PublicKey(
                        '11111111111111111111111111111112',
                    ),
                },
            ];
            const remainingAccounts = [
                new PublicKey('11111111111111111111111111111112'),
                new PublicKey('11111111111111111111111111111111'),
            ];
            const packedParams = packNewAddressParams(
                newAddressParams,
                remainingAccounts,
            );
            expect(
                packedParams.newAddressParamsPacked[0]
                    .addressMerkleTreeAccountIndex,
            ).toBe(1);
            expect(
                packedParams.newAddressParamsPacked[0].addressQueueAccountIndex,
            ).toBe(0);
        });
    });
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/state/compressed-account.ts
import { BN } from '@coral-xyz/anchor'; import { PublicKey } from '@solana/web3.js'; import { CompressedAccount, CompressedAccountData } from './types'; import { BN254, bn } from './BN254'; export type CompressedAccountWithMerkleContext = CompressedAccount & MerkleContext & { readOnly: boolean; }; /** * Context for compressed account inserted into a state Merkle tree * */ export type MerkleContext = { /** State Merkle tree */ merkleTree: PublicKey; /** The state nullfier queue belonging to merkleTree */ nullifierQueue: PublicKey; /** Poseidon hash of the utxo preimage. Is a leaf in state merkle tree */ hash: number[]; // TODO: BN254; /** 'hash' position within the Merkle tree */ leafIndex: number; }; export type MerkleContextWithMerkleProof = MerkleContext & { /** Recent valid 'hash' proof path, expires after n slots */ merkleProof: BN254[]; /** Index of state root the merkleproof is valid for, expires after n slots */ rootIndex: number; /** Current root */ root: BN254; }; export const createCompressedAccount = ( owner: PublicKey, lamports?: BN, data?: CompressedAccountData, address?: number[], ): CompressedAccount => ({ owner, lamports: lamports ?? bn(0), address: address ?? null, data: data ?? null, }); export const createCompressedAccountWithMerkleContext = ( merkleContext: MerkleContext, owner: PublicKey, lamports?: BN, data?: CompressedAccountData, address?: number[], ): CompressedAccountWithMerkleContext => ({ ...createCompressedAccount(owner, lamports, data, address), ...merkleContext, readOnly: false, }); export const createMerkleContext = ( merkleTree: PublicKey, nullifierQueue: PublicKey, hash: number[], // TODO: BN254, leafIndex: number, ): MerkleContext => ({ merkleTree, nullifierQueue, hash, leafIndex, });
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/state/types.ts
import { BN } from '@coral-xyz/anchor';
import { PublicKey } from '@solana/web3.js';
import { Buffer } from 'buffer';
import { NewAddressParamsPacked } from '../utils';

/** Compressed account plus its packed (index-based) Merkle context. */
export interface PackedCompressedAccountWithMerkleContext {
    compressedAccount: CompressedAccount;
    merkleContext: PackedMerkleContext;
    rootIndex: number; // u16
    readOnly: boolean;
}

/** Merkle context with pubkeys replaced by remaining-accounts indices. */
export interface PackedMerkleContext {
    merkleTreePubkeyIndex: number; // u8
    nullifierQueuePubkeyIndex: number; // u8
    leafIndex: number; // u32
    queueIndex: null | QueueIndex; // Option<QueueIndex>
}

export interface QueueIndex {
    queueId: number; // u8
    index: number; // u16
}

/**
 * Describe the generic compressed account details applicable to every
 * compressed account.
 */
export interface CompressedAccount {
    /** Public key of program or user that owns the account */
    owner: PublicKey;
    /** Lamports attached to the account */
    lamports: BN; // u64 // FIXME: optional
    /**
     * TODO: use PublicKey. Optional unique account ID that is persistent across
     * transactions.
     */
    address: number[] | null; // Option<PublicKey>
    /** Optional data attached to the account */
    data: CompressedAccountData | null; // Option<CompressedAccountData>
}

/**
 * A transaction-output compressed account together with the index of the
 * state Merkle tree (in the remaining accounts) it is appended to.
 */
export interface OutputCompressedAccountWithPackedContext {
    compressedAccount: CompressedAccount;
    merkleTreeIndex: number;
}

/** Program-defined data attached to a compressed account. */
export interface CompressedAccountData {
    discriminator: number[]; // [u8; 8]
    // TODO: test with uint8Array instead
    data: Buffer; // bytes
    dataHash: number[]; // [u8; 32]
}

/** Event emitted for every state transition of compressed accounts. */
export interface PublicTransactionEvent {
    inputCompressedAccountHashes: number[][]; // Vec<[u8; 32]>
    outputCompressedAccountHashes: number[][]; // Vec<[u8; 32]>
    outputCompressedAccounts: OutputCompressedAccountWithPackedContext[];
    outputLeafIndices: number[]; // Vec<u32>
    relayFee: BN | null; // Option<u64>
    isCompress: boolean; // bool
    compressOrDecompressLamports: BN | null; // Option<u64>
    pubkeyArray: PublicKey[]; // Vec<PublicKey>
    message: Uint8Array | null; // Option<bytes>
}

/** Instruction data for the system program's `invoke` instruction. */
export interface InstructionDataInvoke {
    proof: CompressedProof | null; // Option<CompressedProof>
    inputCompressedAccountsWithMerkleContext: PackedCompressedAccountWithMerkleContext[];
    outputCompressedAccounts: OutputCompressedAccountWithPackedContext[];
    relayFee: BN | null; // Option<u64>
    newAddressParams: NewAddressParamsPacked[]; // Vec<NewAddressParamsPacked>
    compressOrDecompressLamports: BN | null; // Option<u64>
    isCompress: boolean; // bool
}

/** Groth16 proof points (compressed encoding). */
export interface CompressedProof {
    a: number[]; // [u8; 32]
    b: number[]; // [u8; 64]
    c: number[]; // [u8; 32]
}

/**
 * Compressed-token types
 *
 * TODO: Token-related code should ideally not have to go into stateless.js.
 * Find a better alternative way to extend the RPC.
 */
export type TokenTransferOutputData = {
    owner: PublicKey;
    amount: BN;
    lamports: BN | null;
    tlv: Buffer | null;
};

/** Instruction data for the compressed-token `transfer` instruction. */
export type CompressedTokenInstructionDataTransfer = {
    proof: CompressedProof | null;
    mint: PublicKey;
    delegatedTransfer: null;
    inputTokenDataWithContext: InputTokenDataWithContext[];
    outputCompressedAccounts: TokenTransferOutputData[];
    isCompress: boolean;
    compressOrDecompressAmount: BN | null;
    cpiContext: null;
    lamportsChangeAccountMerkleTreeIndex: number | null;
};

/** Input token account data with its packed Merkle context. */
export interface InputTokenDataWithContext {
    amount: BN;
    delegateIndex: number | null; // Option<u8>
    merkleContext: PackedMerkleContext;
    rootIndex: number; // u16
    lamports: BN | null;
    tlv: Buffer | null;
}

export type TokenData = {
    /** The mint associated with this account */
    mint: PublicKey;
    /** The owner of this account. */
    owner: PublicKey;
    /** The amount of tokens this account holds. */
    amount: BN;
    /**
     * If `delegate` is `Some` then `delegated_amount` represents
     * the amount authorized by the delegate
     */
    delegate: PublicKey | null;
    /** The account's state */
    state: number; // AccountState_IdlType;
    /** TokenExtension tlv */
    tlv: Buffer | null;
};
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/state/index.ts
// Barrel file re-exporting the public state API of this package.
export * from './BN254';
export * from './compressed-account';
export * from './types';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/state/BN254.ts
// TODO: consider implementing BN254 as wrapper class around _BN mirroring // PublicKey this would encapsulate our runtime checks and also enforce // typesafety at compile time import { FIELD_SIZE } from '../constants'; import { PublicKey } from '@solana/web3.js'; import { BN } from '@coral-xyz/anchor'; import { bs58 } from '@coral-xyz/anchor/dist/esm/utils/bytes'; import { Buffer } from 'buffer'; /** * bignumber with <254-bit max size. Anchor serialization doesn't support native * bigint yet, so we wrap BN. This wrapper has simple base10 encoding which is * needed for zk circuit compat, in addition to the base58 encoding that users * are used to from working with the web3.js PublicKey type. */ export type BN254 = BN; export const bn = ( number: string | number | BN | Buffer | Uint8Array | number[], base?: number | 'hex' | undefined, endian?: BN.Endianness | undefined, ): BN => new BN(number, base, endian); /** Create a bigint instance with <254-bit max size and base58 capabilities */ export const createBN254 = ( number: string | number | BN | Buffer | Uint8Array | number[], base?: number | 'hex' | 'base58' | undefined, ): BN254 => { if (base === 'base58') { if (typeof number !== 'string') throw new Error('Must be a base58 string'); return createBN254(bs58.decode(number)); } const bigintNumber = new BN(number, base); return enforceSize(bigintNumber); }; /** * Enforces a maximum size of <254 bits for bigint instances. This is necessary * for compatibility with zk-SNARKs, where hashes must be less than the field * modulus (~2^254). */ function enforceSize(bigintNumber: BN254): BN254 { if (bigintNumber.gte(FIELD_SIZE)) { throw new Error('Value is too large. Max <254 bits'); } return bigintNumber; } /** Convert <254-bit bigint to Base58 string. */ export function encodeBN254toBase58(bigintNumber: BN): string { /// enforce size const bn254 = createBN254(bigintNumber); const bn254Buffer = bn254.toArrayLike(Buffer, undefined, 32); return bs58.encode(bn254Buffer); }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/idls/account_compression.ts
// NOTE(review): Auto-generated Anchor IDL for the `account_compression`
// program — a literal type (`AccountCompression`) mirrored by a matching
// value (`IDL`). Do not edit by hand: regenerate from the program build so
// the type and the value stay in sync.
export type AccountCompression = { version: '1.2.0'; name: 'account_compression'; constants: [ { name: 'CPI_AUTHORITY_PDA_SEED'; type: 'bytes'; value: '[99, 112, 105, 95, 97, 117, 116, 104, 111, 114, 105, 116, 121]'; }, { name: 'GROUP_AUTHORITY_SEED'; type: 'bytes'; value: '[103, 114, 111, 117, 112, 95, 97, 117, 116, 104, 111, 114, 105, 116, 121]'; }, { name: 'STATE_MERKLE_TREE_HEIGHT'; type: 'u64'; value: '26'; }, { name: 'STATE_MERKLE_TREE_CHANGELOG'; type: 'u64'; value: '1400'; }, { name: 'STATE_MERKLE_TREE_ROOTS'; type: 'u64'; value: '2400'; }, { name: 'STATE_MERKLE_TREE_CANOPY_DEPTH'; type: 'u64'; value: '10'; }, { name: 'STATE_NULLIFIER_QUEUE_VALUES'; type: 'u16'; value: '28_807'; }, { name: 'STATE_NULLIFIER_QUEUE_SEQUENCE_THRESHOLD'; type: 'u64'; value: '2400'; }, { name: 'ADDRESS_MERKLE_TREE_HEIGHT'; type: 'u64'; value: '26'; }, { name: 'ADDRESS_MERKLE_TREE_CHANGELOG'; type: 'u64'; value: '1400'; }, { name: 'ADDRESS_MERKLE_TREE_ROOTS'; type: 'u64'; value: '2400'; }, { name: 'ADDRESS_MERKLE_TREE_CANOPY_DEPTH'; type: 'u64'; value: '10'; }, { name: 'ADDRESS_MERKLE_TREE_INDEXED_CHANGELOG'; type: 'u64'; value: '1400'; }, { name: 'ADDRESS_QUEUE_VALUES'; type: 'u16'; value: '28_807'; }, { name: 'ADDRESS_QUEUE_SEQUENCE_THRESHOLD'; type: 'u64'; value: '2400'; }, { name: 'NOOP_PUBKEY'; type: { array: ['u8', 32]; }; value: '[11 , 188 , 15 , 192 , 187 , 71 , 202 , 47 , 116 , 196 , 17 , 46 , 148 , 171 , 19 , 207 , 163 , 198 , 52 , 229 , 220 , 23 , 234 , 203 , 3 , 205 , 26 , 35 , 205 , 126 , 120 , 124 ,]'; }, ]; instructions: [ { name: 'initializeAddressMerkleTreeAndQueue'; accounts: [ { name: 'authority'; isMut: true; isSigner: true; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'queue'; isMut: true; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, ]; args: [ { name: 'index'; type: 'u64'; }, { name: 'programOwner'; type: { option: 'publicKey'; }; }, { name: 'forester'; type: { option: 
'publicKey'; }; }, { name: 'addressMerkleTreeConfig'; type: { defined: 'AddressMerkleTreeConfig'; }; }, { name: 'addressQueueConfig'; type: { defined: 'AddressQueueConfig'; }; }, ]; }, { name: 'insertAddresses'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['Fee payer pays rollover fee.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'addresses'; type: { vec: { array: ['u8', 32]; }; }; }, ]; }, { name: 'updateAddressMerkleTree'; docs: ['Updates the address Merkle tree with a new address.']; accounts: [ { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, { name: 'queue'; isMut: true; isSigner: false; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'logWrapper'; isMut: false; isSigner: false; }, ]; args: [ { name: 'changelogIndex'; type: 'u16'; }, { name: 'indexedChangelogIndex'; type: 'u16'; }, { name: 'value'; type: 'u16'; }, { name: 'lowAddressIndex'; type: 'u64'; }, { name: 'lowAddressValue'; type: { array: ['u8', 32]; }; }, { name: 'lowAddressNextIndex'; type: 'u64'; }, { name: 'lowAddressNextValue'; type: { array: ['u8', 32]; }; }, { name: 'lowAddressProof'; type: { array: [ { array: ['u8', 32]; }, 16, ]; }; }, ]; }, { name: 'rolloverAddressMerkleTreeAndQueue'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: [ 'Signer used to receive rollover accounts rentexemption reimbursement.', ]; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, { name: 'newAddressMerkleTree'; isMut: true; isSigner: false; }, { name: 'newQueue'; isMut: true; isSigner: false; }, { name: 'oldAddressMerkleTree'; isMut: true; isSigner: false; }, { name: 'oldQueue'; isMut: true; isSigner: 
false; }, ]; args: []; }, { name: 'initializeGroupAuthority'; docs: [ 'initialize group (a group can be used to give multiple programs access', 'to the same Merkle trees by registering the programs to the group)', ]; accounts: [ { name: 'authority'; isMut: true; isSigner: true; }, { name: 'seed'; isMut: false; isSigner: true; docs: [ 'Seed public key used to derive the group authority.', ]; }, { name: 'groupAuthority'; isMut: true; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'authority'; type: 'publicKey'; }, ]; }, { name: 'updateGroupAuthority'; accounts: [ { name: 'authority'; isMut: false; isSigner: true; }, { name: 'groupAuthority'; isMut: true; isSigner: false; }, ]; args: [ { name: 'authority'; type: 'publicKey'; }, ]; }, { name: 'registerProgramToGroup'; accounts: [ { name: 'authority'; isMut: true; isSigner: true; }, { name: 'programToBeRegistered'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: true; isSigner: false; }, { name: 'groupAuthorityPda'; isMut: false; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: []; }, { name: 'deregisterProgram'; accounts: [ { name: 'authority'; isMut: true; isSigner: true; }, { name: 'registeredProgramPda'; isMut: true; isSigner: false; }, { name: 'groupAuthorityPda'; isMut: false; isSigner: false; }, { name: 'closeRecipient'; isMut: true; isSigner: false; }, ]; args: []; }, { name: 'initializeStateMerkleTreeAndNullifierQueue'; docs: [ 'Initializes a new Merkle tree from config bytes.', 'Index is an optional identifier and not checked by the program.', ]; accounts: [ { name: 'authority'; isMut: true; isSigner: true; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'nullifierQueue'; isMut: true; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, ]; args: [ { name: 'index'; type: 'u64'; }, { name: 'programOwner'; type: { option: 
'publicKey'; }; }, { name: 'forester'; type: { option: 'publicKey'; }; }, { name: 'stateMerkleTreeConfig'; type: { defined: 'StateMerkleTreeConfig'; }; }, { name: 'nullifierQueueConfig'; type: { defined: 'NullifierQueueConfig'; }; }, { name: 'additionalBytes'; type: 'u64'; }, ]; }, { name: 'appendLeavesToMerkleTrees'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['Fee payer pays rollover fee.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Checked whether instruction is accessed by a registered program or owner = authority.', ]; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; docs: [ 'Some assumes that the Merkle trees are accessed by a registered program.', 'None assumes that the Merkle trees are accessed by its owner.', ]; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'leaves'; type: { vec: { defined: '(u8,[u8;32])'; }; }; }, ]; }, { name: 'nullifyLeaves'; accounts: [ { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, { name: 'logWrapper'; isMut: false; isSigner: false; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'nullifierQueue'; isMut: true; isSigner: false; }, ]; args: [ { name: 'changeLogIndices'; type: { vec: 'u64'; }; }, { name: 'leavesQueueIndices'; type: { vec: 'u16'; }; }, { name: 'leafIndices'; type: { vec: 'u64'; }; }, { name: 'proofs'; type: { vec: { vec: { array: ['u8', 32]; }; }; }; }, ]; }, { name: 'insertIntoNullifierQueues'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['Fee payer pays rollover fee.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'nullifiers'; type: { vec: { array: ['u8', 32]; }; }; }, ]; }, { name: 
'rolloverStateMerkleTreeAndNullifierQueue'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: [ 'Signer used to receive rollover accounts rentexemption reimbursement.', ]; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; isOptional: true; }, { name: 'newStateMerkleTree'; isMut: true; isSigner: false; }, { name: 'newNullifierQueue'; isMut: true; isSigner: false; }, { name: 'oldStateMerkleTree'; isMut: true; isSigner: false; }, { name: 'oldNullifierQueue'; isMut: true; isSigner: false; }, ]; args: []; }, ]; accounts: [ { name: 'registeredProgram'; type: { kind: 'struct'; fields: [ { name: 'registeredProgramId'; type: 'publicKey'; }, { name: 'groupAuthorityPda'; type: 'publicKey'; }, ]; }; }, { name: 'accessMetadata'; type: { kind: 'struct'; fields: [ { name: 'owner'; docs: ['Owner of the Merkle tree.']; type: 'publicKey'; }, { name: 'programOwner'; docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ]; type: 'publicKey'; }, { name: 'forester'; docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. The registry program', 'implements access control to prevent contention during forester. 
The', 'forester pubkey specified in this struct can bypass contention checks.', ]; type: 'publicKey'; }, ]; }; }, { name: 'addressMerkleTreeAccount'; type: { kind: 'struct'; fields: [ { name: 'metadata'; type: { defined: 'MerkleTreeMetadata'; }; }, ]; }; }, { name: 'groupAuthority'; type: { kind: 'struct'; fields: [ { name: 'authority'; type: 'publicKey'; }, { name: 'seed'; type: 'publicKey'; }, ]; }; }, { name: 'merkleTreeMetadata'; type: { kind: 'struct'; fields: [ { name: 'accessMetadata'; type: { defined: 'AccessMetadata'; }; }, { name: 'rolloverMetadata'; type: { defined: 'RolloverMetadata'; }; }, { name: 'associatedQueue'; type: 'publicKey'; }, { name: 'nextMerkleTree'; type: 'publicKey'; }, ]; }; }, { name: 'stateMerkleTreeAccount'; docs: [ 'Concurrent state Merkle tree used for public compressed transactions.', ]; type: { kind: 'struct'; fields: [ { name: 'metadata'; type: { defined: 'MerkleTreeMetadata'; }; }, ]; }; }, { name: 'queueMetadata'; type: { kind: 'struct'; fields: [ { name: 'accessMetadata'; type: { defined: 'AccessMetadata'; }; }, { name: 'rolloverMetadata'; type: { defined: 'RolloverMetadata'; }; }, { name: 'associatedMerkleTree'; type: 'publicKey'; }, { name: 'nextQueue'; type: 'publicKey'; }, { name: 'queueType'; type: 'u64'; }, ]; }; }, { name: 'queueAccount'; type: { kind: 'struct'; fields: [ { name: 'metadata'; type: { defined: 'QueueMetadata'; }; }, ]; }; }, { name: 'rolloverMetadata'; type: { kind: 'struct'; fields: [ { name: 'index'; docs: ['Unique index.']; type: 'u64'; }, { name: 'rolloverFee'; docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ]; type: 'u64'; }, { name: 'rolloverThreshold'; docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ]; type: 'u64'; }, { name: 'networkFee'; docs: ['Tip for maintaining the account.']; type: 'u64'; }, { name: 
'rolledoverSlot'; docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ]; type: 'u64'; }, { name: 'closeThreshold'; docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. No 'close' functionality has been", 'implemented yet.', ]; type: 'u64'; }, { name: 'additionalBytes'; docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ]; type: 'u64'; }, ]; }; }, ]; types: [ { name: 'AddressMerkleTreeConfig'; type: { kind: 'struct'; fields: [ { name: 'height'; type: 'u32'; }, { name: 'changelogSize'; type: 'u64'; }, { name: 'rootsSize'; type: 'u64'; }, { name: 'canopyDepth'; type: 'u64'; }, { name: 'addressChangelogSize'; type: 'u64'; }, { name: 'networkFee'; type: { option: 'u64'; }; }, { name: 'rolloverThreshold'; type: { option: 'u64'; }; }, { name: 'closeThreshold'; type: { option: 'u64'; }; }, ]; }; }, { name: 'StateMerkleTreeConfig'; type: { kind: 'struct'; fields: [ { name: 'height'; type: 'u32'; }, { name: 'changelogSize'; type: 'u64'; }, { name: 'rootsSize'; type: 'u64'; }, { name: 'canopyDepth'; type: 'u64'; }, { name: 'networkFee'; type: { option: 'u64'; }; }, { name: 'rolloverThreshold'; type: { option: 'u64'; }; }, { name: 'closeThreshold'; type: { option: 'u64'; }; }, ]; }; }, { name: 'NullifierQueueConfig'; type: { kind: 'struct'; fields: [ { name: 'capacity'; type: 'u16'; }, { name: 'sequenceThreshold'; type: 'u64'; }, { name: 'networkFee'; type: { option: 'u64'; }; }, ]; }; }, { name: 'QueueType'; type: { kind: 'enum'; variants: [ { name: 'NullifierQueue'; }, { name: 'AddressQueue'; }, ]; }; }, { name: 'AddressQueueConfig'; type: { kind: 'alias'; value: { defined: 'NullifierQueueConfig'; }; }; }, ]; errors: [ { code: 6000; name: 'IntegerOverflow'; msg: 'Integer overflow'; }, { code: 6001; name: 'InvalidAuthority'; msg: 'InvalidAuthority'; }, { code: 6002; name: 
'NumberOfLeavesMismatch'; msg: 'Leaves <> remaining accounts mismatch. The number of remaining accounts must match the number of leaves.'; }, { code: 6003; name: 'InvalidNoopPubkey'; msg: 'Provided noop program public key is invalid'; }, { code: 6004; name: 'NumberOfChangeLogIndicesMismatch'; msg: 'Number of change log indices mismatch'; }, { code: 6005; name: 'NumberOfIndicesMismatch'; msg: 'Number of indices mismatch'; }, { code: 6006; name: 'NumberOfProofsMismatch'; msg: 'NumberOfProofsMismatch'; }, { code: 6007; name: 'InvalidMerkleProof'; msg: 'InvalidMerkleProof'; }, { code: 6008; name: 'LeafNotFound'; msg: 'Could not find the leaf in the queue'; }, { code: 6009; name: 'MerkleTreeAndQueueNotAssociated'; msg: 'MerkleTreeAndQueueNotAssociated'; }, { code: 6010; name: 'MerkleTreeAlreadyRolledOver'; msg: 'MerkleTreeAlreadyRolledOver'; }, { code: 6011; name: 'NotReadyForRollover'; msg: 'NotReadyForRollover'; }, { code: 6012; name: 'RolloverNotConfigured'; msg: 'RolloverNotConfigured'; }, { code: 6013; name: 'NotAllLeavesProcessed'; msg: 'NotAllLeavesProcessed'; }, { code: 6014; name: 'InvalidQueueType'; msg: 'InvalidQueueType'; }, { code: 6015; name: 'InputElementsEmpty'; msg: 'InputElementsEmpty'; }, { code: 6016; name: 'NoLeavesForMerkleTree'; msg: 'NoLeavesForMerkleTree'; }, { code: 6017; name: 'InvalidAccountSize'; msg: 'InvalidAccountSize'; }, { code: 6018; name: 'InsufficientRolloverFee'; msg: 'InsufficientRolloverFee'; }, { code: 6019; name: 'UnsupportedHeight'; msg: 'Unsupported Merkle tree height'; }, { code: 6020; name: 'UnsupportedCanopyDepth'; msg: 'Unsupported canopy depth'; }, { code: 6021; name: 'InvalidSequenceThreshold'; msg: 'Invalid sequence threshold'; }, { code: 6022; name: 'UnsupportedCloseThreshold'; msg: 'Unsupported close threshold'; }, { code: 6023; name: 'InvalidAccountBalance'; msg: 'InvalidAccountBalance'; }, { code: 6024; name: 'UnsupportedAdditionalBytes'; }, { code: 6025; name: 'InvalidGroup'; }, { code: 6026; name: 
'ProofLengthMismatch'; }, ]; }; export const IDL: AccountCompression = { version: '1.2.0', name: 'account_compression', constants: [ { name: 'CPI_AUTHORITY_PDA_SEED', type: 'bytes', value: '[99, 112, 105, 95, 97, 117, 116, 104, 111, 114, 105, 116, 121]', }, { name: 'GROUP_AUTHORITY_SEED', type: 'bytes', value: '[103, 114, 111, 117, 112, 95, 97, 117, 116, 104, 111, 114, 105, 116, 121]', }, { name: 'STATE_MERKLE_TREE_HEIGHT', type: 'u64', value: '26', }, { name: 'STATE_MERKLE_TREE_CHANGELOG', type: 'u64', value: '1400', }, { name: 'STATE_MERKLE_TREE_ROOTS', type: 'u64', value: '2400', }, { name: 'STATE_MERKLE_TREE_CANOPY_DEPTH', type: 'u64', value: '10', }, { name: 'STATE_NULLIFIER_QUEUE_VALUES', type: 'u16', value: '28_807', }, { name: 'STATE_NULLIFIER_QUEUE_SEQUENCE_THRESHOLD', type: 'u64', value: '2400', }, { name: 'ADDRESS_MERKLE_TREE_HEIGHT', type: 'u64', value: '26', }, { name: 'ADDRESS_MERKLE_TREE_CHANGELOG', type: 'u64', value: '1400', }, { name: 'ADDRESS_MERKLE_TREE_ROOTS', type: 'u64', value: '2400', }, { name: 'ADDRESS_MERKLE_TREE_CANOPY_DEPTH', type: 'u64', value: '10', }, { name: 'ADDRESS_MERKLE_TREE_INDEXED_CHANGELOG', type: 'u64', value: '1400', }, { name: 'ADDRESS_QUEUE_VALUES', type: 'u16', value: '28_807', }, { name: 'ADDRESS_QUEUE_SEQUENCE_THRESHOLD', type: 'u64', value: '2400', }, { name: 'NOOP_PUBKEY', type: { array: ['u8', 32], }, value: '[11 , 188 , 15 , 192 , 187 , 71 , 202 , 47 , 116 , 196 , 17 , 46 , 148 , 171 , 19 , 207 , 163 , 198 , 52 , 229 , 220 , 23 , 234 , 203 , 3 , 205 , 26 , 35 , 205 , 126 , 120 , 124 ,]', }, ], instructions: [ { name: 'initializeAddressMerkleTreeAndQueue', accounts: [ { name: 'authority', isMut: true, isSigner: true, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'queue', isMut: true, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, ], args: [ { name: 'index', type: 'u64', }, { name: 'programOwner', type: { option: 'publicKey', }, }, { name: 
'forester', type: { option: 'publicKey', }, }, { name: 'addressMerkleTreeConfig', type: { defined: 'AddressMerkleTreeConfig', }, }, { name: 'addressQueueConfig', type: { defined: 'AddressQueueConfig', }, }, ], }, { name: 'insertAddresses', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['Fee payer pays rollover fee.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'addresses', type: { vec: { array: ['u8', 32], }, }, }, ], }, { name: 'updateAddressMerkleTree', docs: ['Updates the address Merkle tree with a new address.'], accounts: [ { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, { name: 'queue', isMut: true, isSigner: false, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'logWrapper', isMut: false, isSigner: false, }, ], args: [ { name: 'changelogIndex', type: 'u16', }, { name: 'indexedChangelogIndex', type: 'u16', }, { name: 'value', type: 'u16', }, { name: 'lowAddressIndex', type: 'u64', }, { name: 'lowAddressValue', type: { array: ['u8', 32], }, }, { name: 'lowAddressNextIndex', type: 'u64', }, { name: 'lowAddressNextValue', type: { array: ['u8', 32], }, }, { name: 'lowAddressProof', type: { array: [ { array: ['u8', 32], }, 16, ], }, }, ], }, { name: 'rolloverAddressMerkleTreeAndQueue', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: [ 'Signer used to receive rollover accounts rentexemption reimbursement.', ], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, { name: 'newAddressMerkleTree', isMut: true, isSigner: false, }, { name: 'newQueue', isMut: true, isSigner: false, }, { name: 'oldAddressMerkleTree', isMut: true, isSigner: false, }, { name: 
'oldQueue', isMut: true, isSigner: false, }, ], args: [], }, { name: 'initializeGroupAuthority', docs: [ 'initialize group (a group can be used to give multiple programs access', 'to the same Merkle trees by registering the programs to the group)', ], accounts: [ { name: 'authority', isMut: true, isSigner: true, }, { name: 'seed', isMut: false, isSigner: true, docs: [ 'Seed public key used to derive the group authority.', ], }, { name: 'groupAuthority', isMut: true, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'authority', type: 'publicKey', }, ], }, { name: 'updateGroupAuthority', accounts: [ { name: 'authority', isMut: false, isSigner: true, }, { name: 'groupAuthority', isMut: true, isSigner: false, }, ], args: [ { name: 'authority', type: 'publicKey', }, ], }, { name: 'registerProgramToGroup', accounts: [ { name: 'authority', isMut: true, isSigner: true, }, { name: 'programToBeRegistered', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: true, isSigner: false, }, { name: 'groupAuthorityPda', isMut: false, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [], }, { name: 'deregisterProgram', accounts: [ { name: 'authority', isMut: true, isSigner: true, }, { name: 'registeredProgramPda', isMut: true, isSigner: false, }, { name: 'groupAuthorityPda', isMut: false, isSigner: false, }, { name: 'closeRecipient', isMut: true, isSigner: false, }, ], args: [], }, { name: 'initializeStateMerkleTreeAndNullifierQueue', docs: [ 'Initializes a new Merkle tree from config bytes.', 'Index is an optional identifier and not checked by the program.', ], accounts: [ { name: 'authority', isMut: true, isSigner: true, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'nullifierQueue', isMut: true, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, ], args: [ { name: 'index', type: 'u64', }, { name: 
'programOwner', type: { option: 'publicKey', }, }, { name: 'forester', type: { option: 'publicKey', }, }, { name: 'stateMerkleTreeConfig', type: { defined: 'StateMerkleTreeConfig', }, }, { name: 'nullifierQueueConfig', type: { defined: 'NullifierQueueConfig', }, }, { name: 'additionalBytes', type: 'u64', }, ], }, { name: 'appendLeavesToMerkleTrees', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['Fee payer pays rollover fee.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Checked whether instruction is accessed by a registered program or owner = authority.', ], }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, docs: [ 'Some assumes that the Merkle trees are accessed by a registered program.', 'None assumes that the Merkle trees are accessed by its owner.', ], }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'leaves', type: { vec: { defined: '(u8,[u8;32])', }, }, }, ], }, { name: 'nullifyLeaves', accounts: [ { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, { name: 'logWrapper', isMut: false, isSigner: false, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'nullifierQueue', isMut: true, isSigner: false, }, ], args: [ { name: 'changeLogIndices', type: { vec: 'u64', }, }, { name: 'leavesQueueIndices', type: { vec: 'u16', }, }, { name: 'leafIndices', type: { vec: 'u64', }, }, { name: 'proofs', type: { vec: { vec: { array: ['u8', 32], }, }, }, }, ], }, { name: 'insertIntoNullifierQueues', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['Fee payer pays rollover fee.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'nullifiers', type: { vec: { array: ['u8', 32], }, }, }, ], 
}, { name: 'rolloverStateMerkleTreeAndNullifierQueue', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: [ 'Signer used to receive rollover accounts rentexemption reimbursement.', ], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, isOptional: true, }, { name: 'newStateMerkleTree', isMut: true, isSigner: false, }, { name: 'newNullifierQueue', isMut: true, isSigner: false, }, { name: 'oldStateMerkleTree', isMut: true, isSigner: false, }, { name: 'oldNullifierQueue', isMut: true, isSigner: false, }, ], args: [], }, ], accounts: [ { name: 'registeredProgram', type: { kind: 'struct', fields: [ { name: 'registeredProgramId', type: 'publicKey', }, { name: 'groupAuthorityPda', type: 'publicKey', }, ], }, }, { name: 'accessMetadata', type: { kind: 'struct', fields: [ { name: 'owner', docs: ['Owner of the Merkle tree.'], type: 'publicKey', }, { name: 'programOwner', docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ], type: 'publicKey', }, { name: 'forester', docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. The registry program', 'implements access control to prevent contention during forester. 
The', 'forester pubkey specified in this struct can bypass contention checks.', ], type: 'publicKey', }, ], }, }, { name: 'addressMerkleTreeAccount', type: { kind: 'struct', fields: [ { name: 'metadata', type: { defined: 'MerkleTreeMetadata', }, }, ], }, }, { name: 'groupAuthority', type: { kind: 'struct', fields: [ { name: 'authority', type: 'publicKey', }, { name: 'seed', type: 'publicKey', }, ], }, }, { name: 'merkleTreeMetadata', type: { kind: 'struct', fields: [ { name: 'accessMetadata', type: { defined: 'AccessMetadata', }, }, { name: 'rolloverMetadata', type: { defined: 'RolloverMetadata', }, }, { name: 'associatedQueue', type: 'publicKey', }, { name: 'nextMerkleTree', type: 'publicKey', }, ], }, }, { name: 'stateMerkleTreeAccount', docs: [ 'Concurrent state Merkle tree used for public compressed transactions.', ], type: { kind: 'struct', fields: [ { name: 'metadata', type: { defined: 'MerkleTreeMetadata', }, }, ], }, }, { name: 'queueMetadata', type: { kind: 'struct', fields: [ { name: 'accessMetadata', type: { defined: 'AccessMetadata', }, }, { name: 'rolloverMetadata', type: { defined: 'RolloverMetadata', }, }, { name: 'associatedMerkleTree', type: 'publicKey', }, { name: 'nextQueue', type: 'publicKey', }, { name: 'queueType', type: 'u64', }, ], }, }, { name: 'queueAccount', type: { kind: 'struct', fields: [ { name: 'metadata', type: { defined: 'QueueMetadata', }, }, ], }, }, { name: 'rolloverMetadata', type: { kind: 'struct', fields: [ { name: 'index', docs: ['Unique index.'], type: 'u64', }, { name: 'rolloverFee', docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ], type: 'u64', }, { name: 'rolloverThreshold', docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ], type: 'u64', }, { name: 'networkFee', docs: ['Tip for maintaining the account.'], type: 'u64', }, { name: 
'rolledoverSlot', docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ], type: 'u64', }, { name: 'closeThreshold', docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. No 'close' functionality has been", 'implemented yet.', ], type: 'u64', }, { name: 'additionalBytes', docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ], type: 'u64', }, ], }, }, ], types: [ { name: 'AddressMerkleTreeConfig', type: { kind: 'struct', fields: [ { name: 'height', type: 'u32', }, { name: 'changelogSize', type: 'u64', }, { name: 'rootsSize', type: 'u64', }, { name: 'canopyDepth', type: 'u64', }, { name: 'addressChangelogSize', type: 'u64', }, { name: 'networkFee', type: { option: 'u64', }, }, { name: 'rolloverThreshold', type: { option: 'u64', }, }, { name: 'closeThreshold', type: { option: 'u64', }, }, ], }, }, { name: 'StateMerkleTreeConfig', type: { kind: 'struct', fields: [ { name: 'height', type: 'u32', }, { name: 'changelogSize', type: 'u64', }, { name: 'rootsSize', type: 'u64', }, { name: 'canopyDepth', type: 'u64', }, { name: 'networkFee', type: { option: 'u64', }, }, { name: 'rolloverThreshold', type: { option: 'u64', }, }, { name: 'closeThreshold', type: { option: 'u64', }, }, ], }, }, { name: 'NullifierQueueConfig', type: { kind: 'struct', fields: [ { name: 'capacity', type: 'u16', }, { name: 'sequenceThreshold', type: 'u64', }, { name: 'networkFee', type: { option: 'u64', }, }, ], }, }, { name: 'QueueType', type: { kind: 'enum', variants: [ { name: 'NullifierQueue', }, { name: 'AddressQueue', }, ], }, }, { name: 'AddressQueueConfig', type: { kind: 'alias', value: { defined: 'NullifierQueueConfig', }, }, }, ], errors: [ { code: 6000, name: 'IntegerOverflow', msg: 'Integer overflow', }, { code: 6001, name: 'InvalidAuthority', msg: 'InvalidAuthority', }, { code: 6002, name: 
'NumberOfLeavesMismatch', msg: 'Leaves <> remaining accounts mismatch. The number of remaining accounts must match the number of leaves.', }, { code: 6003, name: 'InvalidNoopPubkey', msg: 'Provided noop program public key is invalid', }, { code: 6004, name: 'NumberOfChangeLogIndicesMismatch', msg: 'Number of change log indices mismatch', }, { code: 6005, name: 'NumberOfIndicesMismatch', msg: 'Number of indices mismatch', }, { code: 6006, name: 'NumberOfProofsMismatch', msg: 'NumberOfProofsMismatch', }, { code: 6007, name: 'InvalidMerkleProof', msg: 'InvalidMerkleProof', }, { code: 6008, name: 'LeafNotFound', msg: 'Could not find the leaf in the queue', }, { code: 6009, name: 'MerkleTreeAndQueueNotAssociated', msg: 'MerkleTreeAndQueueNotAssociated', }, { code: 6010, name: 'MerkleTreeAlreadyRolledOver', msg: 'MerkleTreeAlreadyRolledOver', }, { code: 6011, name: 'NotReadyForRollover', msg: 'NotReadyForRollover', }, { code: 6012, name: 'RolloverNotConfigured', msg: 'RolloverNotConfigured', }, { code: 6013, name: 'NotAllLeavesProcessed', msg: 'NotAllLeavesProcessed', }, { code: 6014, name: 'InvalidQueueType', msg: 'InvalidQueueType', }, { code: 6015, name: 'InputElementsEmpty', msg: 'InputElementsEmpty', }, { code: 6016, name: 'NoLeavesForMerkleTree', msg: 'NoLeavesForMerkleTree', }, { code: 6017, name: 'InvalidAccountSize', msg: 'InvalidAccountSize', }, { code: 6018, name: 'InsufficientRolloverFee', msg: 'InsufficientRolloverFee', }, { code: 6019, name: 'UnsupportedHeight', msg: 'Unsupported Merkle tree height', }, { code: 6020, name: 'UnsupportedCanopyDepth', msg: 'Unsupported canopy depth', }, { code: 6021, name: 'InvalidSequenceThreshold', msg: 'Invalid sequence threshold', }, { code: 6022, name: 'UnsupportedCloseThreshold', msg: 'Unsupported close threshold', }, { code: 6023, name: 'InvalidAccountBalance', msg: 'InvalidAccountBalance', }, { code: 6024, name: 'UnsupportedAdditionalBytes', }, { code: 6025, name: 'InvalidGroup', }, { code: 6026, name: 
'ProofLengthMismatch', }, ], };
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/idls/light_system_program.ts
// ----------------------------------------------------------------------------
// NOTE(review): Auto-generated Anchor IDL for the `light_system_program`
// on-chain program (version 1.2.0). The `LightSystemProgram` type below and
// the `IDL` constant that follows it mirror each other token-for-token so that
// Anchor clients get static typing over the same runtime IDL object.
// Do not edit by hand — regenerate from the program crate (`anchor build`).
// ----------------------------------------------------------------------------
export type LightSystemProgram = { version: '1.2.0'; name: 'light_system_program'; constants: [ { name: 'SOL_POOL_PDA_SEED'; type: 'bytes'; value: '[115, 111, 108, 95, 112, 111, 111, 108, 95, 112, 100, 97]'; }, ]; instructions: [ { name: 'initCpiContextAccount'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; }, { name: 'cpiContextAccount'; isMut: true; isSigner: false; }, { name: 'associatedMerkleTree'; isMut: false; isSigner: false; }, ]; args: []; }, { name: 'invoke'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: [ 'Fee payer needs to be mutable to pay rollover and protocol fees.', ]; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; docs: [ 'This pda is used to invoke the account compression program.', ]; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; docs: ['Merkle trees.']; }, { name: 'solPoolPda'; isMut: true; isSigner: false; isOptional: true; docs: [ 'Sol pool pda is used to store the native sol that has been compressed.', "It's only required when compressing or decompressing sol.", ]; }, { name: 'decompressionRecipient'; isMut: true; isSigner: false; isOptional: true; docs: [ 'Only needs to be provided for decompression as a recipient for the', 'decompressed sol.', 'Compressed sol originate from authority.', ]; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'invokeCpi'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: [ 'Fee payer needs to be mutable to pay rollover and protocol fees.', ]; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; 
isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'invokingProgram'; isMut: false; isSigner: false; }, { name: 'solPoolPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'decompressionRecipient'; isMut: true; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'cpiContextAccount'; isMut: true; isSigner: false; isOptional: true; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'stubIdlBuild'; docs: [ 'This function is a stub to allow Anchor to include the input types in', 'the IDL. It should not be included in production builds nor be called in', 'practice.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: [ 'Fee payer needs to be mutable to pay rollover and protocol fees.', ]; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; docs: [ 'This pda is used to invoke the account compression program.', ]; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; docs: ['Merkle trees.']; }, { name: 'solPoolPda'; isMut: true; isSigner: false; isOptional: true; docs: [ 'Sol pool pda is used to store the native sol that has been compressed.', "It's only required when compressing or decompressing sol.", ]; }, { name: 'decompressionRecipient'; isMut: true; isSigner: false; isOptional: true; docs: [ 'Only needs to be provided for decompression as a recipient for the', 'decompressed sol.', 'Compressed sol originate from authority.', ]; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs1'; type: { defined: 'InstructionDataInvoke'; }; }, { name: 'inputs2'; type: { defined: 'InstructionDataInvokeCpi'; }; }, { name: 'inputs3'; type: { defined: 'PublicTransactionEvent'; }; 
}, ]; }, ]; accounts: [ { name: 'stateMerkleTreeAccount'; docs: [ 'Concurrent state Merkle tree used for public compressed transactions.', ]; type: { kind: 'struct'; fields: [ { name: 'metadata'; type: { defined: 'MerkleTreeMetadata'; }; }, ]; }; }, { name: 'cpiContextAccount'; docs: [ 'Collects instruction data without executing a compressed transaction.', 'Signer checks are performed on instruction data.', 'Collected instruction data is combined with the instruction data of the executing cpi,', 'and executed as a single transaction.', 'This enables to use input compressed accounts that are owned by multiple programs,', 'with one zero-knowledge proof.', ]; type: { kind: 'struct'; fields: [ { name: 'feePayer'; type: 'publicKey'; }, { name: 'associatedMerkleTree'; type: 'publicKey'; }, { name: 'context'; type: { vec: { defined: 'InstructionDataInvokeCpi'; }; }; }, ]; }; }, ]; types: [ { name: 'AccessMetadata'; type: { kind: 'struct'; fields: [ { name: 'owner'; docs: ['Owner of the Merkle tree.']; type: 'publicKey'; }, { name: 'programOwner'; docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ]; type: 'publicKey'; }, { name: 'forester'; docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. The registry program', 'implements access control to prevent contention during forester. 
The', 'forester pubkey specified in this struct can bypass contention checks.', ]; type: 'publicKey'; }, ]; }; }, { name: 'MerkleTreeMetadata'; type: { kind: 'struct'; fields: [ { name: 'accessMetadata'; type: { defined: 'AccessMetadata'; }; }, { name: 'rolloverMetadata'; type: { defined: 'RolloverMetadata'; }; }, { name: 'associatedQueue'; type: 'publicKey'; }, { name: 'nextMerkleTree'; type: 'publicKey'; }, ]; }; }, { name: 'RolloverMetadata'; type: { kind: 'struct'; fields: [ { name: 'index'; docs: ['Unique index.']; type: 'u64'; }, { name: 'rolloverFee'; docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ]; type: 'u64'; }, { name: 'rolloverThreshold'; docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ]; type: 'u64'; }, { name: 'networkFee'; docs: ['Tip for maintaining the account.']; type: 'u64'; }, { name: 'rolledoverSlot'; docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ]; type: 'u64'; }, { name: 'closeThreshold'; docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. 
No 'close' functionality has been", 'implemented yet.', ]; type: 'u64'; }, { name: 'additionalBytes'; docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ]; type: 'u64'; }, ]; }; }, { name: 'InstructionDataInvoke'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'inputCompressedAccountsWithMerkleContext'; type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext'; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'newAddressParams'; type: { vec: { defined: 'NewAddressParamsPacked'; }; }; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; }, ]; }; }, { name: 'NewAddressParamsPacked'; type: { kind: 'struct'; fields: [ { name: 'seed'; type: { array: ['u8', 32]; }; }, { name: 'addressQueueAccountIndex'; type: 'u8'; }, { name: 'addressMerkleTreeAccountIndex'; type: 'u8'; }, { name: 'addressMerkleTreeRootIndex'; type: 'u16'; }, ]; }; }, { name: 'OutputCompressedAccountWithPackedContext'; type: { kind: 'struct'; fields: [ { name: 'compressedAccount'; type: { defined: 'CompressedAccount'; }; }, { name: 'merkleTreeIndex'; type: 'u8'; }, ]; }; }, { name: 'CompressedProof'; type: { kind: 'struct'; fields: [ { name: 'a'; type: { array: ['u8', 32]; }; }, { name: 'b'; type: { array: ['u8', 64]; }; }, { name: 'c'; type: { array: ['u8', 32]; }; }, ]; }; }, { name: 'InstructionDataInvokeCpi'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'newAddressParams'; type: { vec: { defined: 'NewAddressParamsPacked'; }; }; }, { name: 'inputCompressedAccountsWithMerkleContext'; type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext'; }; }; }, { name: 
'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'cpiContext'; type: { option: { defined: 'CompressedCpiContext'; }; }; }, ]; }; }, { name: 'CompressedCpiContext'; type: { kind: 'struct'; fields: [ { name: 'setContext'; docs: [ 'Is set by the program that is invoking the CPI to signal that is should', 'set the cpi context.', ]; type: 'bool'; }, { name: 'firstSetContext'; docs: [ 'Is set to wipe the cpi context since someone could have set it before', 'with unrelated data.', ]; type: 'bool'; }, { name: 'cpiContextAccountIndex'; docs: [ 'Index of cpi context account in remaining accounts.', ]; type: 'u8'; }, ]; }; }, { name: 'CompressedAccount'; type: { kind: 'struct'; fields: [ { name: 'owner'; type: 'publicKey'; }, { name: 'lamports'; type: 'u64'; }, { name: 'address'; type: { option: { array: ['u8', 32]; }; }; }, { name: 'data'; type: { option: { defined: 'CompressedAccountData'; }; }; }, ]; }; }, { name: 'CompressedAccountData'; type: { kind: 'struct'; fields: [ { name: 'discriminator'; type: { array: ['u8', 8]; }; }, { name: 'data'; type: 'bytes'; }, { name: 'dataHash'; type: { array: ['u8', 32]; }; }, ]; }; }, { name: 'PackedCompressedAccountWithMerkleContext'; type: { kind: 'struct'; fields: [ { name: 'compressedAccount'; type: { defined: 'CompressedAccount'; }; }, { name: 'merkleContext'; type: { defined: 'PackedMerkleContext'; }; }, { name: 'rootIndex'; docs: [ 'Index of root used in inclusion validity proof.', ]; type: 'u16'; }, { name: 'readOnly'; docs: [ 'Placeholder to mark accounts read-only unimplemented set to false.', ]; type: 'bool'; }, ]; }; }, { name: 'PackedMerkleContext'; type: { kind: 'struct'; fields: [ { name: 'merkleTreePubkeyIndex'; type: 'u8'; }, { name: 'nullifierQueuePubkeyIndex'; type: 'u8'; }, { name: 'leafIndex'; 
type: 'u32'; }, { name: 'queueIndex'; docs: [ 'Index of leaf in queue. Placeholder of batched Merkle tree updates', 'currently unimplemented.', ]; type: { option: { defined: 'QueueIndex'; }; }; }, ]; }; }, { name: 'QueueIndex'; type: { kind: 'struct'; fields: [ { name: 'queueId'; docs: ['Id of queue in queue account.']; type: 'u8'; }, { name: 'index'; docs: ['Index of compressed account hash in queue.']; type: 'u16'; }, ]; }; }, { name: 'MerkleTreeSequenceNumber'; type: { kind: 'struct'; fields: [ { name: 'pubkey'; type: 'publicKey'; }, { name: 'seq'; type: 'u64'; }, ]; }; }, { name: 'PublicTransactionEvent'; type: { kind: 'struct'; fields: [ { name: 'inputCompressedAccountHashes'; type: { vec: { array: ['u8', 32]; }; }; }, { name: 'outputCompressedAccountHashes'; type: { vec: { array: ['u8', 32]; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'outputLeafIndices'; type: { vec: 'u32'; }; }, { name: 'sequenceNumbers'; type: { vec: { defined: 'MerkleTreeSequenceNumber'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'pubkeyArray'; type: { vec: 'publicKey'; }; }, { name: 'message'; type: { option: 'bytes'; }; }, ]; }; }, ]; errors: [ { code: 6000; name: 'SumCheckFailed'; msg: 'Sum check failed'; }, { code: 6001; name: 'SignerCheckFailed'; msg: 'Signer check failed'; }, { code: 6002; name: 'CpiSignerCheckFailed'; msg: 'Cpi signer check failed'; }, { code: 6003; name: 'ComputeInputSumFailed'; msg: 'Computing input sum failed.'; }, { code: 6004; name: 'ComputeOutputSumFailed'; msg: 'Computing output sum failed.'; }, { code: 6005; name: 'ComputeRpcSumFailed'; msg: 'Computing rpc sum failed.'; }, { code: 6006; name: 'InvalidAddress'; msg: 'InvalidAddress'; }, { code: 6007; name: 'DeriveAddressError'; msg: 'DeriveAddressError'; }, { code: 6008; name: 
'CompressedSolPdaUndefinedForCompressSol'; msg: 'CompressedSolPdaUndefinedForCompressSol'; }, { code: 6009; name: 'DeCompressLamportsUndefinedForCompressSol'; msg: 'DeCompressLamportsUndefinedForCompressSol'; }, { code: 6010; name: 'CompressedSolPdaUndefinedForDecompressSol'; msg: 'CompressedSolPdaUndefinedForDecompressSol'; }, { code: 6011; name: 'DeCompressLamportsUndefinedForDecompressSol'; msg: 'DeCompressLamportsUndefinedForDecompressSol'; }, { code: 6012; name: 'DecompressRecipientUndefinedForDecompressSol'; msg: 'DecompressRecipientUndefinedForDecompressSol'; }, { code: 6013; name: 'WriteAccessCheckFailed'; msg: 'WriteAccessCheckFailed'; }, { code: 6014; name: 'InvokingProgramNotProvided'; msg: 'InvokingProgramNotProvided'; }, { code: 6015; name: 'InvalidCapacity'; msg: 'InvalidCapacity'; }, { code: 6016; name: 'InvalidMerkleTreeOwner'; msg: 'InvalidMerkleTreeOwner'; }, { code: 6017; name: 'ProofIsNone'; msg: 'ProofIsNone'; }, { code: 6018; name: 'ProofIsSome'; msg: 'Proof is some but no input compressed accounts or new addresses provided.'; }, { code: 6019; name: 'EmptyInputs'; msg: 'EmptyInputs'; }, { code: 6020; name: 'CpiContextAccountUndefined'; msg: 'CpiContextAccountUndefined'; }, { code: 6021; name: 'CpiContextEmpty'; msg: 'CpiContextEmpty'; }, { code: 6022; name: 'CpiContextMissing'; msg: 'CpiContextMissing'; }, { code: 6023; name: 'DecompressionRecipientDefined'; msg: 'DecompressionRecipientDefined'; }, { code: 6024; name: 'SolPoolPdaDefined'; msg: 'SolPoolPdaDefined'; }, { code: 6025; name: 'AppendStateFailed'; msg: 'AppendStateFailed'; }, { code: 6026; name: 'InstructionNotCallable'; msg: 'The instruction is not callable'; }, { code: 6027; name: 'CpiContextFeePayerMismatch'; msg: 'CpiContextFeePayerMismatch'; }, { code: 6028; name: 'CpiContextAssociatedMerkleTreeMismatch'; msg: 'CpiContextAssociatedMerkleTreeMismatch'; }, { code: 6029; name: 'NoInputs'; msg: 'NoInputs'; }, { code: 6030; name: 'InputMerkleTreeIndicesNotInOrder'; msg: 'Input merkle 
tree indices are not in ascending order.'; }, { code: 6031; name: 'OutputMerkleTreeIndicesNotInOrder'; msg: 'Output merkle tree indices are not in ascending order.'; }, { code: 6032; name: 'OutputMerkleTreeNotUnique'; }, { code: 6033; name: 'DataFieldUndefined'; }, ]; };
// NOTE(review): Runtime IDL value consumed by Anchor's `Program` constructor;
// it must stay structurally identical to the `LightSystemProgram` type above.
export const IDL: LightSystemProgram = { version: '1.2.0', name: 'light_system_program', constants: [ { name: 'SOL_POOL_PDA_SEED', type: 'bytes', value: '[115, 111, 108, 95, 112, 111, 111, 108, 95, 112, 100, 97]', }, ], instructions: [ { name: 'initCpiContextAccount', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, }, { name: 'cpiContextAccount', isMut: true, isSigner: false, }, { name: 'associatedMerkleTree', isMut: false, isSigner: false, }, ], args: [], }, { name: 'invoke', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: [ 'Fee payer needs to be mutable to pay rollover and protocol fees.', ], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, docs: [ 'This pda is used to invoke the account compression program.', ], }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, docs: ['Merkle trees.'], }, { name: 'solPoolPda', isMut: true, isSigner: false, isOptional: true, docs: [ 'Sol pool pda is used to store the native sol that has been compressed.', "It's only required when compressing or decompressing sol.", ], }, { name: 'decompressionRecipient', isMut: true, isSigner: false, isOptional: true, docs: [ 'Only needs to be provided for decompression as a recipient for the', 'decompressed sol.', 'Compressed sol originate from authority.', ], }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'invokeCpi', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: [ 'Fee payer needs 
to be mutable to pay rollover and protocol fees.', ], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'invokingProgram', isMut: false, isSigner: false, }, { name: 'solPoolPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'decompressionRecipient', isMut: true, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'cpiContextAccount', isMut: true, isSigner: false, isOptional: true, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'stubIdlBuild', docs: [ 'This function is a stub to allow Anchor to include the input types in', 'the IDL. It should not be included in production builds nor be called in', 'practice.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: [ 'Fee payer needs to be mutable to pay rollover and protocol fees.', ], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, docs: [ 'This pda is used to invoke the account compression program.', ], }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, docs: ['Merkle trees.'], }, { name: 'solPoolPda', isMut: true, isSigner: false, isOptional: true, docs: [ 'Sol pool pda is used to store the native sol that has been compressed.', "It's only required when compressing or decompressing sol.", ], }, { name: 'decompressionRecipient', isMut: true, isSigner: false, isOptional: true, docs: [ 'Only needs to be provided for decompression as a recipient for the', 'decompressed sol.', 'Compressed sol originate from authority.', ], }, 
{ name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs1', type: { defined: 'InstructionDataInvoke', }, }, { name: 'inputs2', type: { defined: 'InstructionDataInvokeCpi', }, }, { name: 'inputs3', type: { defined: 'PublicTransactionEvent', }, }, ], }, ], accounts: [ { name: 'stateMerkleTreeAccount', docs: [ 'Concurrent state Merkle tree used for public compressed transactions.', ], type: { kind: 'struct', fields: [ { name: 'metadata', type: { defined: 'MerkleTreeMetadata', }, }, ], }, }, { name: 'cpiContextAccount', docs: [ 'Collects instruction data without executing a compressed transaction.', 'Signer checks are performed on instruction data.', 'Collected instruction data is combined with the instruction data of the executing cpi,', 'and executed as a single transaction.', 'This enables to use input compressed accounts that are owned by multiple programs,', 'with one zero-knowledge proof.', ], type: { kind: 'struct', fields: [ { name: 'feePayer', type: 'publicKey', }, { name: 'associatedMerkleTree', type: 'publicKey', }, { name: 'context', type: { vec: { defined: 'InstructionDataInvokeCpi', }, }, }, ], }, }, ], types: [ { name: 'AccessMetadata', type: { kind: 'struct', fields: [ { name: 'owner', docs: ['Owner of the Merkle tree.'], type: 'publicKey', }, { name: 'programOwner', docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ], type: 'publicKey', }, { name: 'forester', docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. The registry program', 'implements access control to prevent contention during forester. 
The', 'forester pubkey specified in this struct can bypass contention checks.', ], type: 'publicKey', }, ], }, }, { name: 'MerkleTreeMetadata', type: { kind: 'struct', fields: [ { name: 'accessMetadata', type: { defined: 'AccessMetadata', }, }, { name: 'rolloverMetadata', type: { defined: 'RolloverMetadata', }, }, { name: 'associatedQueue', type: 'publicKey', }, { name: 'nextMerkleTree', type: 'publicKey', }, ], }, }, { name: 'RolloverMetadata', type: { kind: 'struct', fields: [ { name: 'index', docs: ['Unique index.'], type: 'u64', }, { name: 'rolloverFee', docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ], type: 'u64', }, { name: 'rolloverThreshold', docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ], type: 'u64', }, { name: 'networkFee', docs: ['Tip for maintaining the account.'], type: 'u64', }, { name: 'rolledoverSlot', docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ], type: 'u64', }, { name: 'closeThreshold', docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. 
No 'close' functionality has been", 'implemented yet.', ], type: 'u64', }, { name: 'additionalBytes', docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ], type: 'u64', }, ], }, }, { name: 'InstructionDataInvoke', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'inputCompressedAccountsWithMerkleContext', type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext', }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'newAddressParams', type: { vec: { defined: 'NewAddressParamsPacked', }, }, }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', }, ], }, }, { name: 'NewAddressParamsPacked', type: { kind: 'struct', fields: [ { name: 'seed', type: { array: ['u8', 32], }, }, { name: 'addressQueueAccountIndex', type: 'u8', }, { name: 'addressMerkleTreeAccountIndex', type: 'u8', }, { name: 'addressMerkleTreeRootIndex', type: 'u16', }, ], }, }, { name: 'OutputCompressedAccountWithPackedContext', type: { kind: 'struct', fields: [ { name: 'compressedAccount', type: { defined: 'CompressedAccount', }, }, { name: 'merkleTreeIndex', type: 'u8', }, ], }, }, { name: 'CompressedProof', type: { kind: 'struct', fields: [ { name: 'a', type: { array: ['u8', 32], }, }, { name: 'b', type: { array: ['u8', 64], }, }, { name: 'c', type: { array: ['u8', 32], }, }, ], }, }, { name: 'InstructionDataInvokeCpi', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'newAddressParams', type: { vec: { defined: 'NewAddressParamsPacked', }, }, }, { name: 'inputCompressedAccountsWithMerkleContext', type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext', }, }, }, { name: 
'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', }, { name: 'cpiContext', type: { option: { defined: 'CompressedCpiContext', }, }, }, ], }, }, { name: 'CompressedCpiContext', type: { kind: 'struct', fields: [ { name: 'setContext', docs: [ 'Is set by the program that is invoking the CPI to signal that is should', 'set the cpi context.', ], type: 'bool', }, { name: 'firstSetContext', docs: [ 'Is set to wipe the cpi context since someone could have set it before', 'with unrelated data.', ], type: 'bool', }, { name: 'cpiContextAccountIndex', docs: [ 'Index of cpi context account in remaining accounts.', ], type: 'u8', }, ], }, }, { name: 'CompressedAccount', type: { kind: 'struct', fields: [ { name: 'owner', type: 'publicKey', }, { name: 'lamports', type: 'u64', }, { name: 'address', type: { option: { array: ['u8', 32], }, }, }, { name: 'data', type: { option: { defined: 'CompressedAccountData', }, }, }, ], }, }, { name: 'CompressedAccountData', type: { kind: 'struct', fields: [ { name: 'discriminator', type: { array: ['u8', 8], }, }, { name: 'data', type: 'bytes', }, { name: 'dataHash', type: { array: ['u8', 32], }, }, ], }, }, { name: 'PackedCompressedAccountWithMerkleContext', type: { kind: 'struct', fields: [ { name: 'compressedAccount', type: { defined: 'CompressedAccount', }, }, { name: 'merkleContext', type: { defined: 'PackedMerkleContext', }, }, { name: 'rootIndex', docs: [ 'Index of root used in inclusion validity proof.', ], type: 'u16', }, { name: 'readOnly', docs: [ 'Placeholder to mark accounts read-only unimplemented set to false.', ], type: 'bool', }, ], }, }, { name: 'PackedMerkleContext', type: { kind: 'struct', fields: [ { name: 'merkleTreePubkeyIndex', type: 'u8', }, { name: 'nullifierQueuePubkeyIndex', type: 'u8', }, { name: 'leafIndex', 
type: 'u32', }, { name: 'queueIndex', docs: [ 'Index of leaf in queue. Placeholder of batched Merkle tree updates', 'currently unimplemented.', ], type: { option: { defined: 'QueueIndex', }, }, }, ], }, }, { name: 'QueueIndex', type: { kind: 'struct', fields: [ { name: 'queueId', docs: ['Id of queue in queue account.'], type: 'u8', }, { name: 'index', docs: ['Index of compressed account hash in queue.'], type: 'u16', }, ], }, }, { name: 'MerkleTreeSequenceNumber', type: { kind: 'struct', fields: [ { name: 'pubkey', type: 'publicKey', }, { name: 'seq', type: 'u64', }, ], }, }, { name: 'PublicTransactionEvent', type: { kind: 'struct', fields: [ { name: 'inputCompressedAccountHashes', type: { vec: { array: ['u8', 32], }, }, }, { name: 'outputCompressedAccountHashes', type: { vec: { array: ['u8', 32], }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'outputLeafIndices', type: { vec: 'u32', }, }, { name: 'sequenceNumbers', type: { vec: { defined: 'MerkleTreeSequenceNumber', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'pubkeyArray', type: { vec: 'publicKey', }, }, { name: 'message', type: { option: 'bytes', }, }, ], }, }, ], errors: [ { code: 6000, name: 'SumCheckFailed', msg: 'Sum check failed', }, { code: 6001, name: 'SignerCheckFailed', msg: 'Signer check failed', }, { code: 6002, name: 'CpiSignerCheckFailed', msg: 'Cpi signer check failed', }, { code: 6003, name: 'ComputeInputSumFailed', msg: 'Computing input sum failed.', }, { code: 6004, name: 'ComputeOutputSumFailed', msg: 'Computing output sum failed.', }, { code: 6005, name: 'ComputeRpcSumFailed', msg: 'Computing rpc sum failed.', }, { code: 6006, name: 'InvalidAddress', msg: 'InvalidAddress', }, { code: 6007, name: 'DeriveAddressError', msg: 'DeriveAddressError', }, { code: 6008, name: 
'CompressedSolPdaUndefinedForCompressSol', msg: 'CompressedSolPdaUndefinedForCompressSol', }, { code: 6009, name: 'DeCompressLamportsUndefinedForCompressSol', msg: 'DeCompressLamportsUndefinedForCompressSol', }, { code: 6010, name: 'CompressedSolPdaUndefinedForDecompressSol', msg: 'CompressedSolPdaUndefinedForDecompressSol', }, { code: 6011, name: 'DeCompressLamportsUndefinedForDecompressSol', msg: 'DeCompressLamportsUndefinedForDecompressSol', }, { code: 6012, name: 'DecompressRecipientUndefinedForDecompressSol', msg: 'DecompressRecipientUndefinedForDecompressSol', }, { code: 6013, name: 'WriteAccessCheckFailed', msg: 'WriteAccessCheckFailed', }, { code: 6014, name: 'InvokingProgramNotProvided', msg: 'InvokingProgramNotProvided', }, { code: 6015, name: 'InvalidCapacity', msg: 'InvalidCapacity', }, { code: 6016, name: 'InvalidMerkleTreeOwner', msg: 'InvalidMerkleTreeOwner', }, { code: 6017, name: 'ProofIsNone', msg: 'ProofIsNone', }, { code: 6018, name: 'ProofIsSome', msg: 'Proof is some but no input compressed accounts or new addresses provided.', }, { code: 6019, name: 'EmptyInputs', msg: 'EmptyInputs', }, { code: 6020, name: 'CpiContextAccountUndefined', msg: 'CpiContextAccountUndefined', }, { code: 6021, name: 'CpiContextEmpty', msg: 'CpiContextEmpty', }, { code: 6022, name: 'CpiContextMissing', msg: 'CpiContextMissing', }, { code: 6023, name: 'DecompressionRecipientDefined', msg: 'DecompressionRecipientDefined', }, { code: 6024, name: 'SolPoolPdaDefined', msg: 'SolPoolPdaDefined', }, { code: 6025, name: 'AppendStateFailed', msg: 'AppendStateFailed', }, { code: 6026, name: 'InstructionNotCallable', msg: 'The instruction is not callable', }, { code: 6027, name: 'CpiContextFeePayerMismatch', msg: 'CpiContextFeePayerMismatch', }, { code: 6028, name: 'CpiContextAssociatedMerkleTreeMismatch', msg: 'CpiContextAssociatedMerkleTreeMismatch', }, { code: 6029, name: 'NoInputs', msg: 'NoInputs', }, { code: 6030, name: 'InputMerkleTreeIndicesNotInOrder', msg: 'Input merkle 
tree indices are not in ascending order.', }, { code: 6031, name: 'OutputMerkleTreeIndicesNotInOrder', msg: 'Output merkle tree indices are not in ascending order.', }, { code: 6032, name: 'OutputMerkleTreeNotUnique', }, { code: 6033, name: 'DataFieldUndefined', }, ], };
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/idls/index.ts
/**
 * Barrel module for the generated Anchor IDLs.
 *
 * Each program contributes two symbols: the runtime `IDL` object (re-exported
 * under a program-specific alias) and its generated TypeScript type. Using
 * direct re-export syntax avoids binding the names locally; the public
 * surface is identical to importing and re-exporting them by hand.
 */
export { IDL as AccountCompressionIDL } from './account_compression';
export type { AccountCompression } from './account_compression';

export { IDL as LightRegistryIDL } from './light_registry';
export type { LightRegistry } from './light_registry';

export { IDL as LightSystemIDL } from './light_system_program';
export type { LightSystemProgram as LightSystem } from './light_system_program';

export { IDL as LightCompressedTokenIDL } from './light_compressed_token';
export type { LightCompressedToken } from './light_compressed_token';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/idls/light_registry.ts
export type LightRegistry = { version: '1.2.0'; name: 'light_registry'; constants: [ { name: 'FORESTER_SEED'; type: 'bytes'; value: '[102, 111, 114, 101, 115, 116, 101, 114]'; }, { name: 'FORESTER_EPOCH_SEED'; type: 'bytes'; value: '[102, 111, 114, 101, 115, 116, 101, 114, 95, 101, 112, 111, 99, 104]'; }, { name: 'PROTOCOL_CONFIG_PDA_SEED'; type: 'bytes'; value: '[97, 117, 116, 104, 111, 114, 105, 116, 121]'; }, ]; instructions: [ { name: 'initializeProtocolConfig'; docs: [ 'Initializes the protocol config pda. Can only be called once by the', 'program account keypair.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'protocolConfigPda'; isMut: true; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'bump'; type: 'u8'; }, { name: 'protocolConfig'; type: { defined: 'ProtocolConfig'; }; }, ]; }, { name: 'updateProtocolConfig'; accounts: [ { name: 'feePayer'; isMut: false; isSigner: true; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'protocolConfigPda'; isMut: true; isSigner: false; }, { name: 'newAuthority'; isMut: false; isSigner: true; isOptional: true; }, ]; args: [ { name: 'protocolConfig'; type: { option: { defined: 'ProtocolConfig'; }; }; }, ]; }, { name: 'registerSystemProgram'; accounts: [ { name: 'authority'; isMut: true; isSigner: true; }, { name: 'protocolConfigPda'; isMut: true; isSigner: false; }, { name: 'cpiAuthority'; isMut: true; isSigner: false; }, { name: 'groupPda'; isMut: true; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: true; isSigner: false; }, { name: 'programToBeRegistered'; isMut: false; isSigner: true; docs: [ '- is signer so that only the program deployer can register a program.', ]; }, ]; 
args: [ { name: 'bump'; type: 'u8'; }, ]; }, { name: 'deregisterSystemProgram'; accounts: [ { name: 'authority'; isMut: true; isSigner: true; }, { name: 'protocolConfigPda'; isMut: true; isSigner: false; }, { name: 'cpiAuthority'; isMut: true; isSigner: false; }, { name: 'groupPda'; isMut: true; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: true; isSigner: false; }, ]; args: [ { name: 'bump'; type: 'u8'; }, ]; }, { name: 'registerForester'; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'protocolConfigPda'; isMut: false; isSigner: false; }, { name: 'foresterPda'; isMut: true; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'bump'; type: 'u8'; }, { name: 'authority'; type: 'publicKey'; }, { name: 'config'; type: { defined: 'ForesterConfig'; }; }, { name: 'weight'; type: { option: 'u64'; }; }, ]; }, { name: 'updateForesterPda'; accounts: [ { name: 'authority'; isMut: false; isSigner: true; }, { name: 'foresterPda'; isMut: true; isSigner: false; }, { name: 'newAuthority'; isMut: false; isSigner: true; isOptional: true; }, ]; args: [ { name: 'config'; type: { option: { defined: 'ForesterConfig'; }; }; }, ]; }, { name: 'updateForesterPdaWeight'; accounts: [ { name: 'authority'; isMut: false; isSigner: true; }, { name: 'protocolConfigPda'; isMut: false; isSigner: false; }, { name: 'foresterPda'; isMut: true; isSigner: false; }, ]; args: [ { name: 'newWeight'; type: 'u64'; }, ]; }, { name: 'registerForesterEpoch'; docs: [ 'Registers the forester for the epoch.', '1. Only the forester can register herself for the epoch.', '2. Protocol config is copied.', '3. 
Epoch account is created if needed.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'foresterPda'; isMut: false; isSigner: false; }, { name: 'foresterEpochPda'; isMut: true; isSigner: false; docs: [ 'Instruction checks that current_epoch is the the current epoch and that', 'the epoch is in registration phase.', ]; }, { name: 'protocolConfig'; isMut: false; isSigner: false; }, { name: 'epochPda'; isMut: true; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'epoch'; type: 'u64'; }, ]; }, { name: 'finalizeRegistration'; docs: [ 'This transaction can be included as additional instruction in the first', 'work instructions during the active phase.', 'Registration Period must be over.', ]; accounts: [ { name: 'authority'; isMut: false; isSigner: true; }, { name: 'foresterEpochPda'; isMut: true; isSigner: false; }, { name: 'epochPda'; isMut: false; isSigner: false; }, ]; args: []; }, { name: 'reportWork'; accounts: [ { name: 'authority'; isMut: false; isSigner: true; }, { name: 'foresterEpochPda'; isMut: true; isSigner: false; }, { name: 'epochPda'; isMut: true; isSigner: false; }, ]; args: []; }, { name: 'initializeAddressMerkleTree'; accounts: [ { name: 'authority'; isMut: true; isSigner: true; docs: [ 'Anyone can create new trees just the fees cannot be set arbitrarily.', ]; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'queue'; isMut: true; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'cpiAuthority'; isMut: true; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'protocolConfigPda'; isMut: false; isSigner: false; }, { name: 'cpiContextAccount'; isMut: false; isSigner: false; isOptional: true; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; isOptional: true; }, ]; args: [ { name: 'bump'; type: 'u8'; }, { 
name: 'programOwner'; type: { option: 'publicKey'; }; }, { name: 'forester'; type: { option: 'publicKey'; }; }, { name: 'merkleTreeConfig'; type: { defined: 'AddressMerkleTreeConfig'; }; }, { name: 'queueConfig'; type: { defined: 'AddressQueueConfig'; }; }, ]; }, { name: 'initializeStateMerkleTree'; accounts: [ { name: 'authority'; isMut: true; isSigner: true; docs: [ 'Anyone can create new trees just the fees cannot be set arbitrarily.', ]; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'queue'; isMut: true; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'cpiAuthority'; isMut: true; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'protocolConfigPda'; isMut: false; isSigner: false; }, { name: 'cpiContextAccount'; isMut: false; isSigner: false; isOptional: true; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; isOptional: true; }, ]; args: [ { name: 'bump'; type: 'u8'; }, { name: 'programOwner'; type: { option: 'publicKey'; }; }, { name: 'forester'; type: { option: 'publicKey'; }; }, { name: 'merkleTreeConfig'; type: { defined: 'StateMerkleTreeConfig'; }; }, { name: 'queueConfig'; type: { defined: 'NullifierQueueConfig'; }; }, ]; }, { name: 'nullify'; accounts: [ { name: 'registeredForesterPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthority'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'logWrapper'; isMut: false; isSigner: false; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'nullifierQueue'; isMut: true; isSigner: false; }, ]; args: [ { name: 'bump'; type: 'u8'; }, { name: 'changeLogIndices'; type: { vec: 'u64'; }; }, { name: 'leavesQueueIndices'; type: { vec: 'u16'; }; }, { name: 'indices'; type: { vec: 
'u64'; }; }, { name: 'proofs'; type: { vec: { vec: { array: ['u8', 32]; }; }; }; }, ]; }, { name: 'updateAddressMerkleTree'; accounts: [ { name: 'registeredForesterPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthority'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'queue'; isMut: true; isSigner: false; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'logWrapper'; isMut: false; isSigner: false; }, ]; args: [ { name: 'bump'; type: 'u8'; }, { name: 'changelogIndex'; type: 'u16'; }, { name: 'indexedChangelogIndex'; type: 'u16'; }, { name: 'value'; type: 'u16'; }, { name: 'lowAddressIndex'; type: 'u64'; }, { name: 'lowAddressValue'; type: { array: ['u8', 32]; }; }, { name: 'lowAddressNextIndex'; type: 'u64'; }, { name: 'lowAddressNextValue'; type: { array: ['u8', 32]; }; }, { name: 'lowAddressProof'; type: { array: [ { array: ['u8', 32]; }, 16, ]; }; }, ]; }, { name: 'rolloverAddressMerkleTreeAndQueue'; accounts: [ { name: 'registeredForesterPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'authority'; isMut: true; isSigner: true; }, { name: 'cpiAuthority'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'newMerkleTree'; isMut: true; isSigner: false; }, { name: 'newQueue'; isMut: true; isSigner: false; }, { name: 'oldMerkleTree'; isMut: true; isSigner: false; }, { name: 'oldQueue'; isMut: true; isSigner: false; }, ]; args: [ { name: 'bump'; type: 'u8'; }, ]; }, { name: 'rolloverStateMerkleTreeAndQueue'; accounts: [ { name: 'registeredForesterPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'authority'; isMut: true; isSigner: true; }, { name: 'cpiAuthority'; isMut: false; isSigner: 
false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'newMerkleTree'; isMut: true; isSigner: false; }, { name: 'newQueue'; isMut: true; isSigner: false; }, { name: 'oldMerkleTree'; isMut: true; isSigner: false; }, { name: 'oldQueue'; isMut: true; isSigner: false; }, { name: 'cpiContextAccount'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'protocolConfigPda'; isMut: false; isSigner: false; }, ]; args: [ { name: 'bump'; type: 'u8'; }, ]; }, ]; accounts: [ { name: 'epochPda'; docs: ['Is used for tallying and rewards calculation']; type: { kind: 'struct'; fields: [ { name: 'epoch'; type: 'u64'; }, { name: 'protocolConfig'; type: { defined: 'ProtocolConfig'; }; }, { name: 'totalWork'; type: 'u64'; }, { name: 'registeredWeight'; type: 'u64'; }, ]; }; }, { name: 'foresterEpochPda'; type: { kind: 'struct'; fields: [ { name: 'authority'; type: 'publicKey'; }, { name: 'config'; type: { defined: 'ForesterConfig'; }; }, { name: 'epoch'; type: 'u64'; }, { name: 'weight'; type: 'u64'; }, { name: 'workCounter'; type: 'u64'; }, { name: 'hasReportedWork'; docs: [ 'Work can be reported in an extra round to earn extra performance based', 'rewards.', ]; type: 'bool'; }, { name: 'foresterIndex'; docs: [ 'Start index of the range that determines when the forester is eligible to perform work.', 'End index is forester_start_index + weight', ]; type: 'u64'; }, { name: 'epochActivePhaseStartSlot'; type: 'u64'; }, { name: 'totalEpochWeight'; docs: [ 'Total epoch weight is registered weight of the epoch account after', 'registration is concluded and active epoch period starts.', ]; type: { option: 'u64'; }; }, { name: 'protocolConfig'; type: { defined: 'ProtocolConfig'; }; }, { name: 'finalizeCounter'; docs: [ 'Incremented every time finalize registration is called.', ]; type: 'u64'; }, ]; }; }, { name: 'protocolConfigPda'; type: { 
kind: 'struct'; fields: [ { name: 'authority'; type: 'publicKey'; }, { name: 'bump'; type: 'u8'; }, { name: 'config'; type: { defined: 'ProtocolConfig'; }; }, ]; }; }, { name: 'foresterPda'; type: { kind: 'struct'; fields: [ { name: 'authority'; type: 'publicKey'; }, { name: 'config'; type: { defined: 'ForesterConfig'; }; }, { name: 'activeWeight'; type: 'u64'; }, { name: 'pendingWeight'; docs: [ 'Pending weight which will get active once the next epoch starts.', ]; type: 'u64'; }, { name: 'currentEpoch'; type: 'u64'; }, { name: 'lastCompressedForesterEpochPdaHash'; docs: [ 'Link to previous compressed forester epoch account hash.', ]; type: { array: ['u8', 32]; }; }, { name: 'lastRegisteredEpoch'; type: 'u64'; }, ]; }; }, ]; types: [ { name: 'ProtocolConfig'; docs: [ 'Epoch Phases:', '1. Registration', '2. Active', '3. Report Work', '4. Post (Epoch has ended, and rewards can be claimed.)', '- There is always an active phase in progress, registration and report work', 'phases run in parallel to a currently active phase.', ]; type: { kind: 'struct'; fields: [ { name: 'genesisSlot'; docs: [ 'Solana slot when the protocol starts operating.', ]; type: 'u64'; }, { name: 'minWeight'; docs: [ 'Minimum weight required for a forester to register to an epoch.', ]; type: 'u64'; }, { name: 'slotLength'; docs: ['Light protocol slot length.']; type: 'u64'; }, { name: 'registrationPhaseLength'; docs: ['Foresters can register for this phase.']; type: 'u64'; }, { name: 'activePhaseLength'; docs: ['Foresters can perform work in this phase.']; type: 'u64'; }, { name: 'reportWorkPhaseLength'; docs: [ 'Foresters can report work to receive performance based rewards in this', 'phase.', ]; type: 'u64'; }, { name: 'networkFee'; type: 'u64'; }, { name: 'cpiContextSize'; type: 'u64'; }, { name: 'finalizeCounterLimit'; type: 'u64'; }, { name: 'placeHolder'; docs: ['Placeholder for future protocol updates.']; type: 'publicKey'; }, { name: 'placeHolderA'; type: 'u64'; }, { name: 'placeHolderB'; 
type: 'u64'; }, { name: 'placeHolderC'; type: 'u64'; }, { name: 'placeHolderD'; type: 'u64'; }, { name: 'placeHolderE'; type: 'u64'; }, { name: 'placeHolderF'; type: 'u64'; }, ]; }; }, { name: 'ForesterConfig'; type: { kind: 'struct'; fields: [ { name: 'fee'; docs: ['Fee in percentage points.']; type: 'u64'; }, ]; }; }, { name: 'EpochState'; type: { kind: 'enum'; variants: [ { name: 'Registration'; }, { name: 'Active'; }, { name: 'ReportWork'; }, { name: 'Post'; }, { name: 'Pre'; }, ]; }; }, ]; errors: [ { code: 6000; name: 'InvalidForester'; msg: 'InvalidForester'; }, { code: 6001; name: 'NotInReportWorkPhase'; }, { code: 6002; name: 'StakeAccountAlreadySynced'; }, { code: 6003; name: 'EpochEnded'; }, { code: 6004; name: 'ForesterNotEligible'; }, { code: 6005; name: 'NotInRegistrationPeriod'; }, { code: 6006; name: 'WeightInsuffient'; }, { code: 6007; name: 'ForesterAlreadyRegistered'; }, { code: 6008; name: 'InvalidEpochAccount'; }, { code: 6009; name: 'InvalidEpoch'; }, { code: 6010; name: 'EpochStillInProgress'; }, { code: 6011; name: 'NotInActivePhase'; }, { code: 6012; name: 'ForesterAlreadyReportedWork'; }, { code: 6013; name: 'InvalidNetworkFee'; }, { code: 6014; name: 'FinalizeCounterExceeded'; }, { code: 6015; name: 'CpiContextAccountMissing'; }, { code: 6016; name: 'ArithmeticUnderflow'; }, { code: 6017; name: 'RegistrationNotFinalized'; }, { code: 6018; name: 'CpiContextAccountInvalidDataLen'; }, { code: 6019; name: 'InvalidConfigUpdate'; }, { code: 6020; name: 'InvalidSigner'; }, { code: 6021; name: 'GetLatestRegisterEpochFailed'; }, { code: 6022; name: 'GetCurrentActiveEpochFailed'; }, { code: 6023; name: 'ForesterUndefined'; }, { code: 6024; name: 'ForesterDefined'; }, ]; }; export const IDL: LightRegistry = { version: '1.2.0', name: 'light_registry', constants: [ { name: 'FORESTER_SEED', type: 'bytes', value: '[102, 111, 114, 101, 115, 116, 101, 114]', }, { name: 'FORESTER_EPOCH_SEED', type: 'bytes', value: '[102, 111, 114, 101, 115, 116, 101, 114, 
95, 101, 112, 111, 99, 104]', }, { name: 'PROTOCOL_CONFIG_PDA_SEED', type: 'bytes', value: '[97, 117, 116, 104, 111, 114, 105, 116, 121]', }, ], instructions: [ { name: 'initializeProtocolConfig', docs: [ 'Initializes the protocol config pda. Can only be called once by the', 'program account keypair.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'protocolConfigPda', isMut: true, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'bump', type: 'u8', }, { name: 'protocolConfig', type: { defined: 'ProtocolConfig', }, }, ], }, { name: 'updateProtocolConfig', accounts: [ { name: 'feePayer', isMut: false, isSigner: true, }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'protocolConfigPda', isMut: true, isSigner: false, }, { name: 'newAuthority', isMut: false, isSigner: true, isOptional: true, }, ], args: [ { name: 'protocolConfig', type: { option: { defined: 'ProtocolConfig', }, }, }, ], }, { name: 'registerSystemProgram', accounts: [ { name: 'authority', isMut: true, isSigner: true, }, { name: 'protocolConfigPda', isMut: true, isSigner: false, }, { name: 'cpiAuthority', isMut: true, isSigner: false, }, { name: 'groupPda', isMut: true, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: true, isSigner: false, }, { name: 'programToBeRegistered', isMut: false, isSigner: true, docs: [ '- is signer so that only the program deployer can register a program.', ], }, ], args: [ { name: 'bump', type: 'u8', }, ], }, { name: 'deregisterSystemProgram', accounts: [ { name: 'authority', isMut: true, isSigner: true, }, { name: 'protocolConfigPda', isMut: true, isSigner: false, }, { name: 'cpiAuthority', isMut: true, isSigner: false, }, { name: 
'groupPda', isMut: true, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: true, isSigner: false, }, ], args: [ { name: 'bump', type: 'u8', }, ], }, { name: 'registerForester', accounts: [ { name: 'feePayer', isMut: true, isSigner: true, }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'protocolConfigPda', isMut: false, isSigner: false, }, { name: 'foresterPda', isMut: true, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'bump', type: 'u8', }, { name: 'authority', type: 'publicKey', }, { name: 'config', type: { defined: 'ForesterConfig', }, }, { name: 'weight', type: { option: 'u64', }, }, ], }, { name: 'updateForesterPda', accounts: [ { name: 'authority', isMut: false, isSigner: true, }, { name: 'foresterPda', isMut: true, isSigner: false, }, { name: 'newAuthority', isMut: false, isSigner: true, isOptional: true, }, ], args: [ { name: 'config', type: { option: { defined: 'ForesterConfig', }, }, }, ], }, { name: 'updateForesterPdaWeight', accounts: [ { name: 'authority', isMut: false, isSigner: true, }, { name: 'protocolConfigPda', isMut: false, isSigner: false, }, { name: 'foresterPda', isMut: true, isSigner: false, }, ], args: [ { name: 'newWeight', type: 'u64', }, ], }, { name: 'registerForesterEpoch', docs: [ 'Registers the forester for the epoch.', '1. Only the forester can register herself for the epoch.', '2. Protocol config is copied.', '3. 
Epoch account is created if needed.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'foresterPda', isMut: false, isSigner: false, }, { name: 'foresterEpochPda', isMut: true, isSigner: false, docs: [ 'Instruction checks that current_epoch is the the current epoch and that', 'the epoch is in registration phase.', ], }, { name: 'protocolConfig', isMut: false, isSigner: false, }, { name: 'epochPda', isMut: true, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'epoch', type: 'u64', }, ], }, { name: 'finalizeRegistration', docs: [ 'This transaction can be included as additional instruction in the first', 'work instructions during the active phase.', 'Registration Period must be over.', ], accounts: [ { name: 'authority', isMut: false, isSigner: true, }, { name: 'foresterEpochPda', isMut: true, isSigner: false, }, { name: 'epochPda', isMut: false, isSigner: false, }, ], args: [], }, { name: 'reportWork', accounts: [ { name: 'authority', isMut: false, isSigner: true, }, { name: 'foresterEpochPda', isMut: true, isSigner: false, }, { name: 'epochPda', isMut: true, isSigner: false, }, ], args: [], }, { name: 'initializeAddressMerkleTree', accounts: [ { name: 'authority', isMut: true, isSigner: true, docs: [ 'Anyone can create new trees just the fees cannot be set arbitrarily.', ], }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'queue', isMut: true, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'cpiAuthority', isMut: true, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'protocolConfigPda', isMut: false, isSigner: false, }, { name: 'cpiContextAccount', isMut: false, isSigner: false, isOptional: true, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, isOptional: true, }, ], args: [ { name: 'bump', type: 'u8', }, { 
name: 'programOwner', type: { option: 'publicKey', }, }, { name: 'forester', type: { option: 'publicKey', }, }, { name: 'merkleTreeConfig', type: { defined: 'AddressMerkleTreeConfig', }, }, { name: 'queueConfig', type: { defined: 'AddressQueueConfig', }, }, ], }, { name: 'initializeStateMerkleTree', accounts: [ { name: 'authority', isMut: true, isSigner: true, docs: [ 'Anyone can create new trees just the fees cannot be set arbitrarily.', ], }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'queue', isMut: true, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'cpiAuthority', isMut: true, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'protocolConfigPda', isMut: false, isSigner: false, }, { name: 'cpiContextAccount', isMut: false, isSigner: false, isOptional: true, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, isOptional: true, }, ], args: [ { name: 'bump', type: 'u8', }, { name: 'programOwner', type: { option: 'publicKey', }, }, { name: 'forester', type: { option: 'publicKey', }, }, { name: 'merkleTreeConfig', type: { defined: 'StateMerkleTreeConfig', }, }, { name: 'queueConfig', type: { defined: 'NullifierQueueConfig', }, }, ], }, { name: 'nullify', accounts: [ { name: 'registeredForesterPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthority', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'logWrapper', isMut: false, isSigner: false, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'nullifierQueue', isMut: true, isSigner: false, }, ], args: [ { name: 'bump', type: 'u8', }, { name: 'changeLogIndices', type: { vec: 'u64', }, }, { name: 'leavesQueueIndices', type: { vec: 'u16', }, }, { name: 'indices', type: { vec: 
'u64', }, }, { name: 'proofs', type: { vec: { vec: { array: ['u8', 32], }, }, }, }, ], }, { name: 'updateAddressMerkleTree', accounts: [ { name: 'registeredForesterPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthority', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'queue', isMut: true, isSigner: false, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'logWrapper', isMut: false, isSigner: false, }, ], args: [ { name: 'bump', type: 'u8', }, { name: 'changelogIndex', type: 'u16', }, { name: 'indexedChangelogIndex', type: 'u16', }, { name: 'value', type: 'u16', }, { name: 'lowAddressIndex', type: 'u64', }, { name: 'lowAddressValue', type: { array: ['u8', 32], }, }, { name: 'lowAddressNextIndex', type: 'u64', }, { name: 'lowAddressNextValue', type: { array: ['u8', 32], }, }, { name: 'lowAddressProof', type: { array: [ { array: ['u8', 32], }, 16, ], }, }, ], }, { name: 'rolloverAddressMerkleTreeAndQueue', accounts: [ { name: 'registeredForesterPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'authority', isMut: true, isSigner: true, }, { name: 'cpiAuthority', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'newMerkleTree', isMut: true, isSigner: false, }, { name: 'newQueue', isMut: true, isSigner: false, }, { name: 'oldMerkleTree', isMut: true, isSigner: false, }, { name: 'oldQueue', isMut: true, isSigner: false, }, ], args: [ { name: 'bump', type: 'u8', }, ], }, { name: 'rolloverStateMerkleTreeAndQueue', accounts: [ { name: 'registeredForesterPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'authority', isMut: true, isSigner: true, }, { name: 'cpiAuthority', isMut: false, isSigner: 
false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'newMerkleTree', isMut: true, isSigner: false, }, { name: 'newQueue', isMut: true, isSigner: false, }, { name: 'oldMerkleTree', isMut: true, isSigner: false, }, { name: 'oldQueue', isMut: true, isSigner: false, }, { name: 'cpiContextAccount', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'protocolConfigPda', isMut: false, isSigner: false, }, ], args: [ { name: 'bump', type: 'u8', }, ], }, ], accounts: [ { name: 'epochPda', docs: ['Is used for tallying and rewards calculation'], type: { kind: 'struct', fields: [ { name: 'epoch', type: 'u64', }, { name: 'protocolConfig', type: { defined: 'ProtocolConfig', }, }, { name: 'totalWork', type: 'u64', }, { name: 'registeredWeight', type: 'u64', }, ], }, }, { name: 'foresterEpochPda', type: { kind: 'struct', fields: [ { name: 'authority', type: 'publicKey', }, { name: 'config', type: { defined: 'ForesterConfig', }, }, { name: 'epoch', type: 'u64', }, { name: 'weight', type: 'u64', }, { name: 'workCounter', type: 'u64', }, { name: 'hasReportedWork', docs: [ 'Work can be reported in an extra round to earn extra performance based', 'rewards.', ], type: 'bool', }, { name: 'foresterIndex', docs: [ 'Start index of the range that determines when the forester is eligible to perform work.', 'End index is forester_start_index + weight', ], type: 'u64', }, { name: 'epochActivePhaseStartSlot', type: 'u64', }, { name: 'totalEpochWeight', docs: [ 'Total epoch weight is registered weight of the epoch account after', 'registration is concluded and active epoch period starts.', ], type: { option: 'u64', }, }, { name: 'protocolConfig', type: { defined: 'ProtocolConfig', }, }, { name: 'finalizeCounter', docs: [ 'Incremented every time finalize registration is called.', ], type: 'u64', }, ], }, }, { name: 'protocolConfigPda', type: { 
kind: 'struct', fields: [ { name: 'authority', type: 'publicKey', }, { name: 'bump', type: 'u8', }, { name: 'config', type: { defined: 'ProtocolConfig', }, }, ], }, }, { name: 'foresterPda', type: { kind: 'struct', fields: [ { name: 'authority', type: 'publicKey', }, { name: 'config', type: { defined: 'ForesterConfig', }, }, { name: 'activeWeight', type: 'u64', }, { name: 'pendingWeight', docs: [ 'Pending weight which will get active once the next epoch starts.', ], type: 'u64', }, { name: 'currentEpoch', type: 'u64', }, { name: 'lastCompressedForesterEpochPdaHash', docs: [ 'Link to previous compressed forester epoch account hash.', ], type: { array: ['u8', 32], }, }, { name: 'lastRegisteredEpoch', type: 'u64', }, ], }, }, ], types: [ { name: 'ProtocolConfig', docs: [ 'Epoch Phases:', '1. Registration', '2. Active', '3. Report Work', '4. Post (Epoch has ended, and rewards can be claimed.)', '- There is always an active phase in progress, registration and report work', 'phases run in parallel to a currently active phase.', ], type: { kind: 'struct', fields: [ { name: 'genesisSlot', docs: [ 'Solana slot when the protocol starts operating.', ], type: 'u64', }, { name: 'minWeight', docs: [ 'Minimum weight required for a forester to register to an epoch.', ], type: 'u64', }, { name: 'slotLength', docs: ['Light protocol slot length.'], type: 'u64', }, { name: 'registrationPhaseLength', docs: ['Foresters can register for this phase.'], type: 'u64', }, { name: 'activePhaseLength', docs: ['Foresters can perform work in this phase.'], type: 'u64', }, { name: 'reportWorkPhaseLength', docs: [ 'Foresters can report work to receive performance based rewards in this', 'phase.', ], type: 'u64', }, { name: 'networkFee', type: 'u64', }, { name: 'cpiContextSize', type: 'u64', }, { name: 'finalizeCounterLimit', type: 'u64', }, { name: 'placeHolder', docs: ['Placeholder for future protocol updates.'], type: 'publicKey', }, { name: 'placeHolderA', type: 'u64', }, { name: 'placeHolderB', 
type: 'u64', }, { name: 'placeHolderC', type: 'u64', }, { name: 'placeHolderD', type: 'u64', }, { name: 'placeHolderE', type: 'u64', }, { name: 'placeHolderF', type: 'u64', }, ], }, }, { name: 'ForesterConfig', type: { kind: 'struct', fields: [ { name: 'fee', docs: ['Fee in percentage points.'], type: 'u64', }, ], }, }, { name: 'EpochState', type: { kind: 'enum', variants: [ { name: 'Registration', }, { name: 'Active', }, { name: 'ReportWork', }, { name: 'Post', }, { name: 'Pre', }, ], }, }, ], errors: [ { code: 6000, name: 'InvalidForester', msg: 'InvalidForester', }, { code: 6001, name: 'NotInReportWorkPhase', }, { code: 6002, name: 'StakeAccountAlreadySynced', }, { code: 6003, name: 'EpochEnded', }, { code: 6004, name: 'ForesterNotEligible', }, { code: 6005, name: 'NotInRegistrationPeriod', }, { code: 6006, name: 'WeightInsuffient', }, { code: 6007, name: 'ForesterAlreadyRegistered', }, { code: 6008, name: 'InvalidEpochAccount', }, { code: 6009, name: 'InvalidEpoch', }, { code: 6010, name: 'EpochStillInProgress', }, { code: 6011, name: 'NotInActivePhase', }, { code: 6012, name: 'ForesterAlreadyReportedWork', }, { code: 6013, name: 'InvalidNetworkFee', }, { code: 6014, name: 'FinalizeCounterExceeded', }, { code: 6015, name: 'CpiContextAccountMissing', }, { code: 6016, name: 'ArithmeticUnderflow', }, { code: 6017, name: 'RegistrationNotFinalized', }, { code: 6018, name: 'CpiContextAccountInvalidDataLen', }, { code: 6019, name: 'InvalidConfigUpdate', }, { code: 6020, name: 'InvalidSigner', }, { code: 6021, name: 'GetLatestRegisterEpochFailed', }, { code: 6022, name: 'GetCurrentActiveEpochFailed', }, { code: 6023, name: 'ForesterUndefined', }, { code: 6024, name: 'ForesterDefined', }, ], };
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/idls/light_compressed_token.ts
export type LightCompressedToken = { version: '1.2.0'; name: 'light_compressed_token'; instructions: [ { name: 'createTokenPool'; docs: [ 'This instruction creates a token pool for a given mint. Every spl mint', 'can have one token pool. When a token is compressed the tokens are', 'transferrred to the token pool, and their compressed equivalent is', 'minted into a Merkle tree.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: true; isSigner: false; }, { name: 'tokenProgram'; isMut: false; isSigner: false; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, ]; args: []; }, { name: 'mintTo'; docs: [ 'Mints tokens from an spl token mint to a list of compressed accounts.', 'Minted tokens are transferred to a pool account owned by the compressed', 'token program. The instruction creates one compressed output account for', 'every amount and pubkey input pair. A constant amount of lamports can be', 'transferred to each output account to enable. A use case to add lamports', 'to a compressed token account is to prevent spam. 
This is the only way', 'to add lamports to a compressed token account.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: true; isSigner: false; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; }, { name: 'tokenProgram'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; docs: ['programs']; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'solPoolPda'; isMut: true; isSigner: false; isOptional: true; }, ]; args: [ { name: 'publicKeys'; type: { vec: 'publicKey'; }; }, { name: 'amounts'; type: { vec: 'u64'; }; }, { name: 'lamports'; type: { option: 'u64'; }; }, ]; }, { name: 'compressSplTokenAccount'; docs: [ 'Compresses the balance of an spl token account sub an optional remaining', 'amount. This instruction does not close the spl token account. 
To close', 'the account bundle a close spl account instruction in your transaction.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'compressOrDecompressTokenAccount'; isMut: true; isSigner: false; isOptional: true; }, { name: 'tokenProgram'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'owner'; type: 'publicKey'; }, { name: 'remainingAmount'; type: { option: 'u64'; }; }, { name: 'cpiContext'; type: { option: { defined: 'CompressedCpiContext'; }; }; }, ]; }, { name: 'transfer'; docs: [ 'Transfers compressed tokens from one account to another. All accounts', 'must be of the same mint. Additional spl tokens can be compressed or', 'decompressed. In one transaction only compression or decompression is', 'possible. Lamports can be transferred alongside tokens. If output token', 'accounts specify less lamports than inputs the remaining lamports are', 'transferred to an output compressed account. Signer must be owner or', 'delegate. 
If a delegated token account is transferred the delegate is', 'not preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'compressOrDecompressTokenAccount'; isMut: true; isSigner: false; isOptional: true; }, { name: 'tokenProgram'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'approve'; docs: [ 'Delegates an amount to a delegate. A compressed token account is either', 'completely delegated or not. Prior delegates are not preserved. Cannot', 'be called by a delegate.', 'The instruction creates two output accounts:', '1. one account with delegated amount', '2. 
one account with remaining(change) amount', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'revoke'; docs: [ 'Revokes a delegation. The instruction merges all inputs into one output', 'account. Cannot be called by a delegate. 
Delegates are not preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'freeze'; docs: [ 'Freezes compressed token accounts. Inputs must not be frozen. Creates as', 'many outputs as inputs. 
Balances and delegates are preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['that this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'thaw'; docs: [ 'Thaws frozen compressed token accounts. Inputs must be frozen. Creates', 'as many outputs as inputs. Balances and delegates are preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['that this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'burn'; docs: [ 'Burns compressed tokens and spl tokens from the pool account. Delegates', 'can burn tokens. 
The output compressed token account remains delegated.', 'Creates one output compressed token account.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: true; isSigner: false; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; }, { name: 'tokenProgram'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'stubIdlBuild'; docs: [ 'This function is a stub to allow Anchor to include the input types in', 'the IDL. 
It should not be included in production builds nor be called in', 'practice.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'compressOrDecompressTokenAccount'; isMut: true; isSigner: false; isOptional: true; }, { name: 'tokenProgram'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs1'; type: { defined: 'CompressedTokenInstructionDataTransfer'; }; }, { name: 'inputs2'; type: { defined: 'TokenData'; }; }, ]; }, ]; types: [ { name: 'AccessMetadata'; type: { kind: 'struct'; fields: [ { name: 'owner'; docs: ['Owner of the Merkle tree.']; type: 'publicKey'; }, { name: 'programOwner'; docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ]; type: 'publicKey'; }, { name: 'forester'; docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. 
The registry program', 'implements access control to prevent contention during forester. The', 'forester pubkey specified in this struct can bypass contention checks.', ]; type: 'publicKey'; }, ]; }; }, { name: 'AccountState'; type: { kind: 'enum'; variants: [ { name: 'Initialized'; }, { name: 'Frozen'; }, ]; }; }, { name: 'CompressedAccount'; type: { kind: 'struct'; fields: [ { name: 'owner'; type: 'publicKey'; }, { name: 'lamports'; type: 'u64'; }, { name: 'address'; type: { option: { array: ['u8', 32]; }; }; }, { name: 'data'; type: { option: { defined: 'CompressedAccountData'; }; }; }, ]; }; }, { name: 'CompressedAccountData'; type: { kind: 'struct'; fields: [ { name: 'discriminator'; type: { array: ['u8', 8]; }; }, { name: 'data'; type: 'bytes'; }, { name: 'dataHash'; type: { array: ['u8', 32]; }; }, ]; }; }, { name: 'CompressedCpiContext'; type: { kind: 'struct'; fields: [ { name: 'setContext'; docs: [ 'Is set by the program that is invoking the CPI to signal that is should', 'set the cpi context.', ]; type: 'bool'; }, { name: 'firstSetContext'; docs: [ 'Is set to wipe the cpi context since someone could have set it before', 'with unrelated data.', ]; type: 'bool'; }, { name: 'cpiContextAccountIndex'; docs: [ 'Index of cpi context account in remaining accounts.', ]; type: 'u8'; }, ]; }; }, { name: 'CompressedProof'; type: { kind: 'struct'; fields: [ { name: 'a'; type: { array: ['u8', 32]; }; }, { name: 'b'; type: { array: ['u8', 64]; }; }, { name: 'c'; type: { array: ['u8', 32]; }; }, ]; }; }, { name: 'CompressedTokenInstructionDataTransfer'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'mint'; type: 'publicKey'; }, { name: 'delegatedTransfer'; docs: [ 'Is required if the signer is delegate,', '-> delegate is authority account,', 'owner = Some(owner) is the owner of the token account.', ]; type: { option: { defined: 'DelegatedTransfer'; }; }; }, { name: 'inputTokenDataWithContext'; type: { 
vec: { defined: 'InputTokenDataWithContext'; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'PackedTokenTransferOutputData'; }; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'compressOrDecompressAmount'; type: { option: 'u64'; }; }, { name: 'cpiContext'; type: { option: { defined: 'CompressedCpiContext'; }; }; }, { name: 'lamportsChangeAccountMerkleTreeIndex'; type: { option: 'u8'; }; }, ]; }; }, { name: 'DelegatedTransfer'; docs: [ 'Struct to provide the owner when the delegate is signer of the transaction.', ]; type: { kind: 'struct'; fields: [ { name: 'owner'; type: 'publicKey'; }, { name: 'delegateChangeAccountIndex'; docs: [ 'Index of change compressed account in output compressed accounts. In', "case that the delegate didn't spend the complete delegated compressed", 'account balance the change compressed account will be delegated to her', 'as well.', ]; type: { option: 'u8'; }; }, ]; }; }, { name: 'InputTokenDataWithContext'; type: { kind: 'struct'; fields: [ { name: 'amount'; type: 'u64'; }, { name: 'delegateIndex'; type: { option: 'u8'; }; }, { name: 'merkleContext'; type: { defined: 'PackedMerkleContext'; }; }, { name: 'rootIndex'; type: 'u16'; }, { name: 'lamports'; type: { option: 'u64'; }; }, { name: 'tlv'; docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ]; type: { option: 'bytes'; }; }, ]; }; }, { name: 'InstructionDataInvoke'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'inputCompressedAccountsWithMerkleContext'; type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext'; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'newAddressParams'; type: { vec: { defined: 'NewAddressParamsPacked'; }; }; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; 
}, ]; }; }, { name: 'InstructionDataInvokeCpi'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'newAddressParams'; type: { vec: { defined: 'NewAddressParamsPacked'; }; }; }, { name: 'inputCompressedAccountsWithMerkleContext'; type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext'; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'cpiContext'; type: { option: { defined: 'CompressedCpiContext'; }; }; }, ]; }; }, { name: 'MerkleTreeMetadata'; type: { kind: 'struct'; fields: [ { name: 'accessMetadata'; type: { defined: 'AccessMetadata'; }; }, { name: 'rolloverMetadata'; type: { defined: 'RolloverMetadata'; }; }, { name: 'associatedQueue'; type: 'publicKey'; }, { name: 'nextMerkleTree'; type: 'publicKey'; }, ]; }; }, { name: 'MerkleTreeSequenceNumber'; type: { kind: 'struct'; fields: [ { name: 'pubkey'; type: 'publicKey'; }, { name: 'seq'; type: 'u64'; }, ]; }; }, { name: 'NewAddressParamsPacked'; type: { kind: 'struct'; fields: [ { name: 'seed'; type: { array: ['u8', 32]; }; }, { name: 'addressQueueAccountIndex'; type: 'u8'; }, { name: 'addressMerkleTreeAccountIndex'; type: 'u8'; }, { name: 'addressMerkleTreeRootIndex'; type: 'u16'; }, ]; }; }, { name: 'OutputCompressedAccountWithPackedContext'; type: { kind: 'struct'; fields: [ { name: 'compressedAccount'; type: { defined: 'CompressedAccount'; }; }, { name: 'merkleTreeIndex'; type: 'u8'; }, ]; }; }, { name: 'PackedCompressedAccountWithMerkleContext'; type: { kind: 'struct'; fields: [ { name: 'compressedAccount'; type: { defined: 'CompressedAccount'; }; }, { name: 'merkleContext'; type: { defined: 'PackedMerkleContext'; }; }, { name: 'rootIndex'; docs: [ 'Index of root used in inclusion validity proof.', ]; 
type: 'u16'; }, { name: 'readOnly'; docs: [ 'Placeholder to mark accounts read-only unimplemented set to false.', ]; type: 'bool'; }, ]; }; }, { name: 'PackedMerkleContext'; type: { kind: 'struct'; fields: [ { name: 'merkleTreePubkeyIndex'; type: 'u8'; }, { name: 'nullifierQueuePubkeyIndex'; type: 'u8'; }, { name: 'leafIndex'; type: 'u32'; }, { name: 'queueIndex'; docs: [ 'Index of leaf in queue. Placeholder of batched Merkle tree updates', 'currently unimplemented.', ]; type: { option: { defined: 'QueueIndex'; }; }; }, ]; }; }, { name: 'PackedTokenTransferOutputData'; type: { kind: 'struct'; fields: [ { name: 'owner'; type: 'publicKey'; }, { name: 'amount'; type: 'u64'; }, { name: 'lamports'; type: { option: 'u64'; }; }, { name: 'merkleTreeIndex'; type: 'u8'; }, { name: 'tlv'; docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ]; type: { option: 'bytes'; }; }, ]; }; }, { name: 'PublicTransactionEvent'; type: { kind: 'struct'; fields: [ { name: 'inputCompressedAccountHashes'; type: { vec: { array: ['u8', 32]; }; }; }, { name: 'outputCompressedAccountHashes'; type: { vec: { array: ['u8', 32]; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'outputLeafIndices'; type: { vec: 'u32'; }; }, { name: 'sequenceNumbers'; type: { vec: { defined: 'MerkleTreeSequenceNumber'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'pubkeyArray'; type: { vec: 'publicKey'; }; }, { name: 'message'; type: { option: 'bytes'; }; }, ]; }; }, { name: 'QueueIndex'; type: { kind: 'struct'; fields: [ { name: 'queueId'; docs: ['Id of queue in queue account.']; type: 'u8'; }, { name: 'index'; docs: ['Index of compressed account hash in queue.']; type: 'u16'; }, ]; }; }, { name: 'RolloverMetadata'; type: { kind: 'struct'; fields: [ { name: 'index'; docs: ['Unique index.']; type: 
'u64'; }, { name: 'rolloverFee'; docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ]; type: 'u64'; }, { name: 'rolloverThreshold'; docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ]; type: 'u64'; }, { name: 'networkFee'; docs: ['Tip for maintaining the account.']; type: 'u64'; }, { name: 'rolledoverSlot'; docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ]; type: 'u64'; }, { name: 'closeThreshold'; docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. No 'close' functionality has been", 'implemented yet.', ]; type: 'u64'; }, { name: 'additionalBytes'; docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ]; type: 'u64'; }, ]; }; }, { name: 'TokenData'; type: { kind: 'struct'; fields: [ { name: 'mint'; docs: ['The mint associated with this account']; type: 'publicKey'; }, { name: 'owner'; docs: ['The owner of this account.']; type: 'publicKey'; }, { name: 'amount'; docs: ['The amount of tokens this account holds.']; type: 'u64'; }, { name: 'delegate'; docs: [ 'If `delegate` is `Some` then `delegated_amount` represents', 'the amount authorized by the delegate', ]; type: { option: 'publicKey'; }; }, { name: 'state'; docs: ["The account's state"]; type: { defined: 'AccountState'; }; }, { name: 'tlv'; docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ]; type: { option: 'bytes'; }; }, ]; }; }, ]; errors: [ { code: 6000; name: 'SignerCheckFailed'; msg: 'Signer check failed'; }, { code: 6001; name: 'CreateTransferInstructionFailed'; msg: 'Create transfer instruction failed'; }, { code: 6002; name: 'AccountNotFound'; msg: 'Account not found'; }, { code: 6003; name: 
'SerializationError'; msg: 'Serialization error'; }, ]; }; export const IDL: LightCompressedToken = { version: '1.2.0', name: 'light_compressed_token', instructions: [ { name: 'createTokenPool', docs: [ 'This instruction creates a token pool for a given mint. Every spl mint', 'can have one token pool. When a token is compressed the tokens are', 'transferrred to the token pool, and their compressed equivalent is', 'minted into a Merkle tree.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'mint', isMut: true, isSigner: false, }, { name: 'tokenProgram', isMut: false, isSigner: false, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, ], args: [], }, { name: 'mintTo', docs: [ 'Mints tokens from an spl token mint to a list of compressed accounts.', 'Minted tokens are transferred to a pool account owned by the compressed', 'token program. The instruction creates one compressed output account for', 'every amount and pubkey input pair. A constant amount of lamports can be', 'transferred to each output account to enable. A use case to add lamports', 'to a compressed token account is to prevent spam. 
This is the only way', 'to add lamports to a compressed token account.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'mint', isMut: true, isSigner: false, }, { name: 'tokenPoolPda', isMut: true, isSigner: false, }, { name: 'tokenProgram', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, docs: ['programs'], }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'solPoolPda', isMut: true, isSigner: false, isOptional: true, }, ], args: [ { name: 'publicKeys', type: { vec: 'publicKey', }, }, { name: 'amounts', type: { vec: 'u64', }, }, { name: 'lamports', type: { option: 'u64', }, }, ], }, { name: 'compressSplTokenAccount', docs: [ 'Compresses the balance of an spl token account sub an optional remaining', 'amount. This instruction does not close the spl token account. 
To close', 'the account bundle a close spl account instruction in your transaction.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'compressOrDecompressTokenAccount', isMut: true, isSigner: false, isOptional: true, }, { name: 'tokenProgram', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'owner', type: 'publicKey', }, { name: 'remainingAmount', type: { option: 'u64', }, }, { name: 'cpiContext', type: { option: { defined: 'CompressedCpiContext', }, }, }, ], }, { name: 'transfer', docs: [ 'Transfers compressed tokens from one account to another. All accounts', 'must be of the same mint. Additional spl tokens can be compressed or', 'decompressed. In one transaction only compression or decompression is', 'possible. Lamports can be transferred alongside tokens. If output token', 'accounts specify less lamports than inputs the remaining lamports are', 'transferred to an output compressed account. Signer must be owner or', 'delegate. 
If a delegated token account is transferred the delegate is', 'not preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'compressOrDecompressTokenAccount', isMut: true, isSigner: false, isOptional: true, }, { name: 'tokenProgram', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'approve', docs: [ 'Delegates an amount to a delegate. A compressed token account is either', 'completely delegated or not. Prior delegates are not preserved. Cannot', 'be called by a delegate.', 'The instruction creates two output accounts:', '1. one account with delegated amount', '2. 
one account with remaining(change) amount', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'revoke', docs: [ 'Revokes a delegation. The instruction merges all inputs into one output', 'account. Cannot be called by a delegate. 
Delegates are not preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'freeze', docs: [ 'Freezes compressed token accounts. Inputs must not be frozen. Creates as', 'many outputs as inputs. 
Balances and delegates are preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['that this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'mint', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'thaw', docs: [ 'Thaws frozen compressed token accounts. Inputs must be frozen. Creates', 'as many outputs as inputs. Balances and delegates are preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['that this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'mint', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'burn', docs: [ 'Burns compressed tokens and spl tokens from the pool account. Delegates', 'can burn tokens. 
The output compressed token account remains delegated.', 'Creates one output compressed token account.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'mint', isMut: true, isSigner: false, }, { name: 'tokenPoolPda', isMut: true, isSigner: false, }, { name: 'tokenProgram', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'stubIdlBuild', docs: [ 'This function is a stub to allow Anchor to include the input types in', 'the IDL. 
It should not be included in production builds nor be called in', 'practice.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'compressOrDecompressTokenAccount', isMut: true, isSigner: false, isOptional: true, }, { name: 'tokenProgram', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs1', type: { defined: 'CompressedTokenInstructionDataTransfer', }, }, { name: 'inputs2', type: { defined: 'TokenData', }, }, ], }, ], types: [ { name: 'AccessMetadata', type: { kind: 'struct', fields: [ { name: 'owner', docs: ['Owner of the Merkle tree.'], type: 'publicKey', }, { name: 'programOwner', docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ], type: 'publicKey', }, { name: 'forester', docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. 
The registry program', 'implements access control to prevent contention during forester. The', 'forester pubkey specified in this struct can bypass contention checks.', ], type: 'publicKey', }, ], }, }, { name: 'AccountState', type: { kind: 'enum', variants: [ { name: 'Initialized', }, { name: 'Frozen', }, ], }, }, { name: 'CompressedAccount', type: { kind: 'struct', fields: [ { name: 'owner', type: 'publicKey', }, { name: 'lamports', type: 'u64', }, { name: 'address', type: { option: { array: ['u8', 32], }, }, }, { name: 'data', type: { option: { defined: 'CompressedAccountData', }, }, }, ], }, }, { name: 'CompressedAccountData', type: { kind: 'struct', fields: [ { name: 'discriminator', type: { array: ['u8', 8], }, }, { name: 'data', type: 'bytes', }, { name: 'dataHash', type: { array: ['u8', 32], }, }, ], }, }, { name: 'CompressedCpiContext', type: { kind: 'struct', fields: [ { name: 'setContext', docs: [ 'Is set by the program that is invoking the CPI to signal that is should', 'set the cpi context.', ], type: 'bool', }, { name: 'firstSetContext', docs: [ 'Is set to wipe the cpi context since someone could have set it before', 'with unrelated data.', ], type: 'bool', }, { name: 'cpiContextAccountIndex', docs: [ 'Index of cpi context account in remaining accounts.', ], type: 'u8', }, ], }, }, { name: 'CompressedProof', type: { kind: 'struct', fields: [ { name: 'a', type: { array: ['u8', 32], }, }, { name: 'b', type: { array: ['u8', 64], }, }, { name: 'c', type: { array: ['u8', 32], }, }, ], }, }, { name: 'CompressedTokenInstructionDataTransfer', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'mint', type: 'publicKey', }, { name: 'delegatedTransfer', docs: [ 'Is required if the signer is delegate,', '-> delegate is authority account,', 'owner = Some(owner) is the owner of the token account.', ], type: { option: { defined: 'DelegatedTransfer', }, }, }, { name: 'inputTokenDataWithContext', type: { 
vec: { defined: 'InputTokenDataWithContext', }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'PackedTokenTransferOutputData', }, }, }, { name: 'isCompress', type: 'bool', }, { name: 'compressOrDecompressAmount', type: { option: 'u64', }, }, { name: 'cpiContext', type: { option: { defined: 'CompressedCpiContext', }, }, }, { name: 'lamportsChangeAccountMerkleTreeIndex', type: { option: 'u8', }, }, ], }, }, { name: 'DelegatedTransfer', docs: [ 'Struct to provide the owner when the delegate is signer of the transaction.', ], type: { kind: 'struct', fields: [ { name: 'owner', type: 'publicKey', }, { name: 'delegateChangeAccountIndex', docs: [ 'Index of change compressed account in output compressed accounts. In', "case that the delegate didn't spend the complete delegated compressed", 'account balance the change compressed account will be delegated to her', 'as well.', ], type: { option: 'u8', }, }, ], }, }, { name: 'InputTokenDataWithContext', type: { kind: 'struct', fields: [ { name: 'amount', type: 'u64', }, { name: 'delegateIndex', type: { option: 'u8', }, }, { name: 'merkleContext', type: { defined: 'PackedMerkleContext', }, }, { name: 'rootIndex', type: 'u16', }, { name: 'lamports', type: { option: 'u64', }, }, { name: 'tlv', docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ], type: { option: 'bytes', }, }, ], }, }, { name: 'InstructionDataInvoke', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'inputCompressedAccountsWithMerkleContext', type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext', }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'newAddressParams', type: { vec: { defined: 'NewAddressParamsPacked', }, }, }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', 
}, ], }, }, { name: 'InstructionDataInvokeCpi', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'newAddressParams', type: { vec: { defined: 'NewAddressParamsPacked', }, }, }, { name: 'inputCompressedAccountsWithMerkleContext', type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext', }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', }, { name: 'cpiContext', type: { option: { defined: 'CompressedCpiContext', }, }, }, ], }, }, { name: 'MerkleTreeMetadata', type: { kind: 'struct', fields: [ { name: 'accessMetadata', type: { defined: 'AccessMetadata', }, }, { name: 'rolloverMetadata', type: { defined: 'RolloverMetadata', }, }, { name: 'associatedQueue', type: 'publicKey', }, { name: 'nextMerkleTree', type: 'publicKey', }, ], }, }, { name: 'MerkleTreeSequenceNumber', type: { kind: 'struct', fields: [ { name: 'pubkey', type: 'publicKey', }, { name: 'seq', type: 'u64', }, ], }, }, { name: 'NewAddressParamsPacked', type: { kind: 'struct', fields: [ { name: 'seed', type: { array: ['u8', 32], }, }, { name: 'addressQueueAccountIndex', type: 'u8', }, { name: 'addressMerkleTreeAccountIndex', type: 'u8', }, { name: 'addressMerkleTreeRootIndex', type: 'u16', }, ], }, }, { name: 'OutputCompressedAccountWithPackedContext', type: { kind: 'struct', fields: [ { name: 'compressedAccount', type: { defined: 'CompressedAccount', }, }, { name: 'merkleTreeIndex', type: 'u8', }, ], }, }, { name: 'PackedCompressedAccountWithMerkleContext', type: { kind: 'struct', fields: [ { name: 'compressedAccount', type: { defined: 'CompressedAccount', }, }, { name: 'merkleContext', type: { defined: 'PackedMerkleContext', }, }, { name: 'rootIndex', docs: [ 'Index of root used in inclusion validity proof.', ], 
type: 'u16', }, { name: 'readOnly', docs: [ 'Placeholder to mark accounts read-only unimplemented set to false.', ], type: 'bool', }, ], }, }, { name: 'PackedMerkleContext', type: { kind: 'struct', fields: [ { name: 'merkleTreePubkeyIndex', type: 'u8', }, { name: 'nullifierQueuePubkeyIndex', type: 'u8', }, { name: 'leafIndex', type: 'u32', }, { name: 'queueIndex', docs: [ 'Index of leaf in queue. Placeholder of batched Merkle tree updates', 'currently unimplemented.', ], type: { option: { defined: 'QueueIndex', }, }, }, ], }, }, { name: 'PackedTokenTransferOutputData', type: { kind: 'struct', fields: [ { name: 'owner', type: 'publicKey', }, { name: 'amount', type: 'u64', }, { name: 'lamports', type: { option: 'u64', }, }, { name: 'merkleTreeIndex', type: 'u8', }, { name: 'tlv', docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ], type: { option: 'bytes', }, }, ], }, }, { name: 'PublicTransactionEvent', type: { kind: 'struct', fields: [ { name: 'inputCompressedAccountHashes', type: { vec: { array: ['u8', 32], }, }, }, { name: 'outputCompressedAccountHashes', type: { vec: { array: ['u8', 32], }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'outputLeafIndices', type: { vec: 'u32', }, }, { name: 'sequenceNumbers', type: { vec: { defined: 'MerkleTreeSequenceNumber', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'pubkeyArray', type: { vec: 'publicKey', }, }, { name: 'message', type: { option: 'bytes', }, }, ], }, }, { name: 'QueueIndex', type: { kind: 'struct', fields: [ { name: 'queueId', docs: ['Id of queue in queue account.'], type: 'u8', }, { name: 'index', docs: ['Index of compressed account hash in queue.'], type: 'u16', }, ], }, }, { name: 'RolloverMetadata', type: { kind: 'struct', fields: [ { name: 'index', docs: ['Unique index.'], type: 
'u64', }, { name: 'rolloverFee', docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ], type: 'u64', }, { name: 'rolloverThreshold', docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ], type: 'u64', }, { name: 'networkFee', docs: ['Tip for maintaining the account.'], type: 'u64', }, { name: 'rolledoverSlot', docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ], type: 'u64', }, { name: 'closeThreshold', docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. No 'close' functionality has been", 'implemented yet.', ], type: 'u64', }, { name: 'additionalBytes', docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ], type: 'u64', }, ], }, }, { name: 'TokenData', type: { kind: 'struct', fields: [ { name: 'mint', docs: ['The mint associated with this account'], type: 'publicKey', }, { name: 'owner', docs: ['The owner of this account.'], type: 'publicKey', }, { name: 'amount', docs: ['The amount of tokens this account holds.'], type: 'u64', }, { name: 'delegate', docs: [ 'If `delegate` is `Some` then `delegated_amount` represents', 'the amount authorized by the delegate', ], type: { option: 'publicKey', }, }, { name: 'state', docs: ["The account's state"], type: { defined: 'AccountState', }, }, { name: 'tlv', docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ], type: { option: 'bytes', }, }, ], }, }, ], errors: [ { code: 6000, name: 'SignerCheckFailed', msg: 'Signer check failed', }, { code: 6001, name: 'CreateTransferInstructionFailed', msg: 'Create transfer instruction failed', }, { code: 6002, name: 'AccountNotFound', msg: 'Account not found', }, { code: 6003, name: 
'SerializationError', msg: 'Serialization error', }, ], };
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/programs/index.ts
// Barrel module: re-exports every public helper from the system program module.
export * from './system';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/programs/system.ts
import { Program, AnchorProvider, setProvider, BN } from '@coral-xyz/anchor'; import { PublicKey, Keypair, Connection, TransactionInstruction, SystemProgram, } from '@solana/web3.js'; import { Buffer } from 'buffer'; import { IDL, LightSystemProgram as LightSystemProgramIDL, } from '../idls/light_system_program'; import { useWallet } from '../wallet'; import { CompressedAccount, CompressedAccountWithMerkleContext, CompressedProof, InstructionDataInvoke, bn, createCompressedAccount, } from '../state'; import { packCompressedAccounts, toAccountMetas } from '../instruction'; import { defaultStaticAccountsStruct, defaultTestStateTreeAccounts, } from '../constants'; import { validateSameOwner, validateSufficientBalance, } from '../utils/validation'; import { packNewAddressParams, NewAddressParams } from '../utils'; export const sumUpLamports = ( accounts: CompressedAccountWithMerkleContext[], ): BN => { return accounts.reduce( (acc, account) => acc.add(bn(account.lamports)), bn(0), ); }; /** * Create compressed account system transaction params */ type CreateAccountWithSeedParams = { /** * The payer of the transaction. */ payer: PublicKey; /** * Address params for the new compressed account */ newAddressParams: NewAddressParams; newAddress: number[]; /** * Recent validity proof proving that there's no existing compressed account * registered with newAccountAddress */ recentValidityProof: CompressedProof; /** * State tree pubkey. Defaults to a public state tree if unspecified. */ outputStateTree?: PublicKey; /** * Public key of the program to assign as the owner of the created account */ programId?: PublicKey; /** * Optional input accounts to transfer lamports from into the new compressed * account. */ inputCompressedAccounts?: CompressedAccountWithMerkleContext[]; /** * Optional input state root indices of 'inputCompressedAccounts'. The * expiry is tied to the 'recentValidityProof'. 
*/ inputStateRootIndices?: number[]; /** * Optional lamports to transfer into the new compressed account. */ lamports?: number | BN; }; /** * Defines the parameters for the transfer method */ type TransferParams = { /** * The payer of the transaction. */ payer: PublicKey; /** * The input state to be consumed. */ inputCompressedAccounts: CompressedAccountWithMerkleContext[]; /** * Recipient address */ toAddress: PublicKey; /** * amount of lamports to transfer. */ lamports: number | BN; /** * The recent state root indices of the input state. The expiry is tied to * the proof. * * TODO: Add support for passing recent-values after instruction creation. */ recentInputStateRootIndices: number[]; /** * The recent validity proof for state inclusion of the input state. It * expires after n slots. */ recentValidityProof: CompressedProof; /** * The state trees that the tx output should be inserted into. This can be a * single PublicKey or an array of PublicKey. Defaults to the 0th state tree * of input state. */ outputStateTrees?: PublicKey[] | PublicKey; }; /// TODO: /// - add option to compress to another owner /// - add option to merge with input state /** * Defines the parameters for the transfer method */ type CompressParams = { /** * The payer of the transaction. */ payer: PublicKey; /** * address that the lamports are attached to. also defaults to the recipient owner */ toAddress: PublicKey; /** * amount of lamports to compress. */ lamports: number | BN; /** * The state tree that the tx output should be inserted into. Defaults to a * public state tree if unspecified. */ outputStateTree?: PublicKey; }; /** * Defines the parameters for the transfer method */ type DecompressParams = { /** * The payer of the transaction. */ payer: PublicKey; /** * The input state to be consumed. */ inputCompressedAccounts: CompressedAccountWithMerkleContext[]; /** * Recipient address of uncompressed lamports */ toAddress: PublicKey; /** * amount of lamports to decompress. 
*/ lamports: number | BN; /** * The recent state root indices of the input state. The expiry is tied to * the proof. * * TODO: Add support for passing recent-values after instruction creation. */ recentInputStateRootIndices: number[]; /** * The recent validity proof for state inclusion of the input state. It * expires after n slots. */ recentValidityProof: CompressedProof; /** * The state trees that the tx output should be inserted into. This can be a * single PublicKey or an array of PublicKey. Defaults to the 0th state tree * of input state. */ outputStateTree?: PublicKey; }; const SOL_POOL_PDA_SEED = Buffer.from('sol_pool_pda'); export class LightSystemProgram { /** * @internal */ constructor() {} /** * Public key that identifies the CompressedPda program */ static programId: PublicKey = new PublicKey( // TODO: can add check to ensure its consistent with the idl 'SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7', ); private static _program: Program<LightSystemProgramIDL> | null = null; static get program(): Program<LightSystemProgramIDL> { if (!this._program) { this.initializeProgram(); } return this._program!; } /** * @internal * Cwct1kQLwJm8Z3HetLu8m4SXkhD6FZ5fXbJQCxTxPnGY * */ static deriveCompressedSolPda(): PublicKey { const seeds = [SOL_POOL_PDA_SEED]; const [address, _] = PublicKey.findProgramAddressSync( seeds, this.programId, ); return address; } /** * Initializes the program statically if not already initialized. 
*/ private static initializeProgram() { if (!this._program) { const mockKeypair = Keypair.generate(); const mockConnection = new Connection( 'http://127.0.0.1:8899', 'confirmed', ); const mockProvider = new AnchorProvider( mockConnection, useWallet(mockKeypair), { commitment: 'confirmed', preflightCommitment: 'confirmed', }, ); setProvider(mockProvider); this._program = new Program(IDL, this.programId, mockProvider); } } static createTransferOutputState( inputCompressedAccounts: CompressedAccountWithMerkleContext[], toAddress: PublicKey, lamports: number | BN, ): CompressedAccount[] { lamports = bn(lamports); const inputLamports = sumUpLamports(inputCompressedAccounts); const changeLamports = inputLamports.sub(lamports); validateSufficientBalance(changeLamports); if (changeLamports.eq(bn(0))) { return [createCompressedAccount(toAddress, lamports)]; } validateSameOwner(inputCompressedAccounts); const outputCompressedAccounts: CompressedAccount[] = [ createCompressedAccount( inputCompressedAccounts[0].owner, changeLamports, ), createCompressedAccount(toAddress, lamports), ]; return outputCompressedAccounts; } static createDecompressOutputState( inputCompressedAccounts: CompressedAccountWithMerkleContext[], lamports: number | BN, ): CompressedAccount[] { lamports = bn(lamports); const inputLamports = sumUpLamports(inputCompressedAccounts); const changeLamports = inputLamports.sub(lamports); validateSufficientBalance(changeLamports); /// lamports gets decompressed if (changeLamports.eq(bn(0))) { return []; } validateSameOwner(inputCompressedAccounts); const outputCompressedAccounts: CompressedAccount[] = [ createCompressedAccount( inputCompressedAccounts[0].owner, changeLamports, ), ]; return outputCompressedAccounts; } /** * No data by default */ static createNewAddressOutputState( address: number[], owner: PublicKey, lamports?: BN | number, inputCompressedAccounts?: CompressedAccountWithMerkleContext[], ): CompressedAccount[] { lamports = bn(lamports ?? 
0); const inputLamports = sumUpLamports(inputCompressedAccounts ?? []); const changeLamports = inputLamports.sub(lamports); validateSufficientBalance(changeLamports); if (changeLamports.eq(bn(0)) || !inputCompressedAccounts) { return [ createCompressedAccount(owner, lamports, undefined, address), ]; } validateSameOwner(inputCompressedAccounts); const outputCompressedAccounts: CompressedAccount[] = [ createCompressedAccount( inputCompressedAccounts[0].owner, changeLamports, ), createCompressedAccount(owner, lamports, undefined, address), ]; return outputCompressedAccounts; } /** * Creates instruction to create compressed account with PDA. * Cannot write data. * * TODO: support transfer of lamports to the new account. */ static async createAccount({ payer, newAddressParams, newAddress, recentValidityProof, outputStateTree, inputCompressedAccounts, inputStateRootIndices, lamports, }: CreateAccountWithSeedParams): Promise<TransactionInstruction> { const outputCompressedAccounts = this.createNewAddressOutputState( newAddress, payer, lamports, inputCompressedAccounts, ); /// Pack accounts const { packedInputCompressedAccounts, packedOutputCompressedAccounts, remainingAccounts: _remainingAccounts, } = packCompressedAccounts( inputCompressedAccounts ?? [], inputStateRootIndices ?? 
[], outputCompressedAccounts, outputStateTree, ); const { newAddressParamsPacked, remainingAccounts } = packNewAddressParams([newAddressParams], _remainingAccounts); const rawData: InstructionDataInvoke = { proof: recentValidityProof, inputCompressedAccountsWithMerkleContext: packedInputCompressedAccounts, outputCompressedAccounts: packedOutputCompressedAccounts, relayFee: null, newAddressParams: newAddressParamsPacked, compressOrDecompressLamports: null, isCompress: false, }; /// Encode instruction data const ixData = this.program.coder.types.encode( 'InstructionDataInvoke', rawData, ); /// Build anchor instruction const instruction = await this.program.methods .invoke(ixData) .accounts({ ...defaultStaticAccountsStruct(), feePayer: payer, authority: payer, solPoolPda: null, decompressionRecipient: null, systemProgram: SystemProgram.programId, }) .remainingAccounts(toAccountMetas(remainingAccounts)) .instruction(); return instruction; } /** * Creates a transaction instruction that transfers compressed lamports from * one owner to another. */ static async transfer({ payer, inputCompressedAccounts, toAddress, lamports, recentInputStateRootIndices, recentValidityProof, outputStateTrees, }: TransferParams): Promise<TransactionInstruction> { /// Create output state const outputCompressedAccounts = this.createTransferOutputState( inputCompressedAccounts, toAddress, lamports, ); /// Pack accounts const { packedInputCompressedAccounts, packedOutputCompressedAccounts, remainingAccounts, } = packCompressedAccounts( inputCompressedAccounts, recentInputStateRootIndices, outputCompressedAccounts, outputStateTrees, ); /// Encode instruction data const data = this.program.coder.types.encode('InstructionDataInvoke', { proof: recentValidityProof, inputCompressedAccountsWithMerkleContext: packedInputCompressedAccounts, outputCompressedAccounts: packedOutputCompressedAccounts, relayFee: null, /// TODO: here and on-chain: option<newAddressInputs> or similar. 
newAddressParams: [], compressOrDecompressLamports: null, isCompress: false, }); /// Build anchor instruction const instruction = await this.program.methods .invoke(data) .accounts({ ...defaultStaticAccountsStruct(), feePayer: payer, authority: payer, solPoolPda: null, decompressionRecipient: null, systemProgram: SystemProgram.programId, }) .remainingAccounts(toAccountMetas(remainingAccounts)) .instruction(); return instruction; } /** * Creates a transaction instruction that transfers compressed lamports from * one owner to another. */ // TODO: add support for non-fee-payer owner static async compress({ payer, toAddress, lamports, outputStateTree, }: CompressParams): Promise<TransactionInstruction> { /// Create output state lamports = bn(lamports); const outputCompressedAccount = createCompressedAccount( toAddress, lamports, ); /// Pack accounts const { packedInputCompressedAccounts, packedOutputCompressedAccounts, remainingAccounts, } = packCompressedAccounts( [], [], [outputCompressedAccount], outputStateTree, ); /// Encode instruction data const rawInputs: InstructionDataInvoke = { proof: null, inputCompressedAccountsWithMerkleContext: packedInputCompressedAccounts, outputCompressedAccounts: packedOutputCompressedAccounts, relayFee: null, /// TODO: here and on-chain: option<newAddressInputs> or similar. newAddressParams: [], compressOrDecompressLamports: lamports, isCompress: true, }; const data = this.program.coder.types.encode( 'InstructionDataInvoke', rawInputs, ); /// Build anchor instruction const instruction = await this.program.methods .invoke(data) .accounts({ ...defaultStaticAccountsStruct(), feePayer: payer, authority: payer, solPoolPda: this.deriveCompressedSolPda(), decompressionRecipient: null, systemProgram: SystemProgram.programId, }) .remainingAccounts(toAccountMetas(remainingAccounts)) .instruction(); return instruction; } /** * Creates a transaction instruction that transfers compressed lamports from * one owner to another. 
*/ static async decompress({ payer, inputCompressedAccounts, toAddress, lamports, recentInputStateRootIndices, recentValidityProof, outputStateTree, }: DecompressParams): Promise<TransactionInstruction> { /// Create output state lamports = bn(lamports); const outputCompressedAccounts = this.createDecompressOutputState( inputCompressedAccounts, lamports, ); /// Pack accounts const { packedInputCompressedAccounts, packedOutputCompressedAccounts, remainingAccounts, } = packCompressedAccounts( inputCompressedAccounts, recentInputStateRootIndices, outputCompressedAccounts, outputStateTree, ); /// Encode instruction data const data = this.program.coder.types.encode('InstructionDataInvoke', { proof: recentValidityProof, inputCompressedAccountsWithMerkleContext: packedInputCompressedAccounts, outputCompressedAccounts: packedOutputCompressedAccounts, relayFee: null, /// TODO: here and on-chain: option<newAddressInputs> or similar. newAddressParams: [], compressOrDecompressLamports: lamports, isCompress: false, }); /// Build anchor instruction const instruction = await this.program.methods .invoke(data) .accounts({ ...defaultStaticAccountsStruct(), feePayer: payer, authority: payer, solPoolPda: this.deriveCompressedSolPda(), decompressionRecipient: toAddress, systemProgram: SystemProgram.programId, }) .remainingAccounts(toAccountMetas(remainingAccounts)) .instruction(); return instruction; } } /** * Selects the minimal number of compressed SOL accounts for a transfer. * * 1. Sorts the accounts by amount in descending order * 2. 
Accumulates the amount until it is greater than or equal to the transfer * amount */ export function selectMinCompressedSolAccountsForTransfer( accounts: CompressedAccountWithMerkleContext[], transferLamports: BN | number, ): [selectedAccounts: CompressedAccountWithMerkleContext[], total: BN] { let accumulatedLamports = bn(0); transferLamports = bn(transferLamports); const selectedAccounts: CompressedAccountWithMerkleContext[] = []; accounts.sort((a, b) => b.lamports.cmp(a.lamports)); for (const account of accounts) { if (accumulatedLamports.gte(bn(transferLamports))) break; accumulatedLamports = accumulatedLamports.add(account.lamports); selectedAccounts.push(account); } if (accumulatedLamports.lt(bn(transferLamports))) { throw new Error( `Not enough balance for transfer. Required: ${transferLamports.toString()}, available: ${accumulatedLamports.toString()}`, ); } return [selectedAccounts, accumulatedLamports]; }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/actions/create-account.ts
import { ComputeBudgetProgram, ConfirmOptions, PublicKey, Signer, TransactionSignature, } from '@solana/web3.js'; import { LightSystemProgram, selectMinCompressedSolAccountsForTransfer, } from '../programs'; import { Rpc } from '../rpc'; import { NewAddressParams, buildAndSignTx, deriveAddress, deriveAddressSeed, sendAndConfirmTx, } from '../utils'; import { defaultTestStateTreeAccounts } from '../constants'; import { bn } from '../state'; import { BN } from '@coral-xyz/anchor'; /** * Create compressed account with address * * @param rpc RPC to use * @param payer Payer of the transaction and initialization fees * @param seeds Seeds to derive the new account address * @param programId Owner of the new account * @param addressTree Optional address tree. Defaults to a current shared * address tree. * @param addressQueue Optional address queue. Defaults to a current shared * address queue. * @param outputStateTree Optional output state tree. Defaults to a current * shared state tree. * @param confirmOptions Options for confirming the transaction * * @return Transaction signature */ export async function createAccount( rpc: Rpc, payer: Signer, seeds: Uint8Array[], programId: PublicKey, addressTree?: PublicKey, addressQueue?: PublicKey, outputStateTree?: PublicKey, confirmOptions?: ConfirmOptions, ): Promise<TransactionSignature> { const { blockhash } = await rpc.getLatestBlockhash(); addressTree = addressTree ?? defaultTestStateTreeAccounts().addressTree; addressQueue = addressQueue ?? 
defaultTestStateTreeAccounts().addressQueue; const seed = deriveAddressSeed(seeds, programId); const address = deriveAddress(seed, addressTree); const proof = await rpc.getValidityProofV0(undefined, [ { address: bn(address.toBytes()), tree: addressTree, queue: addressQueue, }, ]); const params: NewAddressParams = { seed: seed, addressMerkleTreeRootIndex: proof.rootIndices[0], addressMerkleTreePubkey: proof.merkleTrees[0], addressQueuePubkey: proof.nullifierQueues[0], }; const ix = await LightSystemProgram.createAccount({ payer: payer.publicKey, newAddressParams: params, newAddress: Array.from(address.toBytes()), recentValidityProof: proof.compressedProof, programId, outputStateTree, }); const tx = buildAndSignTx( [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix], payer, blockhash, [], ); const txId = await sendAndConfirmTx(rpc, tx, confirmOptions); return txId; } /** * Create compressed account with address and lamports * * @param rpc RPC to use * @param payer Payer of the transaction and initialization fees * @param seeds Seeds to derive the new account address * @param lamports Number of compressed lamports to initialize the * account with * @param programId Owner of the new account * @param addressTree Optional address tree. Defaults to a current shared * address tree. * @param addressQueue Optional address queue. Defaults to a current shared * address queue. * @param outputStateTree Optional output state tree. Defaults to a current * shared state tree. 
 * @param confirmOptions Options for confirming the transaction
 *
 * @return Transaction signature
 */
// TODO: add support for payer != user owner
export async function createAccountWithLamports(
    rpc: Rpc,
    payer: Signer,
    seeds: Uint8Array[],
    lamports: number | BN,
    programId: PublicKey,
    addressTree?: PublicKey,
    addressQueue?: PublicKey,
    outputStateTree?: PublicKey,
    confirmOptions?: ConfirmOptions,
): Promise<TransactionSignature> {
    // Normalize to BN so the selection and comparisons below work uniformly.
    lamports = bn(lamports);

    // Fetch the payer's compressed accounts and select a minimal subset
    // whose combined lamports cover the requested amount.
    const compressedAccounts = await rpc.getCompressedAccountsByOwner(
        payer.publicKey,
    );

    const [inputAccounts] = selectMinCompressedSolAccountsForTransfer(
        compressedAccounts.items,
        lamports,
    );

    const { blockhash } = await rpc.getLatestBlockhash();

    // Fall back to the shared test trees when no trees are provided.
    addressTree = addressTree ?? defaultTestStateTreeAccounts().addressTree;
    addressQueue = addressQueue ?? defaultTestStateTreeAccounts().addressQueue;

    // Derive the deterministic compressed-account address from the seeds.
    const seed = deriveAddressSeed(seeds, programId);
    const address = deriveAddress(seed, addressTree);

    // One combined validity proof covering both the input account hashes
    // (inclusion) and the new address (non-inclusion).
    const proof = await rpc.getValidityProof(
        inputAccounts.map(account => bn(account.hash)),
        [bn(address.toBytes())],
    );

    /// TODO(crank): Adapt before supporting addresses in rpc / cranked address trees.
    /// Currently expects address roots to be consistent with one another and
    /// static. See test-rpc.ts for more details.
    // NOTE(review): the `length - 1` indexing assumes the combined proof
    // appends the address entry after the input-state entries — confirm
    // against the RPC's getValidityProof ordering.
    const params: NewAddressParams = {
        seed: seed,
        addressMerkleTreeRootIndex:
            proof.rootIndices[proof.rootIndices.length - 1],
        addressMerkleTreePubkey:
            proof.merkleTrees[proof.merkleTrees.length - 1],
        addressQueuePubkey:
            proof.nullifierQueues[proof.nullifierQueues.length - 1],
    };

    const ix = await LightSystemProgram.createAccount({
        payer: payer.publicKey,
        newAddressParams: params,
        newAddress: Array.from(address.toBytes()),
        recentValidityProof: proof.compressedProof,
        inputCompressedAccounts: inputAccounts,
        // NOTE(review): passes the FULL rootIndices array (which also
        // contains the trailing address root index) as input state root
        // indices — verify this is what createAccount expects.
        inputStateRootIndices: proof.rootIndices,
        programId,
        outputStateTree,
    });

    // Proof verification is compute-heavy; request a raised CU limit.
    const tx = buildAndSignTx(
        [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix],
        payer,
        blockhash,
        [],
    );

    const txId = await sendAndConfirmTx(rpc, tx, confirmOptions);
    return txId;
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/actions/compress.ts
import { ComputeBudgetProgram, ConfirmOptions, PublicKey, Signer, TransactionSignature, } from '@solana/web3.js'; import { LightSystemProgram } from '../programs'; import { Rpc } from '../rpc'; import { buildAndSignTx, sendAndConfirmTx } from '../utils'; import { BN } from '@coral-xyz/anchor'; import { defaultTestStateTreeAccounts } from '../constants'; /** * Compress lamports to a solana address * * @param rpc RPC to use * @param payer Payer of the transaction and initialization fees * @param lamports Amount of lamports to compress * @param toAddress Address of the recipient compressed account * @param outputStateTree Optional output state tree. Defaults to a current shared state tree. * @param confirmOptions Options for confirming the transaction * * @return Transaction signature */ /// TODO: add multisig support /// TODO: add support for payer != owner export async function compress( rpc: Rpc, payer: Signer, lamports: number | BN, toAddress: PublicKey, outputStateTree?: PublicKey, confirmOptions?: ConfirmOptions, ): Promise<TransactionSignature> { const { blockhash } = await rpc.getLatestBlockhash(); const ix = await LightSystemProgram.compress({ payer: payer.publicKey, toAddress, lamports, outputStateTree, }); const tx = buildAndSignTx( [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix], payer, blockhash, [], ); const txId = await sendAndConfirmTx(rpc, tx, confirmOptions); return txId; }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/actions/common.ts
import { Signer } from '@solana/web3.js'; /** @internal remove signer from signers if part of signers */ export function dedupeSigner(signer: Signer, signers: Signer[]): Signer[] { if (signers.includes(signer)) { return signers.filter( s => s.publicKey.toString() !== signer.publicKey.toString(), ); } return signers; }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/actions/transfer.ts
import {
    ComputeBudgetProgram,
    ConfirmOptions,
    PublicKey,
    Signer,
    TransactionSignature,
} from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import {
    LightSystemProgram,
    selectMinCompressedSolAccountsForTransfer,
} from '../programs';
import { Rpc } from '../rpc';
import { bn, CompressedAccountWithMerkleContext } from '../state';
import { buildAndSignTx, sendAndConfirmTx } from '../utils';
import { GetCompressedAccountsByOwnerConfig } from '../rpc-interface';

/**
 * Transfer compressed lamports from one owner to another
 *
 * @param rpc            Rpc to use
 * @param payer          Payer of transaction fees
 * @param lamports       Number of lamports to transfer
 * @param owner          Owner of the compressed lamports
 * @param toAddress      Destination address of the recipient
 * @param merkleTree     State tree account that the compressed lamports
 *                       should be inserted into. Defaults to the default
 *                       state tree account.
 * @param confirmOptions Options for confirming the transaction
 *
 * @return Signature of the confirmed transaction
 */
export async function transfer(
    rpc: Rpc,
    payer: Signer,
    lamports: number | BN,
    owner: Signer,
    toAddress: PublicKey,
    /// TODO: allow multiple
    merkleTree?: PublicKey,
    confirmOptions?: ConfirmOptions,
): Promise<TransactionSignature> {
    let accumulatedLamports = bn(0);
    const compressedAccounts: CompressedAccountWithMerkleContext[] = [];
    let cursor: string | undefined;
    const batchSize = 1000; // Maximum allowed by the API
    lamports = bn(lamports);

    // Page through the owner's compressed accounts, collecting accounts
    // with a positive balance until the requested amount is covered or
    // the pages run out.
    while (accumulatedLamports.lt(lamports)) {
        const batchConfig: GetCompressedAccountsByOwnerConfig = {
            filters: undefined,
            dataSlice: undefined,
            cursor,
            limit: new BN(batchSize),
        };

        const batch = await rpc.getCompressedAccountsByOwner(
            owner.publicKey,
            batchConfig,
        );

        // Skip zero-lamport accounts: they cannot contribute to the sum.
        for (const account of batch.items) {
            if (account.lamports.gt(new BN(0))) {
                compressedAccounts.push(account);
                accumulatedLamports = accumulatedLamports.add(
                    account.lamports,
                );
            }
        }

        cursor = batch.cursor ?? undefined;
        // A short page means there are no further pages.
        if (
            batch.items.length < batchSize ||
            accumulatedLamports.gte(lamports)
        )
            break;
    }

    if (accumulatedLamports.lt(lamports)) {
        throw new Error(
            `Not enough balance for transfer. Required: ${lamports.toString()}, available: ${accumulatedLamports.toString()}`,
        );
    }

    // Pick the minimal subset of collected accounts to spend.
    const [inputAccounts] = selectMinCompressedSolAccountsForTransfer(
        compressedAccounts,
        lamports,
    );

    // Inclusion proof for the selected input account hashes.
    const proof = await rpc.getValidityProof(
        inputAccounts.map(account => bn(account.hash)),
    );

    const ix = await LightSystemProgram.transfer({
        payer: payer.publicKey,
        inputCompressedAccounts: inputAccounts,
        toAddress,
        lamports,
        recentInputStateRootIndices: proof.rootIndices,
        recentValidityProof: proof.compressedProof,
        outputStateTrees: merkleTree,
    });

    const { blockhash } = await rpc.getLatestBlockhash();

    // Proof verification is compute-heavy; request a raised CU limit.
    const signedTx = buildAndSignTx(
        [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix],
        payer,
        blockhash,
    );

    const txId = await sendAndConfirmTx(rpc, signedTx, confirmOptions);
    return txId;
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/actions/index.ts
export * from './compress'; export * from './create-account'; export * from './decompress'; export * from './common'; export * from './transfer';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/actions/decompress.ts
import {
    ComputeBudgetProgram,
    ConfirmOptions,
    PublicKey,
    Signer,
    TransactionSignature,
} from '@solana/web3.js';
import { LightSystemProgram, sumUpLamports } from '../programs';
import { Rpc } from '../rpc';
import { buildAndSignTx, sendAndConfirmTx } from '../utils';
import { BN } from '@coral-xyz/anchor';
import { CompressedAccountWithMerkleContext, bn } from '../state';

/**
 * Decompress lamports into a solana account
 *
 * @param rpc             RPC to use
 * @param payer           Payer of the transaction and initialization fees
 * @param lamports        Amount of lamports to decompress
 * @param recipient       Solana account that receives the lamports
 * @param outputStateTree Optional output state tree. Defaults to a current
 *                        shared state tree.
 * @param confirmOptions  Options for confirming the transaction
 *
 * @return Transaction signature
 */
/// TODO: add multisig support
/// TODO: add support for payer != owner
export async function decompress(
    rpc: Rpc,
    payer: Signer,
    lamports: number | BN,
    recipient: PublicKey,
    outputStateTree?: PublicKey,
    confirmOptions?: ConfirmOptions,
): Promise<TransactionSignature> {
    /// TODO: use dynamic state tree and nullifier queue
    // NOTE(review): uses ALL of the payer's compressed accounts as inputs
    // (and proves all of them), unlike transfer() which selects a minimal
    // subset — consider selectMinCompressedSolAccountsForTransfer here.
    const userCompressedAccountsWithMerkleContext: CompressedAccountWithMerkleContext[] =
        (await rpc.getCompressedAccountsByOwner(payer.publicKey)).items;

    lamports = bn(lamports);

    // Sanity check: total compressed balance must cover the request.
    const inputLamports = sumUpLamports(
        userCompressedAccountsWithMerkleContext,
    );
    if (lamports.gt(inputLamports)) {
        throw new Error(
            `Not enough compressed lamports. Expected ${lamports}, got ${inputLamports}`,
        );
    }

    // Inclusion proof for every consumed account hash.
    const proof = await rpc.getValidityProof(
        userCompressedAccountsWithMerkleContext.map(x => bn(x.hash)),
    );

    const { blockhash } = await rpc.getLatestBlockhash();
    const ix = await LightSystemProgram.decompress({
        payer: payer.publicKey,
        toAddress: recipient,
        outputStateTree: outputStateTree,
        inputCompressedAccounts: userCompressedAccountsWithMerkleContext,
        recentValidityProof: proof.compressedProof,
        recentInputStateRootIndices: proof.rootIndices,
        lamports,
    });

    // Proof verification is compute-heavy; request a raised CU limit.
    const tx = buildAndSignTx(
        [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix],
        payer,
        blockhash,
        [],
    );

    const txId = await sendAndConfirmTx(rpc, tx, confirmOptions);
    return txId;
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/wallet/interface.ts
/// TODO: extract wallet into its own npm package import { Commitment, Connection, Keypair, VersionedTransaction, sendAndConfirmTransaction, } from '@solana/web3.js'; import { PublicKey, Transaction } from '@solana/web3.js'; import nacl from 'tweetnacl'; const { sign } = nacl; export type InclusionProofPublicInputs = { root: string; leaf: string; }; export type InclusionProofPrivateInputs = { merkleProof: string[]; leaf: string; leafIndex: string; }; /// On the system level, we're proving simple inclusion proofs in a /// state tree, for each utxo used as input into a transaction. export type InclusionProofInputs = (InclusionProofPublicInputs & InclusionProofPrivateInputs)[]; /// Mock Solana web3 library export class Wallet { _publicKey: PublicKey; _keypair: Keypair; _connection: Connection; _url: string; _commitment: Commitment; constructor(keypair: Keypair, url: string, commitment: Commitment) { this._publicKey = keypair.publicKey; this._keypair = keypair; this._connection = new Connection(url); this._url = url; this._commitment = commitment; } signTransaction = async (tx: any): Promise<any> => { await tx.sign([this._keypair!]); return tx; }; sendTransaction = async ( transaction: VersionedTransaction, ): Promise<string> => { const signature = await this._connection.sendTransaction(transaction); return signature; }; signAllTransactions = async <T extends Transaction | VersionedTransaction>( transactions: T[], ): Promise<T[]> => { const signedTxs = await Promise.all( transactions.map(async tx => { return await this.signTransaction(tx); }), ); return signedTxs; }; signMessage = async (message: Uint8Array): Promise<Uint8Array> => { return sign.detached(message, this._keypair.secretKey); }; sendAndConfirmTransaction = async ( transaction: Transaction, signers = [], ): Promise<any> => { const response = await sendAndConfirmTransaction( this._connection, transaction, [this._keypair, ...signers], { commitment: this._commitment, }, ); return response; }; }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/wallet/index.ts
export * from './use-wallet';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/wallet/use-wallet.ts
import { Keypair, Commitment } from '@solana/web3.js'; import { Wallet } from './interface'; // TODO consider adding isNodeWallet export const useWallet = ( keypair: Keypair, url: string = 'http://127.0.0.1:8899', commitment: Commitment = 'confirmed', ) => { url = url !== 'mock' ? url : 'http://127.0.0.1:8899'; const wallet = new Wallet(keypair, url, commitment); return { publicKey: wallet._publicKey, sendAndConfirmTransaction: wallet.sendAndConfirmTransaction, signMessage: wallet.signMessage, signTransaction: wallet.signTransaction, signAllTransactions: wallet.signAllTransactions, sendTransaction: wallet.sendTransaction, }; };
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/instruction/index.ts
export * from './pack-compressed-accounts';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/instruction/pack-compressed-accounts.ts
import { AccountMeta, PublicKey } from '@solana/web3.js';
import {
    CompressedAccount,
    OutputCompressedAccountWithPackedContext,
    PackedCompressedAccountWithMerkleContext,
} from '../state';
import { CompressedAccountWithMerkleContext } from '../state/compressed-account';
import { toArray } from '../utils/conversion';
import { defaultTestStateTreeAccounts } from '../constants';

/**
 * @internal Finds the index of a PublicKey in an array, or adds it if not
 * present
 *
 * Note: mutates `accountsArray` when `key` is absent.
 */
export function getIndexOrAdd(
    accountsArray: PublicKey[],
    key: PublicKey,
): number {
    // Linear scan with PublicKey.equals (value equality, not identity).
    const index = accountsArray.findIndex(existingKey =>
        existingKey.equals(key),
    );
    if (index === -1) {
        accountsArray.push(key);
        return accountsArray.length - 1;
    }
    return index;
}

/**
 * @internal Return exactly one output state tree pubkey per output account.
 * Selection: explicit trees if given (truncated / padded with the 0th
 * entry), else the 0th input account's tree, else the default test tree.
 */
export function padOutputStateMerkleTrees(
    outputStateMerkleTrees: PublicKey[] | PublicKey | undefined,
    numberOfOutputCompressedAccounts: number,
    inputCompressedAccountsWithMerkleContext: CompressedAccountWithMerkleContext[],
): PublicKey[] {
    if (numberOfOutputCompressedAccounts <= 0) {
        return [];
    }
    /// Default: use the 0th state tree of input state for all output accounts
    if (outputStateMerkleTrees === undefined) {
        if (inputCompressedAccountsWithMerkleContext.length === 0) {
            return new Array(numberOfOutputCompressedAccounts).fill(
                defaultTestStateTreeAccounts().merkleTree,
            );
        }
        return new Array(numberOfOutputCompressedAccounts).fill(
            inputCompressedAccountsWithMerkleContext[0].merkleTree,
        );
        /// Align the number of output state trees with the number of output
        /// accounts, and fill up with 0th output state tree
    } else {
        /// Into array
        const treesArray = toArray(outputStateMerkleTrees);
        if (treesArray.length >= numberOfOutputCompressedAccounts) {
            return treesArray.slice(0, numberOfOutputCompressedAccounts);
        } else {
            return treesArray.concat(
                new Array(
                    numberOfOutputCompressedAccounts - treesArray.length,
                ).fill(treesArray[0]),
            );
        }
    }
}

/**
 * Convert packed pubkeys to AccountMeta. All remaining accounts are
 * writable non-signers here.
 */
export function toAccountMetas(remainingAccounts: PublicKey[]): AccountMeta[] {
    return remainingAccounts.map(
        (account): AccountMeta => ({
            pubkey: account,
            isWritable: true,
            isSigner: false,
        }),
    );
}

// TODO: include owner and lamports in packing.
/**
 * Packs Compressed Accounts.
 *
 * Replaces PublicKey with index pointer to remaining accounts.
 *
 * @param inputCompressedAccounts       Ix input state to be consumed
 * @param inputStateRootIndices         The recent state root indices of the
 *                                      input state. The expiry is tied to
 *                                      the proof.
 * @param outputCompressedAccounts      Ix output state to be created
 * @param outputStateMerkleTrees        Optional output state trees to be
 *                                      inserted into the output state.
 *                                      Defaults to the 0th state tree of
 *                                      the input state. Gets padded to the
 *                                      length of outputCompressedAccounts.
 *
 * @param remainingAccounts             Optional existing array of accounts
 *                                      to append to.
 **/
export function packCompressedAccounts(
    inputCompressedAccounts: CompressedAccountWithMerkleContext[],
    inputStateRootIndices: number[],
    outputCompressedAccounts: CompressedAccount[],
    outputStateMerkleTrees?: PublicKey[] | PublicKey,
    remainingAccounts: PublicKey[] = [],
): {
    packedInputCompressedAccounts: PackedCompressedAccountWithMerkleContext[];
    packedOutputCompressedAccounts: OutputCompressedAccountWithPackedContext[];
    remainingAccounts: PublicKey[];
} {
    // Copy so the caller's array is never mutated by getIndexOrAdd.
    const _remainingAccounts = remainingAccounts.slice();

    const packedInputCompressedAccounts: PackedCompressedAccountWithMerkleContext[] =
        [];

    const packedOutputCompressedAccounts: OutputCompressedAccountWithPackedContext[] =
        [];

    /// input
    // Replace each input account's tree/queue pubkeys with indices into
    // the (deduplicated) remaining-accounts list.
    inputCompressedAccounts.forEach((account, index) => {
        const merkleTreePubkeyIndex = getIndexOrAdd(
            _remainingAccounts,
            account.merkleTree,
        );

        const nullifierQueuePubkeyIndex = getIndexOrAdd(
            _remainingAccounts,
            account.nullifierQueue,
        );

        packedInputCompressedAccounts.push({
            compressedAccount: {
                owner: account.owner,
                lamports: account.lamports,
                address: account.address,
                data: account.data,
            },
            merkleContext: {
                merkleTreePubkeyIndex,
                nullifierQueuePubkeyIndex,
                leafIndex: account.leafIndex,
                queueIndex: null,
            },
            // Root index pairs positionally with the input account.
            rootIndex: inputStateRootIndices[index],
            readOnly: false,
        });
    });

    /// output
    // One output tree per output account (see padOutputStateMerkleTrees).
    const paddedOutputStateMerkleTrees = padOutputStateMerkleTrees(
        outputStateMerkleTrees,
        outputCompressedAccounts.length,
        inputCompressedAccounts,
    );

    outputCompressedAccounts.forEach((account, index) => {
        const merkleTreePubkeyIndex = getIndexOrAdd(
            _remainingAccounts,
            paddedOutputStateMerkleTrees[index],
        );
        packedOutputCompressedAccounts.push({
            compressedAccount: {
                owner: account.owner,
                lamports: account.lamports,
                address: account.address,
                data: account.data,
            },
            merkleTreeIndex: merkleTreePubkeyIndex,
        });
    });

    return {
        packedInputCompressedAccounts,
        packedOutputCompressedAccounts,
        remainingAccounts: _remainingAccounts,
    };
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/index.ts
export * from './merkle-tree'; export * from './test-rpc';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/merkle-tree/merkle-tree.ts
import { LightWasm } from '../test-rpc/test-rpc';

export const DEFAULT_ZERO = '0';

/**
 * @callback hashFunction
 * @param left Left leaf
 * @param right Right leaf
 */

/**
 * Merkle tree
 *
 * Fixed-depth binary Merkle tree over decimal-string elements, hashed with
 * the Poseidon hash provided by LightWasm. All layers are kept in memory.
 */
export class MerkleTree {
    /**
     * Constructor
     * @param {number} levels Number of levels in the tree
     * @param {Array} [elements] Initial elements
     * @param {Object} options
     * @param {hashFunction} [options.hashFunction] Function used to hash 2 leaves
     * @param [options.zeroElement] Value for non-existent leaves
     */
    levels: number;
    capacity: number;
    zeroElement;
    // _zeros[i]: hash of an all-empty subtree of height i.
    _zeros: string[];
    // _layers[0] are the leaves; _layers[levels][0] is the root.
    _layers: string[][];
    _lightWasm: LightWasm;

    constructor(
        levels: number,
        lightWasm: LightWasm,
        elements: string[] = [],
        { zeroElement = DEFAULT_ZERO } = {},
    ) {
        this.levels = levels;
        this.capacity = 2 ** levels;
        this.zeroElement = zeroElement;
        this._lightWasm = lightWasm;
        if (elements.length > this.capacity) {
            throw new Error('Tree is full');
        }
        this._zeros = [];
        this._layers = [];
        this._layers[0] = elements;
        this._zeros[0] = this.zeroElement;

        // Precompute the zero hash of every level.
        for (let i = 1; i <= levels; i++) {
            this._zeros[i] = this._lightWasm.poseidonHashString([
                this._zeros[i - 1],
                this._zeros[i - 1],
            ]);
        }
        this._rebuild();
    }

    // Recompute every non-leaf layer from layer 0. Missing right siblings
    // are substituted with the zero hash of that level.
    _rebuild() {
        for (let level = 1; level <= this.levels; level++) {
            this._layers[level] = [];
            for (
                let i = 0;
                i < Math.ceil(this._layers[level - 1].length / 2);
                i++
            ) {
                this._layers[level][i] = this._lightWasm.poseidonHashString([
                    this._layers[level - 1][i * 2],
                    i * 2 + 1 < this._layers[level - 1].length
                        ? this._layers[level - 1][i * 2 + 1]
                        : this._zeros[level - 1],
                ]);
            }
        }
    }

    /**
     * Get tree root
     * @returns {*} Root of the tree; the all-zero root when empty.
     */
    root() {
        return this._layers[this.levels].length > 0
            ? this._layers[this.levels][0]
            : this._zeros[this.levels];
    }

    /**
     * Insert new element into the tree
     * @param element Element to insert
     */
    insert(element: string) {
        if (this._layers[0].length >= this.capacity) {
            throw new Error('Tree is full');
        }
        // Appending == updating the first empty leaf slot.
        this.update(this._layers[0].length, element);
    }

    /**
     * Insert multiple elements into the tree. Tree will be fully rebuilt
     * during this operation.
     * @param {Array} elements Elements to insert
     */
    bulkInsert(elements: string[]) {
        if (this._layers[0].length + elements.length > this.capacity) {
            throw new Error('Tree is full');
        }
        this._layers[0].push(...elements);
        this._rebuild();
    }

    // TODO: update does not work debug
    /**
     * Change an element in the tree
     * @param {number} index Index of element to change
     * @param element Updated element value
     */
    update(index: number, element: string) {
        // index 0 and 1 and element is the commitment hash
        // Note: index == current length is allowed (append via insert()).
        if (
            isNaN(Number(index)) ||
            index < 0 ||
            index > this._layers[0].length ||
            index >= this.capacity
        ) {
            throw new Error('Insert index out of bounds: ' + index);
        }
        this._layers[0][index] = element;
        // Recompute only the path from the leaf to the root.
        for (let level = 1; level <= this.levels; level++) {
            index >>= 1;
            this._layers[level][index] = this._lightWasm.poseidonHashString([
                this._layers[level - 1][index * 2],
                index * 2 + 1 < this._layers[level - 1].length
                    ? this._layers[level - 1][index * 2 + 1]
                    : this._zeros[level - 1],
            ]);
        }
    }

    /**
     * Get merkle path to a leaf
     * @param {number} index Leaf index to generate path for
     * @returns {{pathElements: number[], pathIndex: number[]}} An object containing adjacent elements and left-right index
     */
    path(index: number) {
        if (
            isNaN(Number(index)) ||
            index < 0 ||
            index >= this._layers[0].length
        ) {
            throw new Error('Index out of bounds: ' + index);
        }
        const pathElements: string[] = [];
        const pathIndices: number[] = [];
        for (let level = 0; level < this.levels; level++) {
            // index % 2: 0 = current node is a left child, 1 = right child.
            pathIndices[level] = index % 2;
            // index ^ 1 is the sibling; fall back to the zero hash if the
            // sibling slot is empty.
            pathElements[level] =
                (index ^ 1) < this._layers[level].length
                    ? this._layers[level][index ^ 1]
                    : this._zeros[level];
            index >>= 1;
        }
        return {
            pathElements,
            pathIndices,
        };
    }

    /**
     * Find an element in the tree
     * @param element An element to find
     * @param comparator A function that checks leaf value equality
     * @returns {number} Index if element is found, otherwise -1
     */
    indexOf(
        element: string,
        comparator: ((element: string, el: string) => boolean) | null = null,
    ) {
        if (comparator) {
            return this._layers[0].findIndex((el: string) =>
                comparator(element, el),
            );
        } else {
            return this._layers[0].indexOf(element);
        }
    }

    /**
     * Returns a copy of non-zero tree elements
     * @returns {Object[]}
     */
    elements() {
        return this._layers[0].slice();
    }

    /**
     * Serialize entire tree state including intermediate layers into a plain object
     * Deserializing it back will not require to recompute any hashes
     * Elements are not converted to a plain type, this is responsibility of the caller
     */
    serialize() {
        return {
            levels: this.levels,
            _zeros: this._zeros,
            _layers: this._layers,
        };
    }

    /**
     * Deserialize data into a MerkleTree instance
     * Make sure to provide the same hashFunction as was used in the source tree,
     * otherwise the tree state will be invalid
     *
     * @param data
     * @param hashFunction
     * @returns {MerkleTree}
     *
     * NOTE(review): assigns `_hash`, but the instance methods hash via
     * `_lightWasm.poseidonHashString` — a deserialized tree therefore has
     * no working hasher. Confirm intended usage before relying on
     * insert/update after deserialize.
     */
    static deserialize(
        data: any,
        hashFunction: (left: string, right: string) => string,
    ) {
        const instance = Object.assign(Object.create(this.prototype), data);
        instance._hash = hashFunction;
        instance.capacity = 2 ** instance.levels;
        instance.zeroElement = instance._zeros[0];
        return instance;
    }
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/merkle-tree/indexed-array.ts
import { LightWasm } from '../test-rpc/test-rpc'; import { BN } from '@coral-xyz/anchor'; import { bn } from '../../state'; import { HIGHEST_ADDRESS_PLUS_ONE } from '../../constants'; export class IndexedElement { public index: number; public value: BN; public nextIndex: number; constructor(index: number, value: BN, nextIndex: number) { this.index = index; this.value = value; this.nextIndex = nextIndex; } public equals(other: IndexedElement): boolean { return this.value.eq(other.value); } public compareTo(other: IndexedElement): number { return this.value.cmp(other.value); } public hash(lightWasm: LightWasm, nextValue: BN): Uint8Array { try { const hash = lightWasm.poseidonHash([ bn(this.value.toArray('be', 32)).toString(), bn(this.nextIndex).toString(), bn(nextValue.toArray('be', 32)).toString(), ]); return hash; } catch (error) { throw new Error('Hashing failed'); } } } export class IndexedElementBundle { public newLowElement: IndexedElement; public newElement: IndexedElement; public newElementNextValue: BN; constructor( newLowElement: IndexedElement, newElement: IndexedElement, newElementNextValue: BN, ) { this.newLowElement = newLowElement; this.newElement = newElement; this.newElementNextValue = newElementNextValue; } } /** * This indexed array implementation mirrors the rust implementation of the * indexed merkle tree. It stores the elements of the indexed merkle tree. 
*/ export class IndexedArray { public elements: Array<IndexedElement>; public currentNodeIndex: number; public highestElementIndex: number; constructor( elements: Array<IndexedElement>, currentNodeIndex: number, highestElementIndex: number, ) { this.elements = elements; this.currentNodeIndex = currentNodeIndex; this.highestElementIndex = highestElementIndex; } public static default(): IndexedArray { return new IndexedArray([new IndexedElement(0, bn(0), 0)], 0, 0); } public get(index: number): IndexedElement | undefined { return this.elements[index]; } public length(): number { return Number(this.currentNodeIndex); } public isEmpty(): boolean { return this.currentNodeIndex === 0; } public findElement(value: BN): IndexedElement | undefined { return this.elements .slice(0, this.length() + 1) .find(node => node.value === value); } public init(): IndexedElementBundle { try { const init_value = HIGHEST_ADDRESS_PLUS_ONE; return this.append(init_value); } catch (error) { throw new Error(`Failed to initialize IndexedArray: ${error}`); } } /** * Finds the index of the low element for the given `value` which should not be part of the array. * Low element is the greatest element which still has a lower value than the provided one. * Low elements are used in non-membership proofs. */ public findLowElementIndex(value: BN): number | undefined { // Try to find element whose next element is higher than the provided value. for (let i = 0; i <= this.length(); i++) { const node = this.elements[i]; if ( this.elements[node.nextIndex].value.gt(value) && node.value.lt(value) ) { return i; } else if (node.value.eq(value)) { throw new Error('Element already exists in the array'); } } // If no such element was found, it means that our value is going to be the greatest in the array. // This means that the currently greatest element is going to be the low element of our value. 
return this.highestElementIndex; } /** * Returns the low element for the given value and the next value for that low element. * Low element is the greatest element which still has lower value than the provided one. * Low elements are used in non-membership proofs. */ public findLowElement( value: BN, ): [IndexedElement | undefined, BN | undefined] { const lowElementIndex = this.findLowElementIndex(value); if (lowElementIndex === undefined) return [undefined, undefined]; const lowElement = this.elements[lowElementIndex]; return [lowElement, this.elements[lowElement.nextIndex].value]; } // /** // * Returns the index of the low element for the given `value`, which should be the part of the array. // * Low element is the greatest element which still has lower value than the provided one. // * Low elements are used in non-membership proofs. // */ // public findLowElementIndexForExistingElement( // value: BN, // ): number | undefined { // for (let i = 0; i <= this.length(); i++) { // const node = this.elements[i]; // if (this.elements[node.nextIndex].value === value) { // return i; // } // } // return undefined; // } /** * Returns the hash of the given element. That hash consists of: * - The value of the given element. * - The `nextIndex` of the given element. * - The value of the element pointed by `nextIndex`. */ public hashElement( lightWasm: LightWasm, index: number, ): Uint8Array | undefined { const element = this.elements[index]; if (!element) return undefined; const nextElement = this.elements[element.nextIndex]; if (!nextElement) return undefined; const hash = lightWasm.poseidonHash([ bn(element.value.toArray('be', 32)).toString(), bn(element.nextIndex).toString(), bn(nextElement.value.toArray('be', 32)).toString(), ]); return hash; } /** * Appends a new element with the given value to the indexed array. * It finds the low element index and uses it to append the new element correctly. * @param value The value of the new element to append. 
* @returns The new element and its low element after insertion. */ public append(value: BN): IndexedElementBundle { const lowElementIndex = this.findLowElementIndex(value); if (lowElementIndex === undefined) { throw new Error('Low element index not found.'); } return this.appendWithLowElementIndex(lowElementIndex, value); } /** * Appends a new element with the given value to the indexed array using a specific low element index. * This method ensures the new element is placed correctly relative to the low element. * @param lowElementIndex The index of the low element. * @param value The value of the new element to append. * @returns The new element and its updated low element. */ public appendWithLowElementIndex( lowElementIndex: number, value: BN, ): IndexedElementBundle { const lowElement = this.elements[lowElementIndex]; if (lowElement.nextIndex === 0) { if (value.lte(lowElement.value)) { throw new Error( 'New element value must be greater than the low element value.', ); } } else { const nextElement = this.elements[lowElement.nextIndex]; if (value.lte(lowElement.value)) { throw new Error( 'New element value must be greater than the low element value.', ); } if (value.gte(nextElement.value)) { throw new Error( 'New element value must be less than the next element value.', ); } } const newElementBundle = this.newElementWithLowElementIndex( lowElementIndex, value, ); // If the old low element wasn't pointing to any element, it means that: // // * It used to be the highest element. // * Our new element, which we are appending, is going the be the // highest element. // // Therefore, we need to save the new element index as the highest // index. if (lowElement.nextIndex === 0) { this.highestElementIndex = newElementBundle.newElement.index; } // Insert new node. this.currentNodeIndex = newElementBundle.newElement.index; this.elements[this.length()] = newElementBundle.newElement; // Update low element. 
this.elements[lowElementIndex] = newElementBundle.newLowElement; return newElementBundle; } /** * Finds the lowest element in the array. * @returns The lowest element or undefined if the array is empty. */ public lowest(): IndexedElement | undefined { return this.elements.length > 0 ? this.elements[0] : undefined; } /** * Creates a new element with the specified value and updates the low element index accordingly. * @param lowElementIndex The index of the low element. * @param value The value for the new element. * @returns A bundle containing the new element, the updated low element, and the value of the next element. */ public newElementWithLowElementIndex( lowElementIndex: number, value: BN, ): IndexedElementBundle { const newLowElement = this.elements[lowElementIndex]; const newElementIndex = this.currentNodeIndex + 1; const newElement = new IndexedElement( newElementIndex, value, newLowElement.nextIndex, ); newLowElement.nextIndex = newElementIndex; const newElementNextValue = this.elements[newElement.nextIndex].value; return new IndexedElementBundle( newLowElement, newElement, newElementNextValue, ); } /** * Creates a new element with the specified value by first finding the appropriate low element index. * @param value The value for the new element. * @returns A bundle containing the new element, the updated low element, and the value of the next element. */ public newElement(value: BN): IndexedElementBundle { const lowElementIndex = this.findLowElementIndex(value); if (lowElementIndex === undefined) { throw new Error('Low element index not found.'); } return this.newElementWithLowElementIndex(lowElementIndex, value); } }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/merkle-tree/index.ts
export * from './indexed-array'; export * from './merkle-tree';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/test-rpc/get-compressed-accounts.ts
import { PublicKey } from '@solana/web3.js'; import { BN } from '@coral-xyz/anchor'; import { getParsedEvents } from './get-parsed-events'; import { defaultTestStateTreeAccounts } from '../../constants'; import { Rpc } from '../../rpc'; import { CompressedAccountWithMerkleContext, bn, MerkleContext, createCompressedAccountWithMerkleContext, } from '../../state'; export async function getCompressedAccountsByOwnerTest( rpc: Rpc, owner: PublicKey, ) { const unspentAccounts = await getCompressedAccountsForTest(rpc); const byOwner = unspentAccounts.filter(acc => acc.owner.equals(owner)); return byOwner; } export async function getCompressedAccountByHashTest( rpc: Rpc, hash: BN, ): Promise<CompressedAccountWithMerkleContext | undefined> { const unspentAccounts = await getCompressedAccountsForTest(rpc); return unspentAccounts.find(acc => bn(acc.hash).eq(hash)); } export async function getMultipleCompressedAccountsByHashTest( rpc: Rpc, hashes: BN[], ): Promise<CompressedAccountWithMerkleContext[]> { const unspentAccounts = await getCompressedAccountsForTest(rpc); return unspentAccounts .filter(acc => hashes.some(hash => bn(acc.hash).eq(hash))) .sort((a, b) => b.leafIndex - a.leafIndex); } /// Returns all unspent compressed accounts async function getCompressedAccountsForTest(rpc: Rpc) { const events = (await getParsedEvents(rpc)).reverse(); const allOutputAccounts: CompressedAccountWithMerkleContext[] = []; const allInputAccountHashes: BN[] = []; for (const event of events) { for ( let index = 0; index < event.outputCompressedAccounts.length; index++ ) { const account = event.outputCompressedAccounts[index]; const merkleContext: MerkleContext = { merkleTree: defaultTestStateTreeAccounts().merkleTree, nullifierQueue: defaultTestStateTreeAccounts().nullifierQueue, hash: event.outputCompressedAccountHashes[index], leafIndex: event.outputLeafIndices[index], }; const withCtx: CompressedAccountWithMerkleContext = createCompressedAccountWithMerkleContext( merkleContext, 
account.compressedAccount.owner, account.compressedAccount.lamports, account.compressedAccount.data ?? undefined, account.compressedAccount.address ?? undefined, ); allOutputAccounts.push(withCtx); } for ( let index = 0; index < event.inputCompressedAccountHashes.length; index++ ) { const hash = event.inputCompressedAccountHashes[index]; allInputAccountHashes.push(bn(hash)); } } const unspentAccounts = allOutputAccounts.filter( account => !allInputAccountHashes.some(hash => hash.eq(bn(account.hash))), ); unspentAccounts.sort((a, b) => b.leafIndex - a.leafIndex); return unspentAccounts; }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/test-rpc/get-compressed-token-accounts.ts
import { PublicKey } from '@solana/web3.js'; import { getParsedEvents } from './get-parsed-events'; import { BN, BorshCoder } from '@coral-xyz/anchor'; import { IDL } from '../../idls/light_compressed_token'; import { defaultTestStateTreeAccounts } from '../../constants'; import { Rpc } from '../../rpc'; import { ParsedTokenAccount, WithCursor } from '../../rpc-interface'; import { CompressedAccount, PublicTransactionEvent, MerkleContext, createCompressedAccountWithMerkleContext, bn, } from '../../state'; const tokenProgramId: PublicKey = new PublicKey( // TODO: can add check to ensure its consistent with the idl 'cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m', ); type TokenData = { mint: PublicKey; owner: PublicKey; amount: BN; delegate: PublicKey | null; state: number; tlv: Buffer | null; }; export type EventWithParsedTokenTlvData = { inputCompressedAccountHashes: number[][]; outputCompressedAccounts: ParsedTokenAccount[]; }; /** * Manually parse the compressed token layout for a given compressed account. 
* @param compressedAccount - The compressed account * @returns The parsed token data */ export function parseTokenLayoutWithIdl( compressedAccount: CompressedAccount, programId: PublicKey = tokenProgramId, ): TokenData | null { if (compressedAccount.data === null) return null; const { data } = compressedAccount.data; if (data.length === 0) return null; if (compressedAccount.owner.toBase58() !== programId.toBase58()) { throw new Error( `Invalid owner ${compressedAccount.owner.toBase58()} for token layout`, ); } const decodedLayout = new BorshCoder(IDL).types.decode( 'TokenData', Buffer.from(data), ); return decodedLayout; } /** * parse compressed accounts of an event with token layout * @internal * TODO: refactor */ async function parseEventWithTokenTlvData( event: PublicTransactionEvent, ): Promise<EventWithParsedTokenTlvData> { const pubkeyArray = event.pubkeyArray; const outputHashes = event.outputCompressedAccountHashes; const outputCompressedAccountsWithParsedTokenData: ParsedTokenAccount[] = event.outputCompressedAccounts.map((compressedAccount, i) => { const merkleContext: MerkleContext = { merkleTree: pubkeyArray[ event.outputCompressedAccounts[i].merkleTreeIndex ], nullifierQueue: // FIXME: fix make dynamic defaultTestStateTreeAccounts().nullifierQueue, hash: outputHashes[i], leafIndex: event.outputLeafIndices[i], }; if (!compressedAccount.compressedAccount.data) throw new Error('No data'); const parsedData = parseTokenLayoutWithIdl( compressedAccount.compressedAccount, ); if (!parsedData) throw new Error('Invalid token data'); const withMerkleContext = createCompressedAccountWithMerkleContext( merkleContext, compressedAccount.compressedAccount.owner, compressedAccount.compressedAccount.lamports, compressedAccount.compressedAccount.data, compressedAccount.compressedAccount.address ?? 
undefined, ); return { compressedAccount: withMerkleContext, parsed: parsedData, }; }); return { inputCompressedAccountHashes: event.inputCompressedAccountHashes, outputCompressedAccounts: outputCompressedAccountsWithParsedTokenData, }; } /** * Retrieves all compressed token accounts for a given mint and owner. * * Note: This function is intended for testing purposes only. For production, use rpc.getCompressedTokenAccounts. * * @param events Public transaction events * @param owner PublicKey of the token owner * @param mint PublicKey of the token mint */ export async function getCompressedTokenAccounts( events: PublicTransactionEvent[], ): Promise<ParsedTokenAccount[]> { const eventsWithParsedTokenTlvData: EventWithParsedTokenTlvData[] = await Promise.all( events.map(event => parseEventWithTokenTlvData(event)), ); /// strip spent compressed accounts if an output compressed account of tx n is /// an input compressed account of tx n+m, it is spent const allOutCompressedAccounts = eventsWithParsedTokenTlvData.flatMap( event => event.outputCompressedAccounts, ); const allInCompressedAccountHashes = eventsWithParsedTokenTlvData.flatMap( event => event.inputCompressedAccountHashes, ); const unspentCompressedAccounts = allOutCompressedAccounts.filter( outputCompressedAccount => !allInCompressedAccountHashes.some(hash => { return ( JSON.stringify(hash) === JSON.stringify( outputCompressedAccount.compressedAccount.hash, ) ); }), ); return unspentCompressedAccounts; } /** @internal */ export async function getCompressedTokenAccountsByOwnerTest( rpc: Rpc, owner: PublicKey, mint: PublicKey, ): Promise<WithCursor<ParsedTokenAccount[]>> { const events = await getParsedEvents(rpc); const compressedTokenAccounts = await getCompressedTokenAccounts(events); const accounts = compressedTokenAccounts.filter( acc => acc.parsed.owner.equals(owner) && acc.parsed.mint.equals(mint), ); return { items: accounts.sort( (a, b) => b.compressedAccount.leafIndex - a.compressedAccount.leafIndex, ), 
cursor: null, }; } export async function getCompressedTokenAccountsByDelegateTest( rpc: Rpc, delegate: PublicKey, mint: PublicKey, ): Promise<WithCursor<ParsedTokenAccount[]>> { const events = await getParsedEvents(rpc); const compressedTokenAccounts = await getCompressedTokenAccounts(events); return { items: compressedTokenAccounts.filter( acc => acc.parsed.delegate?.equals(delegate) && acc.parsed.mint.equals(mint), ), cursor: null, }; } export async function getCompressedTokenAccountByHashTest( rpc: Rpc, hash: BN, ): Promise<ParsedTokenAccount> { const events = await getParsedEvents(rpc); const compressedTokenAccounts = await getCompressedTokenAccounts(events); const filtered = compressedTokenAccounts.filter(acc => bn(acc.compressedAccount.hash).eq(hash), ); if (filtered.length === 0) { throw new Error('No compressed account found'); } return filtered[0]; }
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/test-rpc/test-rpc.ts
import { Connection, ConnectionConfig, PublicKey } from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import {
    getCompressedAccountByHashTest,
    getCompressedAccountsByOwnerTest,
    getMultipleCompressedAccountsByHashTest,
} from './get-compressed-accounts';
import {
    getCompressedTokenAccountByHashTest,
    getCompressedTokenAccountsByDelegateTest,
    getCompressedTokenAccountsByOwnerTest,
} from './get-compressed-token-accounts';
import { MerkleTree } from '../merkle-tree/merkle-tree';
import { getParsedEvents } from './get-parsed-events';
import { defaultTestStateTreeAccounts } from '../../constants';
import {
    AddressWithTree,
    CompressedMintTokenHolders,
    CompressedTransaction,
    GetCompressedAccountsByOwnerConfig,
    PaginatedOptions,
    HashWithTree,
    LatestNonVotingSignatures,
    LatestNonVotingSignaturesPaginated,
    SignatureWithMetadata,
    WithContext,
    WithCursor,
} from '../../rpc-interface';
import {
    CompressedProofWithContext,
    CompressionApiInterface,
    GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ParsedTokenAccount,
    TokenBalance,
} from '../../rpc-interface';
import {
    BN254,
    CompressedAccountWithMerkleContext,
    MerkleContextWithMerkleProof,
    PublicTransactionEvent,
    bn,
} from '../../state';
import { IndexedArray } from '../merkle-tree';
import {
    MerkleContextWithNewAddressProof,
    convertMerkleProofsWithContextToHex,
    convertNonInclusionMerkleProofInputsToHex,
    proverRequest,
} from '../../rpc';

export interface TestRpcConfig {
    /**
     * Address of the state tree to index. Default: public default test state
     * tree.
     */
    merkleTreeAddress?: PublicKey;
    /**
     * Nullifier queue associated with merkleTreeAddress
     */
    nullifierQueueAddress?: PublicKey;
    /**
     * Depth of state tree. Defaults to the public default test state tree depth
     */
    depth?: number;
    /**
     * Log proof generation time
     */
    log?: boolean;
    /**
     * Address of the address tree to index. Default: public default test
     * address tree.
     */
    addressTreeAddress?: PublicKey;
    /**
     * Address queue associated with addressTreeAddress
     */
    addressQueueAddress?: PublicKey;
}

/** Minimal wasm hasher interface consumed by TestRpc. */
export interface LightWasm {
    blakeHash(input: string | Uint8Array, hashLength: number): Uint8Array;
    poseidonHash(input: string[] | BN[]): Uint8Array;
    poseidonHashString(input: string[] | BN[]): string;
    poseidonHashBN(input: string[] | BN[]): BN;
}

/**
 * Returns a mock RPC instance for use in unit tests.
 *
 * @param lightWasm              Wasm hasher instance.
 * @param endpoint               RPC endpoint URL. Defaults to
 *                               'http://127.0.0.1:8899'.
 * @param proverEndpoint         Prover server endpoint URL. Defaults to
 *                               'http://localhost:3001'.
 * @param merkleTreeAddress      Address of the merkle tree to index. Defaults
 *                               to the public default test state tree.
 * @param nullifierQueueAddress  Optional address of the associated nullifier
 *                               queue.
 * @param depth                  Depth of the merkle tree.
 * @param log                    Log proof generation time.
 */
export async function getTestRpc(
    lightWasm: LightWasm,
    endpoint: string = 'http://127.0.0.1:8899',
    compressionApiEndpoint: string = 'http://127.0.0.1:8784',
    proverEndpoint: string = 'http://127.0.0.1:3001',
    merkleTreeAddress?: PublicKey,
    nullifierQueueAddress?: PublicKey,
    depth?: number,
    log = false,
) {
    const defaultAccounts = defaultTestStateTreeAccounts();

    return new TestRpc(
        endpoint,
        lightWasm,
        compressionApiEndpoint,
        proverEndpoint,
        undefined,
        {
            merkleTreeAddress: merkleTreeAddress || defaultAccounts.merkleTree,
            nullifierQueueAddress:
                nullifierQueueAddress || defaultAccounts.nullifierQueue,
            depth: depth || defaultAccounts.merkleTreeHeight,
            log,
        },
    );
}

/**
 * Simple mock rpc for unit tests that simulates the compression rpc interface.
 * Fetches, parses events and builds merkletree on-demand, i.e. it does not persist state.
 * Constraints:
 * - Can only index 1 merkletree
 * - Can only index up to 1000 transactions
 *
 * For advanced testing use photon: https://github.com/helius-labs/photon
 */
export class TestRpc extends Connection implements CompressionApiInterface {
    compressionApiEndpoint: string;
    proverEndpoint: string;
    merkleTreeAddress: PublicKey;
    nullifierQueueAddress: PublicKey;
    addressTreeAddress: PublicKey;
    addressQueueAddress: PublicKey;
    lightWasm: LightWasm;
    depth: number;
    log = false;

    /**
     * Establish a Compression-compatible JSON RPC mock-connection
     *
     * @param endpoint               endpoint to the solana cluster (use for
     *                               localnet only)
     * @param hasher                 light wasm hasher instance
     * @param compressionApiEndpoint Endpoint to the compression server.
     * @param proverEndpoint         Endpoint to the prover server. defaults
     *                               to endpoint
     * @param connectionConfig       Optional connection config
     * @param testRpcConfig          Config for the mock rpc
     */
    constructor(
        endpoint: string,
        hasher: LightWasm,
        compressionApiEndpoint: string,
        proverEndpoint: string,
        connectionConfig?: ConnectionConfig,
        testRpcConfig?: TestRpcConfig,
    ) {
        super(endpoint, connectionConfig || 'confirmed');
        this.compressionApiEndpoint = compressionApiEndpoint;
        this.proverEndpoint = proverEndpoint;

        const {
            merkleTreeAddress,
            nullifierQueueAddress,
            depth,
            log,
            addressTreeAddress,
            addressQueueAddress,
        } = testRpcConfig ?? {};

        // Fall back to the public default test tree accounts for anything
        // not supplied in testRpcConfig.
        const {
            merkleTree,
            nullifierQueue,
            merkleTreeHeight,
            addressQueue,
            addressTree,
        } = defaultTestStateTreeAccounts();

        this.lightWasm = hasher;
        this.merkleTreeAddress = merkleTreeAddress ?? merkleTree;
        this.nullifierQueueAddress = nullifierQueueAddress ?? nullifierQueue;
        this.addressTreeAddress = addressTreeAddress ?? addressTree;
        this.addressQueueAddress = addressQueueAddress ?? addressQueue;
        this.depth = depth ?? merkleTreeHeight;
        this.log = log ?? false;
    }

    /**
     * Fetch the compressed account for the specified account hash
     */
    async getCompressedAccount(
        address?: BN254,
        hash?: BN254,
    ): Promise<CompressedAccountWithMerkleContext | null> {
        if (address) {
            throw new Error('address is not supported in test-rpc');
        }
        if (!hash) {
            throw new Error('hash is required');
        }
        const account = await getCompressedAccountByHashTest(this, hash);
        return account ?? null;
    }

    /**
     * Fetch the compressed balance for the specified account hash
     */
    async getCompressedBalance(address?: BN254, hash?: BN254): Promise<BN> {
        if (address) {
            throw new Error('address is not supported in test-rpc');
        }
        if (!hash) {
            throw new Error('hash is required');
        }
        const account = await getCompressedAccountByHashTest(this, hash);
        if (!account) {
            throw new Error('Account not found');
        }
        return bn(account.lamports);
    }

    /**
     * Fetch the total compressed balance for the specified owner public key
     */
    async getCompressedBalanceByOwner(owner: PublicKey): Promise<BN> {
        const accounts = await this.getCompressedAccountsByOwner(owner);
        return accounts.items.reduce(
            (acc, account) => acc.add(account.lamports),
            bn(0),
        );
    }

    /**
     * Fetch the latest merkle proof for the specified account hash from the
     * cluster
     */
    async getCompressedAccountProof(
        hash: BN254,
    ): Promise<MerkleContextWithMerkleProof> {
        const proofs = await this.getMultipleCompressedAccountProofs([hash]);
        return proofs[0];
    }

    /**
     * Fetch all the account info for multiple compressed accounts specified by
     * an array of account hashes
     */
    async getMultipleCompressedAccounts(
        hashes: BN254[],
    ): Promise<CompressedAccountWithMerkleContext[]> {
        return await getMultipleCompressedAccountsByHashTest(this, hashes);
    }

    /**
     * Ensure that the Compression Indexer has already indexed the transaction
     */
    async confirmTransactionIndexed(_slot: number): Promise<boolean> {
        // Mock: always indexed.
        return true;
    }

    /**
     * Fetch the latest merkle proofs for multiple compressed accounts specified
     * by an array account hashes
     */
    async getMultipleCompressedAccountProofs(
        hashes: BN254[],
    ): Promise<MerkleContextWithMerkleProof[]> {
        /// Build tree
        const events: PublicTransactionEvent[] = await getParsedEvents(
            this,
        ).then(events => events.reverse());
        const allLeaves: number[][] = [];
        const allLeafIndices: number[] = [];
        for (const event of events) {
            for (
                let index = 0;
                index < event.outputCompressedAccounts.length;
                index++
            ) {
                const hash = event.outputCompressedAccountHashes[index];

                allLeaves.push(hash);
                allLeafIndices.push(event.outputLeafIndices[index]);
            }
        }
        const tree = new MerkleTree(
            this.depth,
            this.lightWasm,
            allLeaves.map(leaf => bn(leaf).toString()),
        );

        /// create merkle proofs and assemble return type
        const merkleProofs: MerkleContextWithMerkleProof[] = [];
        for (let i = 0; i < hashes.length; i++) {
            const leafIndex = tree.indexOf(hashes[i].toString());
            const pathElements = tree.path(leafIndex).pathElements;
            const bnPathElements = pathElements.map(value => bn(value));
            const root = bn(tree.root());
            const merkleProof: MerkleContextWithMerkleProof = {
                hash: hashes[i].toArray('be', 32),
                merkleTree: this.merkleTreeAddress,
                leafIndex: leafIndex,
                merkleProof: bnPathElements,
                nullifierQueue: this.nullifierQueueAddress,
                // NOTE(review): presumably the root-history index after all
                // appends — confirm against the on-chain changelog semantics.
                rootIndex: allLeaves.length,
                root: root,
            };
            merkleProofs.push(merkleProof);
        }

        /// Validate
        // Sanity check: the leaf at each proof's index must equal the
        // requested hash.
        merkleProofs.forEach((proof, index) => {
            const leafIndex = proof.leafIndex;
            const computedHash = tree.elements()[leafIndex];
            const hashArr = bn(computedHash).toArray('be', 32);
            if (!hashArr.every((val, index) => val === proof.hash[index])) {
                throw new Error(
                    `Mismatch at index ${index}: expected ${proof.hash.toString()}, got ${hashArr.toString()}`,
                );
            }
        });

        return merkleProofs;
    }

    /**
     * Fetch all the compressed accounts owned by the specified public key.
     * Owner can be a program or user account
     */
    async getCompressedAccountsByOwner(
        owner: PublicKey,
        _config?: GetCompressedAccountsByOwnerConfig,
    ): Promise<WithCursor<CompressedAccountWithMerkleContext[]>> {
        // TODO(swen): revisit
        // if (_config) {
        //     throw new Error(
        //         'dataSlice or filters are not supported in test-rpc. Please use rpc.ts instead.',
        //     );
        // }
        const accounts = await getCompressedAccountsByOwnerTest(this, owner);
        return {
            items: accounts,
            cursor: null,
        };
    }

    /**
     * Fetch the latest compression signatures on the cluster. Results are
     * paginated.
     */
    async getLatestCompressionSignatures(
        _cursor?: string,
        _limit?: number,
    ): Promise<LatestNonVotingSignaturesPaginated> {
        throw new Error(
            'getLatestNonVotingSignaturesWithContext not supported in test-rpc',
        );
    }

    /**
     * Fetch the latest non-voting signatures on the cluster. Results are
     * not paginated.
     */
    async getLatestNonVotingSignatures(
        _limit?: number,
    ): Promise<LatestNonVotingSignatures> {
        throw new Error(
            'getLatestNonVotingSignaturesWithContext not supported in test-rpc',
        );
    }

    /**
     * Fetch all the compressed token accounts owned by the specified public
     * key. Owner can be a program or user account
     */
    async getCompressedTokenAccountsByOwner(
        owner: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithCursor<ParsedTokenAccount[]>> {
        return await getCompressedTokenAccountsByOwnerTest(
            this,
            owner,
            options!.mint!,
        );
    }

    /**
     * Fetch all the compressed accounts delegated to the specified public key.
     */
    async getCompressedTokenAccountsByDelegate(
        delegate: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithCursor<ParsedTokenAccount[]>> {
        return await getCompressedTokenAccountsByDelegateTest(
            this,
            delegate,
            options.mint!,
        );
    }

    /**
     * Fetch the compressed token balance for the specified account hash
     */
    async getCompressedTokenAccountBalance(
        hash: BN254,
    ): Promise<{ amount: BN }> {
        const account = await getCompressedTokenAccountByHashTest(this, hash);
        return { amount: bn(account.parsed.amount) };
    }

    /**
     * @deprecated use {@link getCompressedTokenBalancesByOwnerV2}.
     * Fetch all the compressed token balances owned by the specified public
     * key. Can filter by mint.
     */
    async getCompressedTokenBalancesByOwner(
        publicKey: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithCursor<{ balance: BN; mint: PublicKey }[]>> {
        const accounts = await getCompressedTokenAccountsByOwnerTest(
            this,
            publicKey,
            options.mint!,
        );
        return {
            items: accounts.items.map(account => ({
                balance: bn(account.parsed.amount),
                mint: account.parsed.mint,
            })),
            cursor: null,
        };
    }

    /**
     * Fetch all the compressed token balances owned by the specified public
     * key. Can filter by mint. Uses context.
     */
    async getCompressedTokenBalancesByOwnerV2(
        publicKey: PublicKey,
        options: GetCompressedTokenAccountsByOwnerOrDelegateOptions,
    ): Promise<WithContext<WithCursor<TokenBalance[]>>> {
        const accounts = await getCompressedTokenAccountsByOwnerTest(
            this,
            publicKey,
            options.mint!,
        );
        return {
            // Mock slot; TestRpc does not track real slots.
            context: { slot: 1 },
            value: {
                items: accounts.items.map(account => ({
                    balance: bn(account.parsed.amount),
                    mint: account.parsed.mint,
                })),
                cursor: null,
            },
        };
    }

    /**
     * Returns confirmed signatures for transactions involving the specified
     * account hash forward in time from genesis to the most recent confirmed
     * block
     *
     * @param hash queried account hash
     */
    async getCompressionSignaturesForAccount(
        _hash: BN254,
    ): Promise<SignatureWithMetadata[]> {
        throw new Error(
            'getCompressionSignaturesForAccount not implemented in test-rpc',
        );
    }

    /**
     * Fetch a confirmed or finalized transaction from the cluster. Return with
     * CompressionInfo
     */
    async getTransactionWithCompressionInfo(
        _signature: string,
    ): Promise<CompressedTransaction | null> {
        throw new Error('getCompressedTransaction not implemented in test-rpc');
    }

    /**
     * Returns confirmed signatures for transactions involving the specified
     * address forward in time from genesis to the most recent confirmed
     * block
     *
     * @param address queried compressed account address
     */
    async getCompressionSignaturesForAddress(
        _address: PublicKey,
        _options?: PaginatedOptions,
    ): Promise<WithCursor<SignatureWithMetadata[]>> {
        throw new Error('getSignaturesForAddress3 not implemented');
    }

    /**
     * Returns confirmed signatures for compression transactions involving the
     * specified account owner forward in time from genesis to the
     * most recent confirmed block
     *
     * @param owner queried owner public key
     */
    async getCompressionSignaturesForOwner(
        _owner: PublicKey,
        _options?: PaginatedOptions,
    ): Promise<WithCursor<SignatureWithMetadata[]>> {
        throw new Error('getSignaturesForOwner not implemented');
    }

    /**
     * Returns confirmed signatures for compression transactions involving the
     * specified token account owner forward in time from genesis to the most
     * recent confirmed block
     */
    async getCompressionSignaturesForTokenOwner(
        _owner: PublicKey,
        _options?: PaginatedOptions,
    ): Promise<WithCursor<SignatureWithMetadata[]>> {
        throw new Error('getSignaturesForTokenOwner not implemented');
    }

    /**
     * Fetch the current indexer health status
     */
    async getIndexerHealth(): Promise<string> {
        // Mock: always healthy.
        return 'ok';
    }

    /**
     * Fetch the current slot that the node is processing
     */
    async getIndexerSlot(): Promise<number> {
        // Mock slot.
        return 1;
    }

    /**
     * Fetch the latest address proofs for new unique addresses specified by an
     * array of addresses.
     *
     * the proof states that said address have not yet been created in respective address tree.
     * @param addresses Array of BN254 new addresses
     * @returns Array of validity proofs for new addresses
     */
    async getMultipleNewAddressProofs(addresses: BN254[]) {
        /// Build tree
        const indexedArray = IndexedArray.default();
        const allAddresses: BN[] = [];
        indexedArray.init();
        const hashes: BN[] = [];
        // TODO(crank): add support for cranked address tree in 'allAddresses'.
        // The Merkle tree root doesnt actually advance beyond init() unless we
        // start emptying the address queue.
        for (let i = 0; i < allAddresses.length; i++) {
            indexedArray.append(bn(allAddresses[i]));
        }
        for (let i = 0; i < indexedArray.elements.length; i++) {
            const hash = indexedArray.hashElement(this.lightWasm, i);
            hashes.push(bn(hash!));
        }
        const tree = new MerkleTree(
            this.depth,
            this.lightWasm,
            hashes.map(hash => bn(hash).toString()),
        );

        /// Creates proof for each address
        const newAddressProofs: MerkleContextWithNewAddressProof[] = [];
        for (let i = 0; i < addresses.length; i++) {
            const [lowElement] = indexedArray.findLowElement(addresses[i]);
            if (!lowElement) throw new Error('Address not found');

            const leafIndex = lowElement.index;
            const pathElements: string[] = tree.path(leafIndex).pathElements;
            const bnPathElements = pathElements.map(value => bn(value));
            const higherRangeValue = indexedArray.get(
                lowElement.nextIndex,
            )!.value;
            const root = bn(tree.root());

            const proof: MerkleContextWithNewAddressProof = {
                root,
                // Static: the address tree never advances in test-rpc.
                rootIndex: 3,
                value: addresses[i],
                leafLowerRangeValue: lowElement.value,
                leafHigherRangeValue: higherRangeValue,
                nextIndex: bn(lowElement.nextIndex),
                merkleProofHashedIndexedElementLeaf: bnPathElements,
                indexHashedIndexedElementLeaf: bn(lowElement.index),
                merkleTree: this.addressTreeAddress,
                nullifierQueue: this.addressQueueAddress,
            };
            newAddressProofs.push(proof);
        }
        return newAddressProofs;
    }

    async getCompressedMintTokenHolders(
        _mint: PublicKey,
        _options?: PaginatedOptions,
    ): Promise<WithContext<WithCursor<CompressedMintTokenHolders[]>>> {
        throw new Error(
            'getCompressedMintTokenHolders not implemented in test-rpc',
        );
    }

    /**
     * Advanced usage of getValidityProof: fetches ZKP directly from a custom
     * non-rpcprover. Note: This uses the proverEndpoint specified in the
     * constructor. For normal usage, please use {@link getValidityProof}
     * instead.
     *
     * Note: Use RPC class for forested trees. TestRpc is only for custom
     * testing purposes.
     */
    async getValidityProofDirect(
        hashes: BN254[] = [],
        newAddresses: BN254[] = [],
    ): Promise<CompressedProofWithContext> {
        return this.getValidityProof(hashes, newAddresses);
    }

    /**
     * @deprecated This method is not available for TestRpc. Please use
     * {@link getValidityProof} instead.
     */
    async getValidityProofAndRpcContext(
        hashes: HashWithTree[] = [],
        newAddresses: AddressWithTree[] = [],
    ): Promise<WithContext<CompressedProofWithContext>> {
        // NOTE(review): AddressWithTree entries are objects, not BN
        // instances, so this guard appears to reject any non-empty
        // newAddresses — confirm this is the intended deprecation behavior.
        if (newAddresses.some(address => !(address instanceof BN))) {
            throw new Error('AddressWithTree is not supported in test-rpc');
        }
        return {
            value: await this.getValidityProofV0(hashes, newAddresses),
            context: { slot: 1 },
        };
    }

    /**
     * Fetch the latest validity proof for (1) compressed accounts specified by
     * an array of account hashes. (2) new unique addresses specified by an
     * array of addresses.
     *
     * Validity proofs prove the presence of compressed accounts in state trees
     * and the non-existence of addresses in address trees, respectively. They
     * enable verification without recomputing the merkle proof path, thus
     * lowering verification and data costs.
     *
     * @param hashes        Array of BN254 hashes.
     * @param newAddresses  Array of BN254 new addresses.
     * @returns validity proof with context
     */
    async getValidityProof(
        hashes: BN254[] = [],
        newAddresses: BN254[] = [],
    ): Promise<CompressedProofWithContext> {
        if (newAddresses.some(address => !(address instanceof BN))) {
            throw new Error('AddressWithTree is not supported in test-rpc');
        }

        let validityProof: CompressedProofWithContext;

        if (hashes.length === 0 && newAddresses.length === 0) {
            throw new Error(
                'Empty input. Provide hashes and/or new addresses.',
            );
        } else if (hashes.length > 0 && newAddresses.length === 0) {
            /// inclusion
            const merkleProofsWithContext =
                await this.getMultipleCompressedAccountProofs(hashes);
            const inputs = convertMerkleProofsWithContextToHex(
                merkleProofsWithContext,
            );
            const compressedProof = await proverRequest(
                this.proverEndpoint,
                'inclusion',
                inputs,
                this.log,
            );
            validityProof = {
                compressedProof,
                roots: merkleProofsWithContext.map(proof => proof.root),
                rootIndices: merkleProofsWithContext.map(
                    proof => proof.rootIndex,
                ),
                leafIndices: merkleProofsWithContext.map(
                    proof => proof.leafIndex,
                ),
                leaves: merkleProofsWithContext.map(proof => bn(proof.hash)),
                merkleTrees: merkleProofsWithContext.map(
                    proof => proof.merkleTree,
                ),
                nullifierQueues: merkleProofsWithContext.map(
                    proof => proof.nullifierQueue,
                ),
            };
        } else if (hashes.length === 0 && newAddresses.length > 0) {
            /// new-address
            const newAddressProofs: MerkleContextWithNewAddressProof[] =
                await this.getMultipleNewAddressProofs(newAddresses);
            const inputs =
                convertNonInclusionMerkleProofInputsToHex(newAddressProofs);
            const compressedProof = await proverRequest(
                this.proverEndpoint,
                'new-address',
                inputs,
                this.log,
            );
            validityProof = {
                compressedProof,
                roots: newAddressProofs.map(proof => proof.root),
                // TODO(crank): make dynamic to enable forester support in
                // test-rpc.ts. Currently this is a static root because the
                // address tree doesn't advance.
                rootIndices: newAddressProofs.map(_ => 3),
                leafIndices: newAddressProofs.map(
                    proof => proof.indexHashedIndexedElementLeaf.toNumber(), // TODO: support >32bit
                ),
                leaves: newAddressProofs.map(proof => bn(proof.value)),
                merkleTrees: newAddressProofs.map(proof => proof.merkleTree),
                nullifierQueues: newAddressProofs.map(
                    proof => proof.nullifierQueue,
                ),
            };
        } else if (hashes.length > 0 && newAddresses.length > 0) {
            /// combined
            const merkleProofsWithContext =
                await this.getMultipleCompressedAccountProofs(hashes);
            const inputs = convertMerkleProofsWithContextToHex(
                merkleProofsWithContext,
            );

            const newAddressProofs: MerkleContextWithNewAddressProof[] =
                await this.getMultipleNewAddressProofs(newAddresses);
            const newAddressInputs =
                convertNonInclusionMerkleProofInputsToHex(newAddressProofs);
            const compressedProof = await proverRequest(
                this.proverEndpoint,
                'combined',
                [inputs, newAddressInputs],
                this.log,
            );
            validityProof = {
                compressedProof,
                roots: merkleProofsWithContext
                    .map(proof => proof.root)
                    .concat(newAddressProofs.map(proof => proof.root)),
                rootIndices: merkleProofsWithContext
                    .map(proof => proof.rootIndex)
                    // TODO(crank): make dynamic to enable forester support in
                    // test-rpc.ts. Currently this is a static root because the
                    // address tree doesn't advance.
                    .concat(newAddressProofs.map(_ => 3)),
                leafIndices: merkleProofsWithContext
                    .map(proof => proof.leafIndex)
                    .concat(
                        newAddressProofs.map(
                            proof =>
                                proof.indexHashedIndexedElementLeaf.toNumber(), // TODO: support >32bit
                        ),
                    ),
                leaves: merkleProofsWithContext
                    .map(proof => bn(proof.hash))
                    .concat(newAddressProofs.map(proof => bn(proof.value))),
                merkleTrees: merkleProofsWithContext
                    .map(proof => proof.merkleTree)
                    .concat(newAddressProofs.map(proof => proof.merkleTree)),
                nullifierQueues: merkleProofsWithContext
                    .map(proof => proof.nullifierQueue)
                    .concat(
                        newAddressProofs.map(proof => proof.nullifierQueue),
                    ),
            };
        } else throw new Error('Invalid input');

        return validityProof;
    }

    async getValidityProofV0(
        hashes: HashWithTree[] = [],
        newAddresses: AddressWithTree[] = [],
    ): Promise<CompressedProofWithContext> {
        /// TODO(swen): add support for custom trees
        return this.getValidityProof(
            hashes.map(hash => hash.hash),
            newAddresses.map(address => address.address),
        );
    }
}
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/test-rpc/index.ts
// Barrel re-exports for the test-rpc helpers.
// NOTE(review): './get-compressed-accounts' is not re-exported here —
// presumably internal-only; confirm.
export * from './test-rpc';
export * from './get-parsed-events';
export * from './get-compressed-token-accounts';
0
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers
solana_public_repos/Lightprotocol/light-protocol/js/stateless.js/src/test-helpers/test-rpc/get-parsed-events.ts
import { ParsedMessageAccount, ParsedTransactionWithMeta, } from '@solana/web3.js'; import { bs58 } from '@coral-xyz/anchor/dist/cjs/utils/bytes'; import { defaultStaticAccountsStruct } from '../../constants'; import { LightSystemProgram } from '../../programs'; import { Rpc } from '../../rpc'; import { PublicTransactionEvent } from '../../state'; type Deserializer<T> = (data: Buffer, tx: ParsedTransactionWithMeta) => T; /** * @internal * Returns newest first. * * */ export async function getParsedEvents( rpc: Rpc, ): Promise<PublicTransactionEvent[]> { const { noopProgram, accountCompressionProgram } = defaultStaticAccountsStruct(); /// Get raw transactions const signatures = ( await rpc.getConfirmedSignaturesForAddress2( accountCompressionProgram, undefined, 'confirmed', ) ).map(s => s.signature); const txs = await rpc.getParsedTransactions(signatures, { maxSupportedTransactionVersion: 0, commitment: 'confirmed', }); /// Filter by NOOP program const transactionEvents = txs.filter( (tx: ParsedTransactionWithMeta | null) => { if (!tx) { return false; } const accountKeys = tx.transaction.message.accountKeys; const hasSplNoopAddress = accountKeys.some( (item: ParsedMessageAccount) => { const itemStr = typeof item === 'string' ? 
item : item.pubkey.toBase58(); return itemStr === noopProgram.toBase58(); }, ); return hasSplNoopAddress; }, ); /// Parse events const parsedEvents = parseEvents( transactionEvents, parsePublicTransactionEventWithIdl, ); return parsedEvents; } export const parseEvents = <T>( indexerEventsTransactions: (ParsedTransactionWithMeta | null)[], deserializeFn: Deserializer<T>, ): NonNullable<T>[] => { const { noopProgram } = defaultStaticAccountsStruct(); const transactions: NonNullable<T>[] = []; indexerEventsTransactions.forEach(tx => { if ( !tx || !tx.meta || tx.meta.err || !tx.meta.innerInstructions || tx.meta.innerInstructions.length <= 0 ) { return; } /// We only care about the very last inner instruction as it contains the /// PublicTransactionEvent tx.meta.innerInstructions.forEach(ix => { if (ix.instructions.length > 0) { const ixInner = ix.instructions[ix.instructions.length - 1]; // Type guard for partially parsed web3js types. if ( 'data' in ixInner && ixInner.data && ixInner.programId.toBase58() === noopProgram.toBase58() ) { const data = bs58.decode(ixInner.data); const decodedEvent = deserializeFn(Buffer.from(data), tx); if (decodedEvent !== null && decodedEvent !== undefined) { transactions.push(decodedEvent as NonNullable<T>); } } } }); }); return transactions; }; // TODO: make it type safe. have to reimplement the types from the IDL. export const parsePublicTransactionEventWithIdl = ( data: Buffer, ): PublicTransactionEvent | null => { const numericData = Buffer.from(data.map(byte => byte)); try { return LightSystemProgram.program.coder.types.decode( 'PublicTransactionEvent', numericData, ); } catch (error) { console.error('Error deserializing event:', error); return null; } };
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/README.md
<p align="center"> <img src="https://github.com/ldiego08/light-protocol/raw/main/assets/logo.svg" width="90" /> </p> <h1 align="center">@lightprotocol/compressed-token</h1> <p align="center"> <b>This is the JavaScript SDK for interacting with the Compressed Token program on Solana.</b> </p> <p align="center"> <a href="https://badge.fury.io/js/@lightprotocol%2Fcompressed-token"> <img src="https://badge.fury.io/js/@lightprotocol%2Fcompressed-token.svg" alt="package npm version" height="18" /> </a> <img src="https://img.shields.io/npm/l/%40lightprotocol%2Fcompressed-token" alt="package license" height="18"> <img src="https://img.shields.io/npm/dw/%40lightprotocol%2Fcompressed-token" alt="package weekly downloads" height="18" /> </p> ### Installation **For use in Node.js or web** ```bash npm install --save \ @lightprotocol/compressed-token \ @lightprotocol/stateless.js \ ``` ### Documentation and examples - [Latest Source code](https://github.com/lightprotocol/lightprotocol/tree/main/js/compressed-token) - [Creating and sending compressed tokens](https://www.zkcompression.com/developers/typescript-client#creating-minting-and-transferring-a-compressed-token) ### Getting help Have a question or a problem? Check out the [Light](https://discord.gg/CYvjBgzRFP) and [Helius](https://discord.gg/Uzzf6a7zKr) Developer Discord servers! When asking for help, please include: - A detailed description of what you're trying to achieve - Source code, if possible - The text of any errors you encountered, with stacktraces if available ### Contributing The ZK Compression system programs and clients are maintained by [Light](https://github.com/lightprotocol) as a part of the Light Protocol. The ZK Compression RPC API and its indexer implementation is maintained by [Helius Labs](https://github.com/helius-labs). Light and ZK Compression are open protocols and contributions are welcome. If you have a contribution, please open a PR on the respective repository. 
If you found a bug or would like to request a feature, please file an [issue](https://github.com/lightprotocol/lightprotocol/issues/new). If you found a security vulnerability, please follow the [security policy](https://github.com/Lightprotocol/light-protocol/blob/main/SECURITY.md). ### Disclaimer All claims, content, designs, algorithms, estimates, roadmaps, specifications, and performance measurements described in this project are done with Light Protocol Labs' ("Labs") best efforts. It is up to the reader to check and validate their accuracy and truthfulness. Furthermore nothing in this project constitutes a solicitation for investment. Any content produced by Labs or developer resources that Labs provides, are for educational and inspiration purposes only. Labs does not encourage, induce or sanction the deployment, integration or use of any such applications (including the code comprising the Light blockchain protocol) in violation of applicable laws or regulations and hereby prohibits any such deployment, integration or use. This includes use of any such applications by the reader (a) in violation of export control or sanctions laws of the United States or any other applicable jurisdiction, (b) if the reader is located in or ordinarily resident in a country or territory subject to comprehensive sanctions administered by the U.S. Office of Foreign Assets Control (OFAC), or (c) if the reader is or is working on behalf of a Specially Designated National (SDN) or a person subject to similar blocking or denied party prohibitions. The reader should be aware that U.S. export control and sanctions laws prohibit U.S. persons (and other persons that are subject to such laws) from transacting with persons in certain countries and territories or that are on the SDN list. 
As a project based primarily on open-source software, it is possible that such sanctioned persons may nevertheless bypass prohibitions, obtain the code comprising the Light blockchain protocol (or other project code or applications) and deploy, integrate, or otherwise use it. Accordingly, there is a risk to individuals that other persons using the Light blockchain protocol may be sanctioned persons and that transactions with such persons would be a violation of U.S. export controls and sanctions law. This risk applies to individuals, organizations, and other ecosystem participants that deploy, integrate, or use the Light blockchain protocol code directly (e.g., as a node operator), and individuals that transact on the Light blockchain protocol implementation through clients, other kinds of nodes, third party interfaces, and/or wallet software.
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/rollup.config.js
import typescript from '@rollup/plugin-typescript';
import nodePolyfills from 'rollup-plugin-polyfill-node';
import dts from 'rollup-plugin-dts';
import resolve from '@rollup/plugin-node-resolve';
import commonjs from '@rollup/plugin-commonjs';
import alias from '@rollup/plugin-alias';
import json from '@rollup/plugin-json';

// Builds one Rollup config per (module format, target environment) pair.
// fmt: 'cjs' | 'es'; env: 'browser' | 'node'.
const rolls = (fmt, env) => ({
    input: 'src/index.ts',
    output: {
        dir: `dist/${fmt}/${env}`,
        format: fmt,
        // CJS bundles get the .cjs extension so Node treats them as CommonJS
        // despite the package's "type": "module".
        entryFileNames: `[name].${fmt === 'cjs' ? 'cjs' : 'js'}`,
        sourcemap: true,
    },
    // Runtime/peer dependencies stay external and are resolved by consumers.
    external: [
        '@solana/web3.js',
        '@coral-xyz/anchor',
        '@solana/spl-token',
        '@lightprotocol/stateless.js',
        'tweetnacl',
    ],
    plugins: [
        json(),
        typescript({
            // ES bundles target newer syntax than the CJS bundles.
            target: fmt === 'es' ? 'ES2022' : 'ES2017',
            outDir: `dist/${fmt}/${env}`,
            rootDir: 'src',
        }),
        commonjs(),
        resolve({
            browser: env === 'browser',
            preferBuiltins: env === 'node',
            extensions: ['.mjs', '.js', '.json', '.ts'],
            mainFields: ['module', 'main', 'browser'],
        }),
        alias({
            entries: [
                {
                    find: 'crypto',
                    // Browser builds substitute crypto-browserify for the
                    // Node built-in; Node builds keep 'crypto' unchanged.
                    replacement:
                        env === 'browser' ? 'crypto-browserify' : 'crypto',
                },
            ],
        }),
        // Node built-in polyfills are only needed for browser bundles; the
        // `undefined` placeholder is dropped by .filter(Boolean) below.
        env === 'browser' ? nodePolyfills() : undefined,
    ].filter(Boolean),
    onwarn(warning, warn) {
        // Circular-dependency warnings are expected in this codebase and
        // deliberately suppressed; everything else is forwarded.
        if (warning.code !== 'CIRCULAR_DEPENDENCY') {
            warn(warning);
        }
    },
});

// Emits a single rolled-up type-declaration bundle.
const typesConfig = {
    input: 'src/index.ts',
    output: [{ file: 'dist/types/index.d.ts', format: 'es' }],
    plugins: [dts()],
};

// Note: no 'es'/'node' bundle is produced — Node consumers use the CJS build.
export default [
    rolls('cjs', 'browser'),
    rolls('es', 'browser'),
    rolls('cjs', 'node'),
    typesConfig,
];
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tsconfig.test.json
{ "compilerOptions": { "esModuleInterop": true, "rootDirs": ["src", "tests"] }, "extends": "./tsconfig.json", "include": ["./tests/**/*.ts", "vitest.config.ts"] }
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/package.json
{ "name": "@lightprotocol/compressed-token", "version": "0.16.0", "description": "JS client to interact with the compressed-token program", "sideEffects": false, "main": "dist/cjs/node/index.cjs", "type": "module", "exports": { ".": { "require": "./dist/cjs/node/index.cjs", "types": "./dist/types/index.d.ts", "default": "./dist/cjs/node/index.cjs" }, "./browser": { "import": "./dist/es/browser/index.js", "require": "./dist/cjs/browser/index.cjs", "types": "./dist/types/index.d.ts" } }, "types": "./dist/types/index.d.ts", "files": [ "dist" ], "scripts": { "test": "pnpm test:e2e:all", "test-all": "vitest run", "test:unit:all": "EXCLUDE_E2E=true vitest run", "test-all:verbose": "vitest run --reporter=verbose", "test-validator": "./../../cli/test_bin/run test-validator --prover-run-mode rpc", "test:e2e:create-mint": "pnpm test-validator && vitest run tests/e2e/create-mint.test.ts", "test:e2e:create-token-pool": "pnpm test-validator && vitest run tests/e2e/create-token-pool.test.ts", "test:e2e:mint-to": "pnpm test-validator && vitest run tests/e2e/mint-to.test.ts --reporter=verbose", "test:e2e:approve-and-mint-to": "pnpm test-validator && vitest run tests/e2e/approve-and-mint-to.test.ts --reporter=verbose", "test:e2e:merge-token-accounts": "pnpm test-validator && vitest run tests/e2e/merge-token-accounts.test.ts --reporter=verbose", "test:e2e:transfer": "pnpm test-validator && vitest run tests/e2e/transfer.test.ts --reporter=verbose", "test:e2e:compress": "pnpm test-validator && vitest run tests/e2e/compress.test.ts --reporter=verbose", "test:e2e:compress-spl-token-account": "pnpm test-validator && vitest run tests/e2e/compress-spl-token-account.test.ts --reporter=verbose", "test:e2e:decompress": "pnpm test-validator && vitest run tests/e2e/decompress.test.ts --reporter=verbose", "test:e2e:rpc-token-interop": "pnpm test-validator && vitest run tests/e2e/rpc-token-interop.test.ts --reporter=verbose", "test:e2e:custom-program-id": "vitest run 
tests/e2e/custom-program-id.test.ts --reporter=verbose", "test:e2e:all": "pnpm test-validator && vitest run tests/e2e/create-mint.test.ts && vitest run tests/e2e/mint-to.test.ts && vitest run tests/e2e/transfer.test.ts && vitest run tests/e2e/compress.test.ts && vitest run tests/e2e/compress-spl-token-account.test.ts && vitest run tests/e2e/decompress.test.ts && vitest run tests/e2e/create-token-pool.test.ts && vitest run tests/e2e/approve-and-mint-to.test.ts && vitest run tests/e2e/rpc-token-interop.test.ts && vitest run tests/e2e/custom-program-id.test.ts", "pull-idl": "../../scripts/push-compressed-token-idl.sh", "build": "rimraf dist && pnpm pull-idl && pnpm build:bundle", "build:bundle": "rollup -c", "format": "prettier --write .", "lint": "eslint ." }, "keywords": [ "zk", "compression", "light", "stateless", "solana" ], "maintainers": [ { "name": "Light Protocol Maintainers", "email": "friends@lightprotocol.com" } ], "license": "Apache-2.0", "peerDependencies": { "@lightprotocol/stateless.js": "workspace:*" }, "dependencies": { "@coral-xyz/anchor": "0.29.0", "@solana/web3.js": "1.95.3", "@solana/spl-token": "0.4.8", "buffer": "6.0.3", "tweetnacl": "1.0.3" }, "devDependencies": { "@esbuild-plugins/node-globals-polyfill": "^0.2.3", "@lightprotocol/hasher.rs": "workspace:*", "@lightprotocol/programs": "workspace:*", "@rollup/plugin-alias": "^5.1.0", "@rollup/plugin-babel": "^6.0.4", "@rollup/plugin-commonjs": "^26.0.1", "@rollup/plugin-json": "^6.1.0", "@rollup/plugin-node-resolve": "^15.2.3", "@rollup/plugin-replace": "^5.0.7", "@rollup/plugin-terser": "^0.4.4", "@rollup/plugin-typescript": "^11.1.6", "@types/node": "^22.5.5", "@typescript-eslint/eslint-plugin": "^7.13.1", "@typescript-eslint/parser": "^7.13.1", "add": "^2.0.6", "crypto-browserify": "^3.12.0", "eslint": "^8.56.0", "eslint-plugin-import": "^2.30.0", "eslint-plugin-n": "^17.10.2", "eslint-plugin-promise": "^7.1.0", "eslint-plugin-vitest": "^0.5.4", "prettier": "^3.3.3", "rimraf": "^6.0.1", 
"rollup": "^4.21.3", "rollup-plugin-copy": "^3.5.0", "rollup-plugin-dts": "^6.1.1", "rollup-plugin-polyfill-node": "^0.13.0", "rollup-plugin-visualizer": "^5.12.0", "ts-node": "^10.9.2", "tslib": "^2.7.0", "typescript": "^5.6.2", "vitest": "^2.1.1" }, "nx": { "targets": { "build": { "inputs": [ "{workspaceRoot}/cli", "{workspaceRoot}/target/idl", "{workspaceRoot}/target/types" ] } } } }
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/.prettierrc
{ "semi": true, "trailingComma": "all", "singleQuote": true, "printWidth": 80, "useTabs": false, "tabWidth": 4, "bracketSpacing": true, "arrowParens": "avoid" }
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tsconfig.json
{ "$schema": "https://json.schemastore.org/tsconfig", "compilerOptions": { "importHelpers": true, "outDir": "./dist", "esModuleInterop": true, "allowSyntheticDefaultImports": true, "strict": true, "declaration": false, "target": "ESNext", "module": "ESNext", "moduleResolution": "Node", "lib": ["ESNext", "DOM"], "types": ["node"], "skipLibCheck": false }, "include": ["./src/**/*.ts", "rollup.config.js"] }
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/.eslintignore
node_modules lib dist
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/vitest.config.ts
import { defineConfig } from 'vitest/config';
import { resolve } from 'path';

// E2E test discovery is disabled entirely when EXCLUDE_E2E is set in the
// environment; otherwise both in-source __tests__ and the tests/ tree run.
const testFilePatterns = process.env.EXCLUDE_E2E
    ? []
    : ['src/**/__tests__/*.test.ts', 'tests/**/*.test.ts'];

export default defineConfig({
    logLevel: 'info',
    test: {
        include: testFilePatterns,
        // Enables in-source testing via `import.meta.vitest` blocks.
        includeSource: ['src/**/*.{js,ts}'],
        exclude: ['src/program.ts'],
        // Generous timeouts: e2e tests run against a local test validator.
        testTimeout: 350000,
        hookTimeout: 30000,
    },
    define: {
        // Strips in-source test blocks from production builds.
        'import.meta.vitest': false,
    },
    build: {
        lib: {
            formats: ['es', 'cjs'],
            entry: resolve(__dirname, 'src/index.ts'),
            fileName: 'index',
        },
    },
});
0
solana_public_repos/Lightprotocol/light-protocol/js
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/.eslintrc.json
{ "root": true, "ignorePatterns": ["node_modules", "lib"], "parser": "@typescript-eslint/parser", "parserOptions": { "project": ["./tsconfig.json", "./tsconfig.test.json"] }, "plugins": ["@typescript-eslint"], "extends": [ "eslint:recommended", "plugin:@typescript-eslint/eslint-recommended", "plugin:@typescript-eslint/recommended" ], "rules": { "@typescript-eslint/ban-ts-comment": 0, "@typescript-eslint/no-explicit-any": 0, "@typescript-eslint/no-var-requires": 0, "@typescript-eslint/no-unused-vars": 0, "no-prototype-builtins": 0 } }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/approve-and-mint-to.test.ts
import { describe, it, expect, beforeAll, assert } from 'vitest';
import { PublicKey, Signer, Keypair, SystemProgram } from '@solana/web3.js';
import {
    MINT_SIZE,
    TOKEN_2022_PROGRAM_ID,
    TOKEN_PROGRAM_ID,
    createInitializeMint2Instruction,
} from '@solana/spl-token';
import { approveAndMintTo, createTokenPool } from '../../src/actions';
import {
    Rpc,
    bn,
    buildAndSignTx,
    dedupeSigner,
    newAccountWithLamports,
    sendAndConfirmTx,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import { WasmFactory } from '@lightprotocol/hasher.rs';
import { BN } from '@coral-xyz/anchor';

/**
 * Creates a plain SPL (or Token-2022) mint account outside of the compressed
 * token program: allocates the mint account via the System Program and
 * initializes it with TEST_TOKEN_DECIMALS, `mintAuthority`, and no freeze
 * authority. `payer` funds and signs; `mintKeypair` co-signs as the new
 * account.
 */
async function createTestSplMint(
    rpc: Rpc,
    payer: Signer,
    mintKeypair: Signer,
    mintAuthority: Keypair,
    isToken2022 = false,
) {
    const programId = isToken2022 ? TOKEN_2022_PROGRAM_ID : TOKEN_PROGRAM_ID;
    const rentExemptBalance =
        await rpc.getMinimumBalanceForRentExemption(MINT_SIZE);

    const createMintAccountInstruction = SystemProgram.createAccount({
        fromPubkey: payer.publicKey,
        lamports: rentExemptBalance,
        newAccountPubkey: mintKeypair.publicKey,
        programId,
        space: MINT_SIZE,
    });
    const initializeMintInstruction = createInitializeMint2Instruction(
        mintKeypair.publicKey,
        TEST_TOKEN_DECIMALS,
        mintAuthority.publicKey,
        null, // no freeze authority
        programId,
    );
    const { blockhash } = await rpc.getLatestBlockhash();

    // mintKeypair must co-sign account creation; dedupeSigner avoids listing
    // payer twice if it happens to equal mintKeypair.
    const tx = buildAndSignTx(
        [createMintAccountInstruction, initializeMintInstruction],
        payer,
        blockhash,
        dedupeSigner(payer, [mintKeypair]),
    );
    await sendAndConfirmTx(rpc, tx);
}

const TEST_TOKEN_DECIMALS = 2;

// E2E suite: requires a running local test validator (see package.json
// "test-validator" script).
describe('approveAndMintTo', () => {
    let rpc: Rpc;
    let payer: Signer;
    let bob: PublicKey;
    let mintKeypair: Keypair;
    let mint: PublicKey;
    let mintAuthority: Keypair;

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc);
        bob = Keypair.generate().publicKey;
        mintAuthority = Keypair.generate();
        mintKeypair = Keypair.generate();
        mint = mintKeypair.publicKey;

        /// Create external SPL mint
        await createTestSplMint(rpc, payer, mintKeypair, mintAuthority);
        /// Register mint
        await createTokenPool(rpc, payer, mint);
    });

    it('should mintTo compressed account with external spl mint', async () => {
        assert(mint.equals(mintKeypair.publicKey));

        await approveAndMintTo(
            rpc,
            payer,
            mint,
            bob,
            mintAuthority,
            1000000000,
        );

        await assertApproveAndMintTo(rpc, mint, bn(1000000000), bob);
    });

    it('should mintTo compressed account with external token 2022 mint', async () => {
        // Fresh actors/mint for the Token-2022 flow; does not reuse suite state.
        const payer = await newAccountWithLamports(rpc);
        const bob = Keypair.generate().publicKey;
        const token22MintAuthority = Keypair.generate();
        const token22MintKeypair = Keypair.generate();
        const token22Mint = token22MintKeypair.publicKey;

        /// Create external SPL mint
        await createTestSplMint(
            rpc,
            payer,
            token22MintKeypair,
            token22MintAuthority,
            true,
        );
        // Sanity check: the mint account must be owned by the Token-2022 program.
        const mintAccountInfo = await rpc.getAccountInfo(token22Mint);
        assert(mintAccountInfo!.owner.equals(TOKEN_2022_PROGRAM_ID));

        /// Register mint
        await createTokenPool(rpc, payer, token22Mint);

        assert(token22Mint.equals(token22MintKeypair.publicKey));

        await approveAndMintTo(
            rpc,
            payer,
            token22Mint,
            bob,
            token22MintAuthority,
            1000000000,
        );

        await assertApproveAndMintTo(rpc, token22Mint, bn(1000000000), bob);
    });
});

/**
 * Assert that approveAndMintTo() creates a new compressed token account for the
 * recipient
 */
async function assertApproveAndMintTo(
    rpc: Rpc,
    refMint: PublicKey,
    refAmount: BN,
    refTo: PublicKey,
) {
    const compressedTokenAccounts = await rpc.getCompressedTokenAccountsByOwner(
        refTo,
        {
            mint: refMint,
        },
    );
    // Only the first (and expected-only) account is checked.
    const compressedTokenAccount = compressedTokenAccounts.items[0];
    expect(compressedTokenAccount.parsed.mint.toBase58()).toBe(
        refMint.toBase58(),
    );
    expect(compressedTokenAccount.parsed.amount.eq(refAmount)).toBe(true);
    expect(compressedTokenAccount.parsed.owner.equals(refTo)).toBe(true);
    expect(compressedTokenAccount.parsed.delegate).toBe(null);
}
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/merge-token-accounts.test.ts
import { describe, it, expect, beforeAll, beforeEach } from 'vitest';
import { PublicKey, Keypair, Signer } from '@solana/web3.js';
import {
    Rpc,
    bn,
    defaultTestStateTreeAccounts,
    newAccountWithLamports,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import { WasmFactory } from '@lightprotocol/hasher.rs';
import { createMint, mintTo, mergeTokenAccounts } from '../../src/actions';

// E2E suite: requires a running local test validator.
// NOTE(review): the first test below uses `it.only`, so the second test is
// skipped on every run — presumably intentional given its TODO, but confirm.
describe('mergeTokenAccounts', () => {
    let rpc: Rpc;
    let payer: Signer;
    let owner: Signer;
    let mint: PublicKey;
    let mintAuthority: Keypair;
    const { merkleTree } = defaultTestStateTreeAccounts();

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc, 1e9);
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                2, // decimals
                mintKeypair,
            )
        ).mint;
    });

    beforeEach(async () => {
        // Fresh owner per test, seeded with 5 separate compressed token
        // accounts of 100 tokens each.
        owner = await newAccountWithLamports(rpc, 1e9);

        // Mint multiple token accounts to the owner
        for (let i = 0; i < 5; i++) {
            await mintTo(
                rpc,
                payer,
                mint,
                owner.publicKey,
                mintAuthority,
                bn(100),
            );
        }
    });

    it.only('should merge all token accounts', async () => {
        const preAccounts = await rpc.getCompressedTokenAccountsByOwner(
            owner.publicKey,
            { mint },
        );
        expect(preAccounts.items.length).to.be.greaterThan(1);

        await mergeTokenAccounts(rpc, payer, mint, owner, merkleTree);

        // Merge must reduce the account count while preserving total balance.
        const postAccounts = await rpc.getCompressedTokenAccountsByOwner(
            owner.publicKey,
            { mint },
        );
        expect(postAccounts.items.length).to.be.lessThan(
            preAccounts.items.length,
        );

        const totalBalance = postAccounts.items.reduce(
            (sum, account) => sum.add(account.parsed.amount),
            bn(0),
        );
        expect(totalBalance.toNumber()).to.equal(500); // 5 accounts * 100 tokens each
    });

    // TODO: add coverage for this apparent edge case. not required for now though.
    // NOTE(review): currently skipped because of `it.only` above.
    it('should handle merging when there is only one account', async () => {
        // First merge collapses the seeded accounts into one.
        try {
            await mergeTokenAccounts(rpc, payer, mint, owner, merkleTree);
            console.log('First merge succeeded');
            const postFirstMergeAccounts =
                await rpc.getCompressedTokenAccountsByOwner(owner.publicKey, {
                    mint,
                });
            console.log('Accounts after first merge:', postFirstMergeAccounts);
        } catch (error) {
            console.error('First merge failed:', error);
            throw error;
        }

        // Second merge attempt — deliberately best-effort: a failure here is
        // logged but does not fail the test.
        try {
            await mergeTokenAccounts(rpc, payer, mint, owner, merkleTree);
            console.log('Second merge succeeded');
        } catch (error) {
            console.error('Second merge failed:', error);
        }

        const finalAccounts = await rpc.getCompressedTokenAccountsByOwner(
            owner.publicKey,
            { mint },
        );
        console.log('Final accounts:', finalAccounts);
        expect(finalAccounts.items.length).to.equal(1);
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/create-mint.test.ts
import { describe, it, expect, beforeAll, assert } from 'vitest';
import { CompressedTokenProgram } from '../../src/program';
import { PublicKey, Signer, Keypair } from '@solana/web3.js';
import { unpackMint, unpackAccount } from '@solana/spl-token';
import { createMint } from '../../src/actions';
import {
    Rpc,
    newAccountWithLamports,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import { WasmFactory } from '@lightprotocol/hasher.rs';

/**
 * Asserts that createMint() creates a new spl mint account + the respective
 * system pool account
 */
async function assertCreateMint(
    mint: PublicKey,
    authority: PublicKey,
    rpc: Rpc,
    decimals: number,
    poolAccount: PublicKey,
) {
    // The mint must be a freshly initialized SPL mint: zero supply, the
    // requested authority/decimals, no freeze authority, no TLV extensions.
    const mintAcc = await rpc.getAccountInfo(mint);
    const unpackedMint = unpackMint(mint, mintAcc);

    expect(unpackedMint.mintAuthority?.toString()).toBe(authority.toString());
    expect(unpackedMint.supply).toBe(0n);
    expect(unpackedMint.decimals).toBe(decimals);
    expect(unpackedMint.isInitialized).toBe(true);
    expect(unpackedMint.freezeAuthority).toBe(null);
    expect(unpackedMint.tlvData.length).toBe(0);

    /// Pool (omnibus) account is a regular SPL Token account
    const poolAccountInfo = await rpc.getAccountInfo(poolAccount);
    const unpackedPoolAccount = unpackAccount(poolAccount, poolAccountInfo);
    expect(unpackedPoolAccount.mint.equals(mint)).toBe(true);
    expect(unpackedPoolAccount.amount).toBe(0n);
    expect(
        unpackedPoolAccount.owner.equals(
            // NOTE(review): referenced without a call — presumably a static
            // getter returning a PublicKey; confirm it is not a method that
            // should be invoked here.
            CompressedTokenProgram.deriveCpiAuthorityPda,
        ),
    ).toBe(true);
    expect(unpackedPoolAccount.delegate).toBe(null);
}

const TEST_TOKEN_DECIMALS = 2;

// E2E suite: requires a running local test validator.
describe('createMint', () => {
    let rpc: Rpc;
    let payer: Signer;
    let mint: PublicKey;
    let mintAuthority: Keypair;

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc, 1e9);
    });

    it('should create mint', async () => {
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();

        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            )
        ).mint;
        const poolAccount = CompressedTokenProgram.deriveTokenPoolPda(mint);
        assert(mint.equals(mintKeypair.publicKey));

        await assertCreateMint(
            mint,
            mintAuthority.publicKey,
            rpc,
            TEST_TOKEN_DECIMALS,
            poolAccount,
        );

        /// Mint already exists — re-creating with the same keypair must fail.
        await expect(
            createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            ),
        ).rejects.toThrow();
    });

    it('should create mint with payer as authority', async () => {
        // Omitting the mint keypair lets createMint generate one internally.
        mint = (
            await createMint(rpc, payer, payer.publicKey, TEST_TOKEN_DECIMALS)
        ).mint;
        const poolAccount = CompressedTokenProgram.deriveTokenPoolPda(mint);

        await assertCreateMint(
            mint,
            payer.publicKey,
            rpc,
            TEST_TOKEN_DECIMALS,
            poolAccount,
        );
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/transfer.test.ts
import { describe, it, expect, beforeAll, beforeEach, assert } from 'vitest';
import { PublicKey, Keypair, Signer } from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import {
    ParsedTokenAccount,
    Rpc,
    bn,
    defaultTestStateTreeAccounts,
    newAccountWithLamports,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import { WasmFactory } from '@lightprotocol/hasher.rs';
import { createMint, mintTo, transfer } from '../../src/actions';
import { TOKEN_2022_PROGRAM_ID } from '@solana/spl-token';

/**
 * Assert that we created recipient and change-account for the sender, with all
 * amounts correctly accounted for
 *
 * Checks: sender's post-transfer balance equals pre-balance minus refAmount;
 * recipient's newest account holds exactly refAmount with no delegate; and,
 * when provided, the expected account counts for sender and recipient.
 */
async function assertTransfer(
    rpc: Rpc,
    senderPreCompressedTokenAccounts: ParsedTokenAccount[],
    refMint: PublicKey,
    refAmount: BN,
    refSender: PublicKey,
    refRecipient: PublicKey,
    expectedAccountCountSenderPost?: number,
    expectedAccountCountRecipientPost?: number,
) {
    /// Transfer can merge input compressed accounts therefore we need to pass all as ref
    const senderPostCompressedTokenAccounts = (
        await rpc.getCompressedTokenAccountsByOwner(refSender, {
            mint: refMint,
        })
    ).items;
    /// pre = post-amount
    const sumPre = senderPreCompressedTokenAccounts.reduce(
        (acc, curr) => bn(acc).add(curr.parsed.amount),
        bn(0),
    );
    const sumPost = senderPostCompressedTokenAccounts.reduce(
        (acc, curr) => bn(acc).add(curr.parsed.amount),
        bn(0),
    );

    if (expectedAccountCountSenderPost) {
        expect(senderPostCompressedTokenAccounts.length).toBe(
            expectedAccountCountSenderPost,
        );
    }

    // Conservation: sender lost exactly refAmount.
    expect(sumPre.sub(refAmount).eq(sumPost)).toBe(true);

    const recipientCompressedTokenAccounts = (
        await rpc.getCompressedTokenAccountsByOwner(refRecipient, {
            mint: refMint,
        })
    ).items;

    if (expectedAccountCountRecipientPost) {
        expect(recipientCompressedTokenAccounts.length).toBe(
            expectedAccountCountRecipientPost,
        );
    }

    /// recipient should have received the amount
    // NOTE(review): assumes items[0] is the account created by this transfer
    // (newest-first ordering) — confirm against the RPC's sort order.
    const recipientCompressedTokenAccount = recipientCompressedTokenAccounts[0];
    expect(recipientCompressedTokenAccount.parsed.amount.eq(refAmount)).toBe(
        true,
    );
    expect(recipientCompressedTokenAccount.parsed.delegate).toBe(null);
}

const TEST_TOKEN_DECIMALS = 2;

// E2E suite: requires a running local test validator.
describe('transfer', () => {
    let rpc: Rpc;
    let payer: Signer;
    let bob: Signer;
    let charlie: Signer;
    let mint: PublicKey;
    let mintAuthority: Keypair;
    const { merkleTree } = defaultTestStateTreeAccounts();

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc, 1e9);
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            )
        ).mint;
    });

    beforeEach(async () => {
        // Fresh bob/charlie per test; bob starts with 1000 tokens.
        bob = await newAccountWithLamports(rpc, 1e9);
        charlie = await newAccountWithLamports(rpc, 1e9);

        await mintTo(rpc, payer, mint, bob.publicKey, mintAuthority, bn(1000));
    });

    it('should transfer from bob -> charlie', async () => {
        /// send 700 from bob -> charlie
        /// bob: 300, charlie: 700
        const bobPreCompressedTokenAccounts = (
            await rpc.getCompressedTokenAccountsByOwner(bob.publicKey, {
                mint,
            })
        ).items;

        await transfer(
            rpc,
            payer,
            mint,
            bn(700),
            bob,
            charlie.publicKey,
            merkleTree,
        );

        await assertTransfer(
            rpc,
            bobPreCompressedTokenAccounts,
            mint,
            bn(700),
            bob.publicKey,
            charlie.publicKey,
            1,
            1,
        );

        /// send 200 from bob -> charlie
        /// bob: 100, charlie: (700+200)
        const bobPreCompressedTokenAccounts2 =
            await rpc.getCompressedTokenAccountsByOwner(bob.publicKey, {
                mint,
            });
        await transfer(
            rpc,
            payer,
            mint,
            bn(200),
            bob,
            charlie.publicKey,
            merkleTree,
        );

        await assertTransfer(
            rpc,
            bobPreCompressedTokenAccounts2.items,
            mint,
            bn(200),
            bob.publicKey,
            charlie.publicKey,
            1,
            2,
        );

        /// send 5 from charlie -> bob
        /// bob: (100+5), charlie: (695+200)
        const charliePreCompressedTokenAccounts3 =
            await rpc.getCompressedTokenAccountsByOwner(charlie.publicKey, {
                mint,
            });

        await transfer(
            rpc,
            payer,
            mint,
            bn(5),
            charlie,
            bob.publicKey,
            merkleTree,
        );

        await assertTransfer(
            rpc,
            charliePreCompressedTokenAccounts3.items,
            mint,
            bn(5),
            charlie.publicKey,
            bob.publicKey,
            2,
            2,
        );

        /// send 700 from charlie -> bob, 2 compressed account inputs
        /// bob: (100+5+700), charlie: (195)
        const charliePreCompressedTokenAccounts4 =
            await rpc.getCompressedTokenAccountsByOwner(charlie.publicKey, {
                mint,
            });

        // merkleTree omitted here: transfer falls back to its default tree.
        await transfer(rpc, payer, mint, bn(700), charlie, bob.publicKey);

        await assertTransfer(
            rpc,
            charliePreCompressedTokenAccounts4.items,
            mint,
            bn(700),
            charlie.publicKey,
            bob.publicKey,
            1,
            3,
        );

        // Overspend must be rejected before any state change.
        await expect(
            transfer(
                rpc,
                payer,
                mint,
                10000,
                bob,
                charlie.publicKey,
                merkleTree,
            ),
        ).rejects.toThrow('Not enough balance for transfer');
    });

    it('should transfer token 2022 from bob -> charlie', async () => {
        // Re-creates the mint as a Token-2022 mint (trailing `true` flag).
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
                undefined,
                true,
            )
        ).mint;

        const mintAccountInfo = await rpc.getAccountInfo(mint);
        assert.equal(
            mintAccountInfo!.owner.toBase58(),
            TOKEN_2022_PROGRAM_ID.toBase58(),
        );

        await mintTo(rpc, payer, mint, bob.publicKey, mintAuthority, bn(1000));

        /// send 700 from bob -> charlie
        /// bob: 300, charlie: 700
        const bobPreCompressedTokenAccounts = (
            await rpc.getCompressedTokenAccountsByOwner(bob.publicKey, {
                mint,
            })
        ).items;

        await transfer(
            rpc,
            payer,
            mint,
            bn(700),
            bob,
            charlie.publicKey,
            merkleTree,
        );

        await assertTransfer(
            rpc,
            bobPreCompressedTokenAccounts,
            mint,
            bn(700),
            bob.publicKey,
            charlie.publicKey,
            1,
            1,
        );
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/compress.test.ts
import { describe, it, expect, beforeAll } from 'vitest';
import {
    PublicKey,
    Keypair,
    Signer,
    ComputeBudgetProgram,
} from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import {
    ParsedTokenAccount,
    Rpc,
    bn,
    defaultTestStateTreeAccounts,
    newAccountWithLamports,
    dedupeSigner,
    buildAndSignTx,
    sendAndConfirmTx,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import {
    compress,
    createMint,
    createTokenProgramLookupTable,
    decompress,
    mintTo,
} from '../../src/actions';
import {
    createAssociatedTokenAccount,
    TOKEN_2022_PROGRAM_ID,
} from '@solana/spl-token';
import { CompressedTokenProgram } from '../../src/program';
import { WasmFactory } from '@lightprotocol/hasher.rs';

/**
 * Assert that a compression debited the sender's SPL token account and
 * credited each recipient's compressed-token balance.
 *
 * Checks:
 * - sender ATA balance decreased by the sum of `refAmounts`;
 * - each recipient's compressed balance increased by its `refAmounts[i]`
 *   relative to the snapshot in `refRecipientCompressedTokenBalancesBefore[i]`.
 *
 * @param rpc - RPC connection used for balance queries.
 * @param refSenderAtaBalanceBefore - sender ATA balance before the compress.
 * @param refSenderAta - sender's SPL associated token account.
 * @param refMint - token mint.
 * @param refAmounts - per-recipient compressed amounts (parallel to
 *                     `refRecipients`).
 * @param refRecipients - recipient owners.
 * @param refRecipientCompressedTokenBalancesBefore - per-recipient
 *        compressed-account snapshots taken before the compress.
 * @throws if amounts and recipients arrays differ in length.
 */
async function assertCompress(
    rpc: Rpc,
    refSenderAtaBalanceBefore: BN,
    refSenderAta: PublicKey,
    refMint: PublicKey,
    refAmounts: BN[],
    refRecipients: PublicKey[],
    refRecipientCompressedTokenBalancesBefore: ParsedTokenAccount[][],
) {
    if (refAmounts.length !== refRecipients.length) {
        throw new Error('Mismatch in length of amounts and recipients arrays');
    }

    const refSenderAtaBalanceAfter =
        await rpc.getTokenAccountBalance(refSenderAta);
    // Sender must have been debited by the total of all recipient amounts.
    const totalAmount = refAmounts.reduce((acc, curr) => acc.add(curr), bn(0));
    expect(
        refSenderAtaBalanceBefore
            .sub(totalAmount)
            .eq(bn(refSenderAtaBalanceAfter.value.amount)),
    ).toBe(true);

    for (let i = 0; i < refRecipients.length; i++) {
        const recipientCompressedTokenBalanceAfter =
            await rpc.getCompressedTokenAccountsByOwner(refRecipients[i], {
                mint: refMint,
            });
        // Compressed balances are compared as sums: a recipient may hold
        // several compressed accounts for the same mint.
        const recipientSumPost =
            recipientCompressedTokenBalanceAfter.items.reduce(
                (acc, curr) => bn(acc).add(curr.parsed.amount),
                bn(0),
            );
        const recipientSumPre = refRecipientCompressedTokenBalancesBefore[
            i
        ].reduce((acc, curr) => bn(acc).add(curr.parsed.amount), bn(0));

        /// recipient should have received the amount
        expect(recipientSumPost.eq(refAmounts[i].add(recipientSumPre))).toBe(
            true,
        );
    }
}

const TEST_TOKEN_DECIMALS = 2;

/**
 * E2e suite for the `compress` action (SPL -> compressed), including
 * multi-recipient batches and a lookup-table-optimized batch.
 */
describe('compress', () => {
    let rpc: Rpc;
    let payer: Signer;
    let bob: Signer;
    let bobAta: PublicKey;
    let charlie: Signer;
    let mint: PublicKey;
    let mintAuthority: Keypair;
    let lut: PublicKey;

    const { merkleTree } = defaultTestStateTreeAccounts();

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc, 1e9);
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            )
        ).mint;

        bob = await newAccountWithLamports(rpc, 1e9);
        charlie = await newAccountWithLamports(rpc, 1e9);

        bobAta = await createAssociatedTokenAccount(
            rpc,
            payer,
            mint,
            bob.publicKey,
        );

        // Mint compressed, then decompress 9000 so bob's ATA holds SPL
        // tokens that the tests below can compress again.
        await mintTo(rpc, payer, mint, bob.publicKey, mintAuthority, bn(10000));
        await decompress(rpc, payer, mint, bn(9000), bob, bobAta);

        /// Setup LUT.
        const { address } = await createTokenProgramLookupTable(
            rpc,
            payer,
            payer,
        );
        lut = address;
    }, 80_000);

    it('should compress from bobAta -> charlie', async () => {
        const senderAtaBalanceBefore = await rpc.getTokenAccountBalance(bobAta);
        const recipientCompressedTokenBalanceBefore =
            await rpc.getCompressedTokenAccountsByOwner(charlie.publicKey, {
                mint,
            });

        await compress(
            rpc,
            payer,
            mint,
            bn(700),
            bob,
            bobAta,
            charlie.publicKey,
            merkleTree,
        );

        await assertCompress(
            rpc,
            bn(senderAtaBalanceBefore.value.amount),
            bobAta,
            mint,
            [bn(700)],
            [charlie.publicKey],
            [recipientCompressedTokenBalanceBefore.items],
        );
    });

    // Shared fixtures for the batch tests: 15 fresh recipients with
    // amounts 1..15.
    const maxBatchSize = 15;
    const recipients = Array.from(
        { length: maxBatchSize },
        () => Keypair.generate().publicKey,
    );
    const amounts = Array.from({ length: maxBatchSize }, (_, i) => bn(i + 1));

    it('should compress to multiple (11 max without LUT) recipients with array of amounts and addresses', async () => {
        const senderAtaBalanceBefore = await rpc.getTokenAccountBalance(bobAta);
        const
recipientCompressedTokenBalancesBefore = await Promise.all( recipients.map(recipient => rpc.getCompressedTokenAccountsByOwner(recipient, { mint }), ), ); await compress( rpc, payer, mint, amounts.slice(0, 11), bob, bobAta, recipients.slice(0, 11), merkleTree, ); for (let i = 0; i < recipients.length; i++) { await assertCompress( rpc, bn(senderAtaBalanceBefore.value.amount), bobAta, mint, amounts.slice(0, 11), recipients.slice(0, 11), recipientCompressedTokenBalancesBefore.map(x => x.items), ); } const senderAtaBalanceAfter = await rpc.getTokenAccountBalance(bobAta); const totalCompressed = amounts .slice(0, 11) .reduce((sum, amount) => sum.add(amount), bn(0)); expect(senderAtaBalanceAfter.value.amount).toEqual( bn(senderAtaBalanceBefore.value.amount) .sub(totalCompressed) .toString(), ); }); it('should fail when passing unequal array lengths for amounts and toAddress', async () => { await expect( compress( rpc, payer, mint, amounts.slice(0, 10), bob, bobAta, recipients.slice(0, 11), merkleTree, ), ).rejects.toThrow( 'Amount and toAddress arrays must have the same length', ); await expect( compress( rpc, payer, mint, amounts[0], bob, bobAta, recipients, merkleTree, ), ).rejects.toThrow( 'Both amount and toAddress must be arrays or both must be single values', ); }); it(`should compress-batch to max ${maxBatchSize} recipients optimized with LUT`, async () => { /// Fetch state of LUT const lookupTableAccount = (await rpc.getAddressLookupTable(lut)) .value!; /// Mint to max recipients with LUT const ix = await CompressedTokenProgram.compress({ payer: payer.publicKey, owner: bob.publicKey, source: bobAta, toAddress: recipients, amount: amounts, mint, outputStateTree: merkleTree, }); const { blockhash } = await rpc.getLatestBlockhash(); const additionalSigners = dedupeSigner(payer, [bob]); const tx = buildAndSignTx( [ComputeBudgetProgram.setComputeUnitLimit({ units: 500_000 }), ix], payer, blockhash, additionalSigners, [lookupTableAccount], ); const txId = await 
sendAndConfirmTx(rpc, tx);
        return txId;
    });

    it('should compress from bob Token 2022 Ata -> charlie', async () => {
        const mintKeypair = Keypair.generate();
        // Last arg `true` requests a Token-2022 mint.
        const token22Mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
                undefined,
                true,
            )
        ).mint;

        const mintAccountInfo = await rpc.getAccountInfo(token22Mint);
        // Fix: the original wrote `expect(actual, expected)` — vitest treats
        // the second argument of `expect` as a failure message, so no
        // assertion was performed at all. Assert the owner properly.
        expect(mintAccountInfo!.owner.toBase58()).toBe(
            TOKEN_2022_PROGRAM_ID.toBase58(),
        );

        bob = await newAccountWithLamports(rpc, 1e9);
        charlie = await newAccountWithLamports(rpc, 1e9);

        const bobToken2022Ata = await createAssociatedTokenAccount(
            rpc,
            payer,
            token22Mint,
            bob.publicKey,
            undefined,
            TOKEN_2022_PROGRAM_ID,
        );

        // Mint compressed, then decompress 9000 so bob's Token-2022 ATA has
        // SPL tokens available to compress.
        await mintTo(
            rpc,
            payer,
            token22Mint,
            bob.publicKey,
            mintAuthority,
            bn(10000),
        );
        await decompress(
            rpc,
            payer,
            token22Mint,
            bn(9000),
            bob,
            bobToken2022Ata,
        );

        const senderAtaBalanceBefore =
            await rpc.getTokenAccountBalance(bobToken2022Ata);
        const recipientCompressedTokenBalanceBefore =
            await rpc.getCompressedTokenAccountsByOwner(charlie.publicKey, {
                mint: token22Mint,
            });

        await compress(
            rpc,
            payer,
            token22Mint,
            bn(701),
            bob,
            bobToken2022Ata,
            charlie.publicKey,
            merkleTree,
        );

        await assertCompress(
            rpc,
            bn(senderAtaBalanceBefore.value.amount),
            bobToken2022Ata,
            token22Mint,
            [bn(701)],
            [charlie.publicKey],
            [recipientCompressedTokenBalanceBefore.items],
        );
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/mint-to.test.ts
import { describe, it, expect, beforeAll } from 'vitest';
import {
    PublicKey,
    Signer,
    Keypair,
    ComputeBudgetProgram,
} from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import {
    createMint,
    createTokenProgramLookupTable,
    mintTo,
} from '../../src/actions';
import {
    getTestKeypair,
    newAccountWithLamports,
    bn,
    defaultTestStateTreeAccounts,
    Rpc,
    sendAndConfirmTx,
    buildAndSignTx,
    dedupeSigner,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import { CompressedTokenProgram } from '../../src/program';
import { WasmFactory } from '@lightprotocol/hasher.rs';

/**
 * Asserts that mintTo() creates a new compressed token account for the
 * recipient.
 *
 * Checks mint, amount, owner, and that no delegate is set on the first
 * compressed account returned for `refTo`.
 *
 * @param rpc - RPC connection used for the query.
 * @param refMint - expected token mint.
 * @param refAmount - expected minted amount.
 * @param refTo - expected compressed-account owner.
 */
async function assertMintTo(
    rpc: Rpc,
    refMint: PublicKey,
    refAmount: BN,
    refTo: PublicKey,
) {
    const compressedTokenAccounts = await rpc.getCompressedTokenAccountsByOwner(
        refTo,
        {
            mint: refMint,
        },
    );
    // NOTE(review): inspects only items[0]; assumes the query returns the
    // newly minted account first — confirm ordering guarantee.
    const compressedTokenAccount = compressedTokenAccounts.items[0];
    expect(compressedTokenAccount.parsed.mint.toBase58()).toBe(
        refMint.toBase58(),
    );
    expect(compressedTokenAccount.parsed.amount.eq(refAmount)).toBe(true);
    expect(compressedTokenAccount.parsed.owner.equals(refTo)).toBe(true);
    expect(compressedTokenAccount.parsed.delegate).toBe(null);
}

const TEST_TOKEN_DECIMALS = 2;

/**
 * E2e suite for the `mintTo` action: single recipient, multi-recipient
 * batches, and a lookup-table-optimized large batch.
 */
describe('mintTo', () => {
    let rpc: Rpc;
    let payer: Signer;
    let bob: Signer;
    let mint: PublicKey;
    let mintAuthority: Keypair;
    let lut: PublicKey;

    const { merkleTree } = defaultTestStateTreeAccounts();

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc);
        bob = getTestKeypair();
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            )
        ).mint;

        /// Setup LUT.
        const { address } = await createTokenProgramLookupTable(
            rpc,
            payer,
            payer,
        );
        lut = address;
    }, 80_000);

    it('should mint to bob', async () => {
        const amount = bn(1000);
        await mintTo(rpc, payer, mint, bob.publicKey, mintAuthority, amount);
        await assertMintTo(rpc, mint, amount, bob.publicKey);

        /// wrong authority
        await expect(
            mintTo(rpc, payer, mint, bob.publicKey, payer, amount),
        ).rejects.toThrowError(/custom program error: 0x1782/);

        /// with output state merkle tree defined
        await mintTo(
            rpc,
            payer,
            mint,
            bob.publicKey,
            mintAuthority,
            amount,
            merkleTree,
        );
    });

    // Shared fixtures for the batch tests: 18 recipients, amounts 1..18.
    const maxRecipients = 18;
    const recipients = Array.from(
        { length: maxRecipients },
        () => Keypair.generate().publicKey,
    );
    const amounts = Array.from({ length: maxRecipients }, (_, i) => bn(i + 1));

    it('should mint to multiple recipients', async () => {
        /// mint to three recipients
        await mintTo(
            rpc,
            payer,
            mint,
            recipients.slice(0, 3),
            mintAuthority,
            amounts.slice(0, 3),
        );

        /// Mint to 10 recipients
        await mintTo(
            rpc,
            payer,
            mint,
            recipients.slice(0, 10),
            mintAuthority,
            amounts.slice(0, 10),
        );

        // Uneven amounts
        await expect(
            mintTo(
                rpc,
                payer,
                mint,
                recipients,
                mintAuthority,
                amounts.slice(0, 2),
            ),
        ).rejects.toThrowError(
            /Amount and toPubkey arrays must have the same length/,
        );
    });

    it(`should mint to ${recipients.length} recipients optimized with LUT`, async () => {
        const lookupTableAccount = (await rpc.getAddressLookupTable(lut))
            .value!;

        // Build the raw instruction directly so the LUT can be applied to
        // the versioned transaction.
        const ix = await CompressedTokenProgram.mintTo({
            feePayer: payer.publicKey,
            mint,
            authority: mintAuthority.publicKey,
            amount: amounts,
            toPubkey: recipients,
            merkleTree,
        });

        const { blockhash } = await rpc.getLatestBlockhash();
        const additionalSigners = dedupeSigner(payer, [mintAuthority]);
        const tx = buildAndSignTx(
            [ComputeBudgetProgram.setComputeUnitLimit({ units: 500_000 }), ix],
            payer,
            blockhash,
            additionalSigners,
            [lookupTableAccount],
        );
        return await sendAndConfirmTx(rpc, tx);
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/decompress.test.ts
import { describe, it, expect, beforeAll } from 'vitest';
import { PublicKey, Keypair, Signer } from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import {
    ParsedTokenAccount,
    Rpc,
    bn,
    defaultTestStateTreeAccounts,
    newAccountWithLamports,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import { WasmFactory } from '@lightprotocol/hasher.rs';
import { createMint, decompress, mintTo } from '../../src/actions';
import { createAssociatedTokenAccount } from '@solana/spl-token';

/**
 * Assert that a decompression credited the recipient's SPL ATA and debited
 * the sender's compressed balance by exactly `refAmount`.
 *
 * @param rpc - RPC connection used for balance queries.
 * @param refRecipientAtaBalanceBefore - recipient ATA balance pre-decompress.
 * @param refRecipientAta - recipient's SPL associated token account.
 * @param refMint - token mint.
 * @param refAmount - amount decompressed.
 * @param refSender - owner of the compressed source accounts.
 * @param refSenderCompressedTokenBalanceBefore - sender's compressed-account
 *        snapshot taken before the decompress.
 */
async function assertDecompress(
    rpc: Rpc,
    refRecipientAtaBalanceBefore: BN,
    refRecipientAta: PublicKey, // all
    refMint: PublicKey,
    refAmount: BN,
    refSender: PublicKey,
    refSenderCompressedTokenBalanceBefore: ParsedTokenAccount[],
) {
    const refRecipientAtaBalanceAfter =
        await rpc.getTokenAccountBalance(refRecipientAta);

    const senderCompressedTokenBalanceAfter = (
        await rpc.getCompressedTokenAccountsByOwner(refSender, {
            mint: refMint,
        })
    ).items;

    // Compare sums — the sender may hold multiple compressed accounts.
    const senderSumPost = senderCompressedTokenBalanceAfter.reduce(
        (acc, curr) => bn(acc).add(curr.parsed.amount),
        bn(0),
    );
    const senderSumPre = refSenderCompressedTokenBalanceBefore.reduce(
        (acc, curr) => bn(acc).add(curr.parsed.amount),
        bn(0),
    );

    /// recipient ata should have received the amount
    expect(
        bn(refRecipientAtaBalanceAfter.value.amount)
            .sub(refAmount)
            .eq(refRecipientAtaBalanceBefore),
    ).toBe(true);

    /// should have sent the amount
    expect(senderSumPost.eq(senderSumPre.sub(refAmount))).toBe(true);
}

const TEST_TOKEN_DECIMALS = 2;

/**
 * E2e suite for the `decompress` action (compressed -> SPL ATA), run
 * repeatedly to exercise state-tree updates across iterations.
 */
describe('decompress', () => {
    let rpc: Rpc;
    let payer: Signer;
    let bob: Signer;
    let charlie: Signer;
    let charlieAta: PublicKey;
    let mint: PublicKey;
    let mintAuthority: Keypair;

    const { merkleTree } = defaultTestStateTreeAccounts();

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc, 1e9);
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            )
        ).mint;

        bob = await newAccountWithLamports(rpc, 1e9);
        charlie = await newAccountWithLamports(rpc, 1e9);

        charlieAta = await createAssociatedTokenAccount(
            rpc,
            payer,
            mint,
            charlie.publicKey,
        );

        // bob holds 1000 compressed tokens that the loop below decompresses
        // 5 at a time into charlie's ATA.
        await mintTo(rpc, payer, mint, bob.publicKey, mintAuthority, bn(1000));
    });

    const LOOP = 10;

    it(`should decompress from bob -> charlieAta ${LOOP} times`, async () => {
        // NOTE(review): re-creates the test RPC here even though beforeAll
        // already did — presumably to reset cached state; confirm necessity.
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);

        for (let i = 0; i < LOOP; i++) {
            const recipientAtaBalanceBefore =
                await rpc.getTokenAccountBalance(charlieAta);
            const senderCompressedTokenBalanceBefore =
                await rpc.getCompressedTokenAccountsByOwner(bob.publicKey, {
                    mint,
                });

            await decompress(
                rpc,
                payer,
                mint,
                bn(5),
                bob,
                charlieAta,
                merkleTree,
            );

            await assertDecompress(
                rpc,
                bn(recipientAtaBalanceBefore.value.amount),
                charlieAta,
                mint,
                bn(5),
                bob.publicKey,
                senderCompressedTokenBalanceBefore.items,
            );
        }
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/custom-program-id.test.ts
import { describe, it, expect } from 'vitest'; import { CompressedTokenProgram } from '../../src/program'; import { PublicKey } from '@solana/web3.js'; describe('custom programId', () => { it('should switch programId', async () => { const defaultProgramId = new PublicKey( 'cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m', ); const solMint = new PublicKey( 'So11111111111111111111111111111111111111112', ); const expectedPoolPda = new PublicKey( '3EJpXEsHL6JxNoPJWjF4QTKuvFvxzsUPbf1xF8iMbnL7', ); const newProgramId = new PublicKey( '2WpGefPmpKMbkyLewupcfb8DuJ1ZMSPkMSu5WEvDMpF4', ); // Check default program ID expect(CompressedTokenProgram.programId).toEqual(defaultProgramId); expect(CompressedTokenProgram.deriveTokenPoolPda(solMint)).toEqual( expectedPoolPda, ); expect(CompressedTokenProgram.program.programId).toEqual( defaultProgramId, ); // Set new program ID CompressedTokenProgram.setProgramId(newProgramId); // Verify program ID was updated expect(CompressedTokenProgram.programId).toEqual(newProgramId); expect(CompressedTokenProgram.deriveTokenPoolPda(solMint)).not.toEqual( expectedPoolPda, ); expect(CompressedTokenProgram.program.programId).toEqual(newProgramId); // Reset program ID CompressedTokenProgram.setProgramId(defaultProgramId); }); });
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/compress-spl-token-account.test.ts
import { describe, it, expect, beforeAll, assert } from 'vitest';
import { PublicKey, Keypair, Signer } from '@solana/web3.js';
import {
    Rpc,
    bn,
    defaultTestStateTreeAccounts,
    newAccountWithLamports,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import {
    createMint,
    decompress,
    mintTo,
    compressSplTokenAccount,
} from '../../src/actions';
import {
    createAssociatedTokenAccount,
    mintToChecked,
    TOKEN_2022_PROGRAM_ID,
} from '@solana/spl-token';
import { WasmFactory } from '@lightprotocol/hasher.rs';

const TEST_TOKEN_DECIMALS = 2;

/**
 * E2e suite for `compressSplTokenAccount`: compresses an SPL token account's
 * balance, optionally leaving `remainingAmount` behind.
 *
 * NOTE(review): the tests are order-dependent — alice's ATA balance is
 * threaded from one test into the next (e.g. the "still has 100" check).
 */
describe('compressSplTokenAccount', () => {
    let rpc: Rpc;
    let payer: Signer;
    let alice: Signer;
    let aliceAta: PublicKey;
    let mint: PublicKey;
    let mintAuthority: Keypair;

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc, 1e9);
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            )
        ).mint;

        alice = await newAccountWithLamports(rpc, 1e9);
        aliceAta = await createAssociatedTokenAccount(
            rpc,
            payer,
            mint,
            alice.publicKey,
        );

        // Mint some tokens to alice's ATA
        await mintTo(
            rpc,
            payer,
            mint,
            alice.publicKey,
            mintAuthority,
            bn(1000),
        );
        // Decompress everything so the full 1000 sits in the SPL ATA.
        await decompress(rpc, payer, mint, bn(1000), alice, aliceAta);
    }, 80_000);

    it('should compress entire token balance when remainingAmount is undefined', async () => {
        // Get initial ATA balance
        const ataBalanceBefore = await rpc.getTokenAccountBalance(aliceAta);
        const initialCompressedBalance =
            await rpc.getCompressedTokenAccountsByOwner(alice.publicKey, {
                mint,
            });

        // Compress the entire balance
        await compressSplTokenAccount(
            rpc,
            payer,
            mint,
            alice,
            aliceAta,
            defaultTestStateTreeAccounts().merkleTree,
        );

        // Get final balances
        const ataBalanceAfter = await rpc.getTokenAccountBalance(aliceAta);
        const compressedBalanceAfter =
            await rpc.getCompressedTokenAccountsByOwner(alice.publicKey, {
                mint,
            });

        // Assert ATA is empty
        expect(bn(ataBalanceAfter.value.amount).eq(bn(0))).toBe(true);

        // Assert compressed balance equals original ATA balance
        const totalCompressedAmount = compressedBalanceAfter.items.reduce(
            (sum, item) => sum.add(item.parsed.amount),
            bn(0),
        );
        const initialCompressedAmount = initialCompressedBalance.items.reduce(
            (sum, item) => sum.add(item.parsed.amount),
            bn(0),
        );
        expect(
            totalCompressedAmount.eq(
                bn(ataBalanceBefore.value.amount).add(initialCompressedAmount),
            ),
        ).toBe(true);
    });

    it('should fail when trying to compress more than available balance', async () => {
        // Mint new tokens for this test
        const testAmount = bn(100);
        await mintToChecked(
            rpc,
            payer,
            mint,
            aliceAta,
            mintAuthority,
            testAmount.toNumber(),
            TEST_TOKEN_DECIMALS,
        );

        // Try to compress more than available
        await expect(
            compressSplTokenAccount(
                rpc,
                payer,
                mint,
                alice,
                aliceAta,
                defaultTestStateTreeAccounts().merkleTree,
                bn(testAmount.add(bn(1))), // Try to leave more than available
            ),
        ).rejects.toThrow();
    });

    it('should leave specified remaining amount in token account', async () => {
        /// still has 100
        expect(
            Number((await rpc.getTokenAccountBalance(aliceAta)).value.amount),
        ).toBe(100);
        const remainingAmount = bn(10);
        const ataBalanceBefore = await rpc.getTokenAccountBalance(aliceAta);
        const initialCompressedBalance =
            await rpc.getCompressedTokenAccountsByOwner(alice.publicKey, {
                mint,
            });

        // Compress tokens while leaving remainingAmount
        await compressSplTokenAccount(
            rpc,
            payer,
            mint,
            alice,
            aliceAta,
            defaultTestStateTreeAccounts().merkleTree,
            remainingAmount,
        );

        // Get final balances
        const ataBalanceAfter = await rpc.getTokenAccountBalance(aliceAta);
        const compressedBalanceAfter =
            await rpc.getCompressedTokenAccountsByOwner(alice.publicKey, {
                mint,
            });

        // Assert remaining amount in ATA
        expect(bn(ataBalanceAfter.value.amount).eq(remainingAmount)).toBe(true);

        // Assert compressed amount is correct
        const totalCompressedAmount = compressedBalanceAfter.items.reduce(
            (sum, item) => sum.add(item.parsed.amount),
            bn(0),
        );
        const initialCompressedAmount = initialCompressedBalance.items.reduce(
            (sum, item) => sum.add(item.parsed.amount),
            bn(0),
        );

        // Assert that the total compressed amount equals:
        // Initial ATA balance - remaining amount + initial compressed amount
        expect(
            totalCompressedAmount.eq(
                bn(ataBalanceBefore.value.amount)
                    .sub(remainingAmount)
                    .add(initialCompressedAmount),
            ),
        ).toBe(true);
    });

    it('should handle remainingAmount = current balance', async () => {
        // Mint some tokens for testing
        const testAmount = bn(100);
        await mintToChecked(
            rpc,
            payer,
            mint,
            aliceAta,
            mintAuthority,
            testAmount.toNumber(),
            TEST_TOKEN_DECIMALS,
        );

        const balanceBefore = await rpc.getTokenAccountBalance(aliceAta);
        const compressedBefore = await rpc.getCompressedTokenAccountsByOwner(
            alice.publicKey,
            { mint },
        );

        // remainingAmount == full balance: ATA is untouched, and a new
        // zero-amount compressed account is created.
        await compressSplTokenAccount(
            rpc,
            payer,
            mint,
            alice,
            aliceAta,
            defaultTestStateTreeAccounts().merkleTree,
            bn(balanceBefore.value.amount),
        );

        const balanceAfter = await rpc.getTokenAccountBalance(aliceAta);
        const compressedAfter = await rpc.getCompressedTokenAccountsByOwner(
            alice.publicKey,
            { mint },
        );

        expect(balanceAfter.value.amount).toBe(balanceBefore.value.amount);
        expect(compressedAfter.items.length).toBe(
            compressedBefore.items.length + 1,
        );
        expect(compressedAfter.items[0].parsed.amount.eq(bn(0))).toBe(true);
    });

    it('should fail when non-owner tries to compress', async () => {
        const nonOwner = await newAccountWithLamports(rpc, 1e9);

        // Mint some tokens to ensure non-zero balance
        await mintToChecked(
            rpc,
            payer,
            mint,
            aliceAta,
            mintAuthority,
            100,
            TEST_TOKEN_DECIMALS,
        );

        await expect(
            compressSplTokenAccount(
                rpc,
                payer,
                mint,
                nonOwner, // wrong signer
                aliceAta,
                defaultTestStateTreeAccounts().merkleTree,
            ),
        ).rejects.toThrow();
    });

    it('should fail with invalid state tree', async () => {
        const invalidTree = Keypair.generate().publicKey;

        // Mint some tokens to ensure non-zero balance
        await mintToChecked(
            rpc,
            payer,
            mint,
            aliceAta,
            mintAuthority,
            100,
            TEST_TOKEN_DECIMALS,
        );

        await expect(
            compressSplTokenAccount(
                rpc,
                payer,
                mint,
                alice,
                aliceAta,
                invalidTree,
            ),
        ).rejects.toThrow();
    });

    it('should compress entire token 2022 account balance when remainingAmount is undefined', async () => {
        // NOTE(review): reassigns suite-level `mint`/`alice`/`aliceAta` to
        // Token-2022 fixtures; any test added after this one inherits them.
        const mintKeypair = Keypair.generate();
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
                undefined,
                true,
            )
        ).mint;

        const mintAccountInfo = await rpc.getAccountInfo(mint);
        assert.equal(
            mintAccountInfo!.owner.toBase58(),
            TOKEN_2022_PROGRAM_ID.toBase58(),
        );

        alice = await newAccountWithLamports(rpc, 1e9);
        aliceAta = await createAssociatedTokenAccount(
            rpc,
            payer,
            mint,
            alice.publicKey,
            undefined,
            TOKEN_2022_PROGRAM_ID,
        );

        // Mint some tokens to alice's ATA
        await mintTo(
            rpc,
            payer,
            mint,
            alice.publicKey,
            mintAuthority,
            bn(1000),
        );
        await decompress(rpc, payer, mint, bn(1000), alice, aliceAta);

        // Get initial ATA balance
        const ataBalanceBefore = await rpc.getTokenAccountBalance(aliceAta);
        const initialCompressedBalance =
            await rpc.getCompressedTokenAccountsByOwner(alice.publicKey, {
                mint,
            });

        // Compress the entire balance
        await compressSplTokenAccount(
            rpc,
            payer,
            mint,
            alice,
            aliceAta,
            defaultTestStateTreeAccounts().merkleTree,
        );

        // Get final balances
        const ataBalanceAfter = await rpc.getTokenAccountBalance(aliceAta);
        const compressedBalanceAfter =
            await rpc.getCompressedTokenAccountsByOwner(alice.publicKey, {
                mint,
            });

        // Assert ATA is empty
        expect(bn(ataBalanceAfter.value.amount).eq(bn(0))).toBe(true);

        // Assert compressed balance equals original ATA balance
        const totalCompressedAmount = compressedBalanceAfter.items.reduce(
            (sum, item) => sum.add(item.parsed.amount),
            bn(0),
        );
        const initialCompressedAmount = initialCompressedBalance.items.reduce(
            (sum, item) => sum.add(item.parsed.amount),
            bn(0),
        );
        expect(
            totalCompressedAmount.eq(
                bn(ataBalanceBefore.value.amount).add(initialCompressedAmount),
            ),
        ).toBe(true);
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/rpc-token-interop.test.ts
import { describe, it, assert, beforeAll } from 'vitest';
import { Keypair, PublicKey, Signer } from '@solana/web3.js';
import {
    Rpc,
    newAccountWithLamports,
    bn,
    createRpc,
    getTestRpc,
    TestRpc,
} from '@lightprotocol/stateless.js';
import { WasmFactory } from '@lightprotocol/hasher.rs';
import { createMint, mintTo, transfer } from '../../src/actions';

const TEST_TOKEN_DECIMALS = 2;

/**
 * Interop suite: runs the same token queries against the real `Rpc` and the
 * local `TestRpc` and asserts the results match. Fixture: bob is minted
 * 1000 and transfers 700 to charlie (so bob holds 300, charlie 700).
 */
describe('rpc-interop token', () => {
    let rpc: Rpc;
    let testRpc: TestRpc;
    let payer: Signer;
    let bob: Signer;
    let charlie: Signer;
    let mint: PublicKey;
    let mintAuthority: Keypair;

    beforeAll(async () => {
        rpc = createRpc();
        const lightWasm = await WasmFactory.getInstance();
        payer = await newAccountWithLamports(rpc, 1e9, 256);
        mintAuthority = Keypair.generate();
        const mintKeypair = Keypair.generate();
        testRpc = await getTestRpc(lightWasm);
        mint = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            )
        ).mint;

        bob = await newAccountWithLamports(rpc, 1e9, 256);
        charlie = await newAccountWithLamports(rpc, 1e9, 256);

        await mintTo(rpc, payer, mint, bob.publicKey, mintAuthority, bn(1000));
        await transfer(rpc, payer, mint, bn(700), bob, charlie.publicKey);
    });

    it('getCompressedTokenAccountsByOwner should match', async () => {
        const senderAccounts = (
            await rpc.getCompressedTokenAccountsByOwner(bob.publicKey, { mint })
        ).items;
        const senderAccountsTest = (
            await testRpc.getCompressedTokenAccountsByOwner(bob.publicKey, {
                mint,
            })
        ).items;

        // Both backends must agree per-account on owner and amount.
        assert.equal(senderAccounts.length, senderAccountsTest.length);
        senderAccounts.forEach((account, index) => {
            assert.equal(
                account.parsed.owner.toBase58(),
                senderAccountsTest[index].parsed.owner.toBase58(),
            );
            assert.isTrue(
                account.parsed.amount.eq(
                    senderAccountsTest[index].parsed.amount,
                ),
            );
        });

        const receiverAccounts = (
            await rpc.getCompressedTokenAccountsByOwner(charlie.publicKey, {
                mint,
            })
        ).items;
        const receiverAccountsTest = (
            await testRpc.getCompressedTokenAccountsByOwner(charlie.publicKey, {
                mint,
            })
        ).items;
        assert.equal(receiverAccounts.length, receiverAccountsTest.length);
        receiverAccounts.forEach((account, index) => {
            assert.equal(
                account.parsed.owner.toBase58(),
                receiverAccountsTest[index].parsed.owner.toBase58(),
            );
            assert.isTrue(
                account.parsed.amount.eq(
                    receiverAccountsTest[index].parsed.amount,
                ),
            );
        });
    });

    it('getCompressedTokenAccountBalance should match ', async () => {
        const senderAccounts = await rpc.getCompressedTokenAccountsByOwner(
            bob.publicKey,
            { mint },
        );

        // Balance lookup is keyed by the compressed account's hash.
        const balance = await rpc.getCompressedTokenAccountBalance(
            bn(senderAccounts.items[0].compressedAccount.hash),
        );
        const balanceTest = await testRpc.getCompressedTokenAccountBalance(
            bn(senderAccounts.items[0].compressedAccount.hash),
        );
        assert.isTrue(balance.amount.eq(balanceTest.amount));
        assert.isNotNull(balance.amount);
        assert.isNotNull(balanceTest.amount);
    });

    it('getCompressedTokenBalancesByOwner should match', async () => {
        const balances = (
            await rpc.getCompressedTokenBalancesByOwner(bob.publicKey, { mint })
        ).items;
        const balancesTest = (
            await testRpc.getCompressedTokenBalancesByOwner(bob.publicKey, {
                mint,
            })
        ).items;

        assert.equal(balances.length, balancesTest.length);
        balances.forEach((balance, index) => {
            assert.isTrue(balance.balance.eq(balancesTest[index].balance));
        });

        const balancesReceiver = (
            await rpc.getCompressedTokenBalancesByOwner(charlie.publicKey, {
                mint,
            })
        ).items;
        const balancesReceiverTest = (
            await testRpc.getCompressedTokenBalancesByOwner(charlie.publicKey, {
                mint,
            })
        ).items;

        assert.equal(balancesReceiver.length, balancesReceiverTest.length);
        balancesReceiver.forEach((balance, index) => {
            assert.isTrue(
                balance.balance.eq(balancesReceiverTest[index].balance),
            );
        });
    });

    it('getCompressedTokenBalancesByOwnerV2 should match', async () => {
        // V2 wraps the result in `value` (RPC response envelope).
        const balances = (
            await rpc.getCompressedTokenBalancesByOwnerV2(bob.publicKey, {
                mint,
            })
        ).value.items;
        const balancesTest = (
            await testRpc.getCompressedTokenBalancesByOwnerV2(bob.publicKey, {
                mint,
            })
        ).value.items;

        assert.equal(balances.length, balancesTest.length);
        balances.forEach((balance, index) => {
            assert.isTrue(balance.balance.eq(balancesTest[index].balance));
        });

        const balancesReceiver = (
            await rpc.getCompressedTokenBalancesByOwnerV2(charlie.publicKey, {
                mint,
            })
        ).value.items;
        const balancesReceiverTest = (
            await testRpc.getCompressedTokenBalancesByOwnerV2(
                charlie.publicKey,
                {
                    mint,
                },
            )
        ).value.items;

        assert.equal(balancesReceiver.length, balancesReceiverTest.length);
        balancesReceiver.forEach((balance, index) => {
            assert.isTrue(
                balance.balance.eq(balancesReceiverTest[index].balance),
            );
        });
    });

    it('[test-rpc missing] getSignaturesForTokenOwner should match', async () => {
        // bob: mintTo + transfer = 2 signatures; charlie: transfer only = 1.
        const signatures = (
            await rpc.getCompressionSignaturesForTokenOwner(bob.publicKey)
        ).items;
        assert.equal(signatures.length, 2);

        const signaturesReceiver = (
            await rpc.getCompressionSignaturesForTokenOwner(charlie.publicKey)
        ).items;
        assert.equal(signaturesReceiver.length, 1);
    });

    it('[test-rpc missing] getTransactionWithCompressionInfo should return correct token pre and post balances', async () => {
        const signatures = (
            await rpc.getCompressionSignaturesForTokenOwner(bob.publicKey)
        ).items;

        const tx = await rpc.getTransactionWithCompressionInfo(
            // most recent
            signatures[0].signature,
        );

        // The transfer consumed bob's 1000-token input and produced a
        // 300-token change output plus charlie's 700-token output.
        assert.isTrue(
            tx!.compressionInfo.preTokenBalances![0].amount.eq(bn(1000)),
        );
        assert.isTrue(
            tx!.compressionInfo.postTokenBalances![0].amount.eq(bn(300)),
        );
        assert.isTrue(tx!.compressionInfo.postTokenBalances!.length === 2);
        assert.isTrue(tx!.compressionInfo.preTokenBalances!.length === 1);
    });

    it('[delegate unused] getCompressedTokenAccountsByDelegate should match', async () => {
        const accs = await rpc.getCompressedTokenAccountsByDelegate(
            bob.publicKey,
            { mint },
        );

        assert.equal(accs.items.length, 0);
    });

    it('[rpc] getCompressedTokenAccountsByOwner with 2 mints should return both mints', async () => {
        // additional mint
        const mint2 = (
            await createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
            )
        ).mint;
        await mintTo(rpc, payer, mint2, bob.publicKey, mintAuthority, bn(1000));

        // No mint filter: should return accounts across all mints.
        const senderAccounts = await rpc.getCompressedTokenAccountsByOwner(
            bob.publicKey,
        );

        // check that mint and mint2 exist in list of senderaccounts at least once
        assert.isTrue(
            senderAccounts.items.some(
                account => account.parsed.mint.toBase58() === mint.toBase58(),
            ),
        );
        assert.isTrue(
            senderAccounts.items.some(
                account => account.parsed.mint.toBase58() === mint2.toBase58(),
            ),
        );
    });

    it('getCompressedMintTokenHolders should return correct holders', async () => {
        const holders = await rpc.getCompressedMintTokenHolders(mint);
        assert.equal(holders.value.items.length, 2);

        const bobHolder = holders.value.items.find(
            holder => holder.owner.toBase58() === bob.publicKey.toBase58(),
        );
        assert.isNotNull(bobHolder);
        assert.isTrue(bobHolder!.balance.eq(bn(300)));

        const charlieHolder = holders.value.items.find(
            holder => holder.owner.toBase58() === charlie.publicKey.toBase58(),
        );
        assert.isNotNull(charlieHolder);
        assert.isTrue(charlieHolder!.balance.eq(bn(700)));
    });

    it('getCompressedMintTokenHolders should handle cursor and limit', async () => {
        // Get first holder with limit 1
        const firstPage = await rpc.getCompressedMintTokenHolders(mint, {
            limit: bn(1),
        });
        assert.equal(firstPage.value.items.length, 1);
        assert.isNotNull(firstPage.value.cursor);

        // Get second holder using cursor
        const secondPage = await rpc.getCompressedMintTokenHolders(mint, {
            cursor: firstPage.value.cursor!,
            limit: bn(1),
        });
        assert.equal(secondPage.value.items.length, 1);

        // Verify we got both holders across the pages
        const allHolders = [
            ...firstPage.value.items,
            ...secondPage.value.items,
        ];
        assert.equal(allHolders.length, 2);
        const hasCharlie = allHolders.some(
            holder => holder.owner.toBase58() === charlie.publicKey.toBase58(),
        );
        const hasBob = allHolders.some(
            holder => holder.owner.toBase58() === bob.publicKey.toBase58(),
        );
        assert.isTrue(hasCharlie && hasBob);
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/tests/e2e/create-token-pool.test.ts
import { describe, it, expect, beforeAll, assert } from 'vitest';
import { CompressedTokenProgram } from '../../src/program';
import { PublicKey, Signer, Keypair, SystemProgram } from '@solana/web3.js';
import {
    unpackMint,
    unpackAccount,
    MINT_SIZE,
    TOKEN_PROGRAM_ID,
    createInitializeMint2Instruction,
} from '@solana/spl-token';
import { createMint, createTokenPool } from '../../src/actions';
import {
    Rpc,
    buildAndSignTx,
    dedupeSigner,
    newAccountWithLamports,
    sendAndConfirmTx,
    getTestRpc,
} from '@lightprotocol/stateless.js';
import { WasmFactory } from '@lightprotocol/hasher.rs';

/**
 * Assert that createTokenPool() creates system-pool account for external mint,
 * with external mintAuthority.
 *
 * Checks both the SPL mint itself (authority, zero supply, decimals, no freeze
 * authority, no extensions) and the derived pool (omnibus) token account
 * (correct mint, zero balance, owned by the program's CPI-authority PDA).
 */
async function assertRegisterMint(
    mint: PublicKey,
    authority: PublicKey,
    rpc: Rpc,
    decimals: number,
    poolAccount: PublicKey,
) {
    const mintAcc = await rpc.getAccountInfo(mint);
    const unpackedMint = unpackMint(mint, mintAcc);

    expect(unpackedMint.mintAuthority?.toString()).toBe(authority.toString());
    expect(unpackedMint.supply).toBe(0n);
    expect(unpackedMint.decimals).toBe(decimals);
    expect(unpackedMint.isInitialized).toBe(true);
    expect(unpackedMint.freezeAuthority).toBe(null);
    expect(unpackedMint.tlvData.length).toBe(0);

    /// Pool (omnibus) account is a regular SPL Token account
    const poolAccountInfo = await rpc.getAccountInfo(poolAccount);
    const unpackedPoolAccount = unpackAccount(poolAccount, poolAccountInfo);

    expect(unpackedPoolAccount.mint.equals(mint)).toBe(true);
    expect(unpackedPoolAccount.amount).toBe(0n);
    expect(
        unpackedPoolAccount.owner.equals(
            CompressedTokenProgram.deriveCpiAuthorityPda,
        ),
    ).toBe(true);
    expect(unpackedPoolAccount.delegate).toBe(null);
}

/**
 * Create a plain (external) SPL mint with `TEST_TOKEN_DECIMALS`, no freeze
 * authority, and `mintAuthority` as mint authority — i.e. a mint that was
 * created outside the compressed-token program and is not yet registered.
 */
async function createTestSplMint(
    rpc: Rpc,
    payer: Signer,
    mintKeypair: Signer,
    mintAuthority: Keypair,
) {
    const rentExemptBalance =
        await rpc.getMinimumBalanceForRentExemption(MINT_SIZE);

    const createMintAccountInstruction = SystemProgram.createAccount({
        fromPubkey: payer.publicKey,
        lamports: rentExemptBalance,
        newAccountPubkey: mintKeypair.publicKey,
        programId: TOKEN_PROGRAM_ID,
        space: MINT_SIZE,
    });
    const initializeMintInstruction = createInitializeMint2Instruction(
        mintKeypair.publicKey,
        TEST_TOKEN_DECIMALS,
        mintAuthority.publicKey,
        null,
        TOKEN_PROGRAM_ID,
    );
    const { blockhash } = await rpc.getLatestBlockhash();

    // mintKeypair must co-sign because it is the newly created account.
    const tx = buildAndSignTx(
        [createMintAccountInstruction, initializeMintInstruction],
        payer,
        blockhash,
        dedupeSigner(payer, [mintKeypair]),
    );
    await sendAndConfirmTx(rpc, tx);
}

const TEST_TOKEN_DECIMALS = 2;

describe('createTokenPool', () => {
    let rpc: Rpc;
    let payer: Signer;
    let mintKeypair: Keypair;
    let mint: PublicKey;
    let mintAuthority: Keypair;

    beforeAll(async () => {
        const lightWasm = await WasmFactory.getInstance();
        rpc = await getTestRpc(lightWasm);
        payer = await newAccountWithLamports(rpc);
        mintAuthority = Keypair.generate();
        mintKeypair = Keypair.generate();
        mint = mintKeypair.publicKey;
        /// Create external SPL mint
        await createTestSplMint(rpc, payer, mintKeypair, mintAuthority);
    });

    it('should register existing spl mint', async () => {
        const poolAccount = CompressedTokenProgram.deriveTokenPoolPda(mint);
        assert(mint.equals(mintKeypair.publicKey));

        /// Mint already exists externally
        await expect(
            createMint(
                rpc,
                payer,
                mintAuthority.publicKey,
                TEST_TOKEN_DECIMALS,
                mintKeypair,
            ),
        ).rejects.toThrow();

        await createTokenPool(rpc, payer, mint);

        await assertRegisterMint(
            mint,
            mintAuthority.publicKey,
            rpc,
            TEST_TOKEN_DECIMALS,
            poolAccount,
        );

        /// Mint already registered
        await expect(createTokenPool(rpc, payer, mint)).rejects.toThrow();
    });

    it('should create mint with payer as authority', async () => {
        /// Create new external SPL mint with payer === authority
        mintKeypair = Keypair.generate();
        mint = mintKeypair.publicKey;
        await createTestSplMint(rpc, payer, mintKeypair, payer as Keypair);

        await createTokenPool(rpc, payer, mint);

        const poolAccount = CompressedTokenProgram.deriveTokenPoolPda(mint);
        await assertRegisterMint(
            mint,
            payer.publicKey,
            rpc,
            TEST_TOKEN_DECIMALS,
            poolAccount,
        );
    });
});
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/program.ts
import {
    PublicKey,
    Keypair,
    TransactionInstruction,
    SystemProgram,
    Connection,
    AddressLookupTableProgram,
    AccountMeta,
} from '@solana/web3.js';
import { BN, Program, AnchorProvider, setProvider } from '@coral-xyz/anchor';
import { IDL, LightCompressedToken } from './idl/light_compressed_token';
import {
    CompressedProof,
    LightSystemProgram,
    ParsedTokenAccount,
    TokenTransferOutputData,
    bn,
    confirmConfig,
    CompressedTokenInstructionDataTransfer,
    defaultStaticAccountsStruct,
    sumUpLamports,
    toArray,
    useWallet,
    validateSameOwner,
    validateSufficientBalance,
    defaultTestStateTreeAccounts,
} from '@lightprotocol/stateless.js';
import {
    MINT_SIZE,
    TOKEN_2022_PROGRAM_ID,
    TOKEN_PROGRAM_ID,
    createInitializeMint2Instruction,
    createMintToInstruction,
} from '@solana/spl-token';
import { CPI_AUTHORITY_SEED, POOL_SEED } from './constants';
import { packCompressedTokenAccounts } from './instructions/pack-compressed-token-accounts';

export type CompressParams = {
    /**
     * The payer of the transaction.
     */
    payer: PublicKey;
    /**
     * owner of the *uncompressed* token account.
     */
    owner: PublicKey;
    /**
     * source (associated) token account address.
     */
    source: PublicKey;
    /**
     * owner of the compressed token account.
     * To compress to a batch of recipients, pass an array of PublicKeys.
     */
    toAddress: PublicKey | PublicKey[];
    /**
     * Mint address of the token to compress.
     */
    mint: PublicKey;
    /**
     * amount of tokens to compress.
     */
    amount: number | BN | number[] | BN[];
    /**
     * The state tree that the tx output should be inserted into. Defaults to a
     * public state tree if unspecified.
     */
    outputStateTree?: PublicKey;
    /**
     * Optional: The token program ID. Default: SPL Token Program ID
     */
    tokenProgramId?: PublicKey;
};

export type CompressSplTokenAccountParams = {
    /**
     * Tx feepayer
     */
    feePayer: PublicKey;
    /**
     * Authority that owns the token account
     */
    authority: PublicKey;
    /**
     * Token account to compress
     */
    tokenAccount: PublicKey;
    /**
     * Mint public key
     */
    mint: PublicKey;
    /**
     * Optional: remaining amount to leave in token account. Default: 0
     */
    remainingAmount?: BN;
    /**
     * The state tree that the compressed token account should be inserted into.
     */
    outputStateTree: PublicKey;
    /**
     * Optional: The token program ID. Default: SPL Token Program ID
     */
    tokenProgramId?: PublicKey;
};

export type DecompressParams = {
    /**
     * The payer of the transaction.
     */
    payer: PublicKey;
    /**
     * input state to be consumed
     */
    inputCompressedTokenAccounts: ParsedTokenAccount[];
    /**
     * address of **uncompressed** destination token account.
     */
    toAddress: PublicKey;
    /**
     * amount of tokens to decompress.
     */
    amount: number | BN;
    /**
     * The recent state root indices of the input state. The expiry is tied to
     * the proof.
     */
    recentInputStateRootIndices: number[];
    /**
     * The recent validity proof for state inclusion of the input state. It
     * expires after n slots.
     */
    recentValidityProof: CompressedProof;
    /**
     * The state tree that the change tx output should be inserted into.
     * Defaults to a public state tree if unspecified.
     */
    outputStateTree?: PublicKey;
    /**
     * Optional: The token program ID. Default: SPL Token Program ID
     */
    tokenProgramId?: PublicKey;
};

export type TransferParams = {
    /**
     * The payer of the transaction
     */
    payer: PublicKey;
    /**
     * The input state to be consumed
     */
    inputCompressedTokenAccounts: ParsedTokenAccount[];
    /**
     * Recipient address
     */
    toAddress: PublicKey;
    /**
     * Amount of tokens to transfer
     */
    amount: BN | number;
    /**
     * The recent state root indices of the input state. The expiry is tied to
     * the proof.
     */
    recentInputStateRootIndices: number[];
    /**
     * The recent validity proof for state inclusion of the input state. It
     * expires after n slots.
     */
    recentValidityProof: CompressedProof;
    /**
     * The state trees that the tx output should be inserted into. This can be a
     * single PublicKey or an array of PublicKey. Defaults to the 0th state tree
     * of input state.
     */
    outputStateTrees?: PublicKey[] | PublicKey;
};

/**
 * Create Mint account for compressed Tokens
 */
export type CreateMintParams = {
    /**
     * Tx feepayer
     */
    feePayer: PublicKey;
    /**
     * Mint authority
     */
    authority: PublicKey;
    /**
     * Mint public key
     */
    mint: PublicKey;
    /**
     * Mint decimals
     */
    decimals: number;
    /**
     * Optional: freeze authority
     */
    freezeAuthority: PublicKey | null;
    /**
     * lamport amount for mint account rent exemption
     */
    rentExemptBalance: number;
    /**
     * Optional: The token program ID. Default: SPL Token Program ID
     */
    tokenProgramId?: PublicKey;
};

/**
 * Parameters for merging compressed token accounts
 */
export type MergeTokenAccountsParams = {
    /**
     * Tx feepayer
     */
    payer: PublicKey;
    /**
     * Owner of the token accounts to be merged
     */
    owner: PublicKey;
    /**
     * Mint public key
     */
    mint: PublicKey;
    /**
     * Array of compressed token accounts to merge
     */
    inputCompressedTokenAccounts: ParsedTokenAccount[];
    /**
     * Optional: Public key of the state tree to merge into
     */
    outputStateTree: PublicKey;
    /**
     * Optional: Recent validity proof for state inclusion
     */
    recentValidityProof: CompressedProof;
    /**
     * Optional: Recent state root indices of the input state
     */
    recentInputStateRootIndices: number[];
};

/**
 * Create compressed token accounts
 */
export type MintToParams = {
    /**
     * Tx feepayer
     */
    feePayer: PublicKey;
    /**
     * Mint authority
     */
    authority: PublicKey;
    /**
     * Mint public key
     */
    mint: PublicKey;
    /**
     * The Solana Public Keys to mint to.
     */
    toPubkey: PublicKey[] | PublicKey;
    /**
     * The amount of compressed tokens to mint.
     */
    amount: BN | BN[] | number | number[];
    /**
     * Public key of the state tree to mint into. Defaults to a public state
     * tree if unspecified.
     */
    merkleTree?: PublicKey;
    /**
     * Optional: The token program ID. Default: SPL Token Program ID
     */
    tokenProgramId?: PublicKey;
};

/**
 * Register an existing SPL mint account to the compressed token program
 * Creates an omnibus account for the mint
 */
export type RegisterMintParams = {
    /** Tx feepayer */
    feePayer: PublicKey;
    /** Mint public key */
    mint: PublicKey;
    /**
     * Optional: The token program ID. Default: SPL Token Program ID
     */
    tokenProgramId?: PublicKey;
};

/**
 * Mint from existing SPL mint to compressed token accounts
 */
export type ApproveAndMintToParams = {
    /**
     * Tx feepayer
     */
    feePayer: PublicKey;
    /**
     * Mint authority
     */
    authority: PublicKey;
    /**
     * Mint authority (associated) token account
     */
    authorityTokenAccount: PublicKey;
    /**
     * Mint public key
     */
    mint: PublicKey;
    /**
     * The Solana Public Key to mint to.
     */
    toPubkey: PublicKey;
    /**
     * The amount of compressed tokens to mint.
     */
    amount: BN | number;
    /**
     * Public key of the state tree to mint into. Defaults to a public state
     * tree if unspecified.
     */
    merkleTree?: PublicKey;
    /**
     * Optional: The token program ID. Default: SPL Token Program ID
     */
    tokenProgramId?: PublicKey;
};

export type CreateTokenProgramLookupTableParams = {
    /**
     * The payer of the transaction.
     */
    payer: PublicKey;
    /**
     * The authority of the transaction.
     */
    authority: PublicKey;
    /**
     * Recently finalized Solana slot.
     */
    recentSlot: number;
    /**
     * Optional Mint addresses to store in the lookup table.
     */
    mints?: PublicKey[];
    /**
     * Optional additional addresses to store in the lookup table.
     */
    remainingAccounts?: PublicKey[];
};

/**
 * Sum up the token amounts of the compressed token accounts
 */
export const sumUpTokenAmount = (accounts: ParsedTokenAccount[]): BN => {
    return accounts.reduce(
        (acc, account: ParsedTokenAccount) => acc.add(account.parsed.amount),
        bn(0),
    );
};

/**
 * Validate that all the compressed token accounts are owned by the same owner.
 */
export const validateSameTokenOwner = (accounts: ParsedTokenAccount[]) => {
    const owner = accounts[0].parsed.owner;
    accounts.forEach(acc => {
        if (!acc.parsed.owner.equals(owner)) {
            throw new Error('Token accounts must be owned by the same owner');
        }
    });
};

/**
 * Parse compressed token accounts to get the mint, current owner and delegate.
 *
 * Reads only the first account; callers are expected to have validated that
 * all inputs share the same mint/owner.
 */
export const parseTokenData = (
    compressedTokenAccounts: ParsedTokenAccount[],
) => {
    const mint = compressedTokenAccounts[0].parsed.mint;
    const currentOwner = compressedTokenAccounts[0].parsed.owner;
    const delegate = compressedTokenAccounts[0].parsed.delegate;
    return { mint, currentOwner, delegate };
};

/**
 * Create the output state for a transfer transaction.
 * @param inputCompressedTokenAccounts  Input state
 * @param toAddress                     Recipient address
 * @param amount                        Amount of tokens to transfer
 * @returns Output token data for the transfer
 * instruction
 */
export function createTransferOutputState(
    inputCompressedTokenAccounts: ParsedTokenAccount[],
    toAddress: PublicKey,
    amount: number | BN,
): TokenTransferOutputData[] {
    amount = bn(amount);
    const inputAmount = sumUpTokenAmount(inputCompressedTokenAccounts);
    const inputLamports = sumUpLamports(
        inputCompressedTokenAccounts.map(acc => acc.compressedAccount),
    );

    const changeAmount = inputAmount.sub(amount);

    validateSufficientBalance(changeAmount);

    // Exact-amount transfer with no lamports: single output, no change
    // account. NOTE(review): this branch returns before the same-owner
    // validations below run — presumably the on-chain program re-validates;
    // confirm.
    if (changeAmount.eq(bn(0)) && inputLamports.eq(bn(0))) {
        return [
            {
                owner: toAddress,
                amount,
                lamports: inputLamports,
                tlv: null,
            },
        ];
    }

    /// validates token program
    validateSameOwner(
        inputCompressedTokenAccounts.map(acc => acc.compressedAccount),
    );
    validateSameTokenOwner(inputCompressedTokenAccounts);

    const outputCompressedAccounts: TokenTransferOutputData[] = [
        {
            owner: inputCompressedTokenAccounts[0].parsed.owner,
            amount: changeAmount,
            lamports: inputLamports,
            tlv: null,
        },
        {
            owner: toAddress,
            amount,
            lamports: bn(0),
            tlv: null,
        },
    ];
    return outputCompressedAccounts;
}

/**
 * Create the output state for a decompress transaction.
* @param inputCompressedTokenAccounts  Input state
 * @param amount                        Amount of tokens to decompress
 * @returns Output token data for the decompress
 * instruction
 */
export function createDecompressOutputState(
    inputCompressedTokenAccounts: ParsedTokenAccount[],
    amount: number | BN,
): TokenTransferOutputData[] {
    amount = bn(amount);
    const inputLamports = sumUpLamports(
        inputCompressedTokenAccounts.map(acc => acc.compressedAccount),
    );
    const inputAmount = sumUpTokenAmount(inputCompressedTokenAccounts);
    const changeAmount = inputAmount.sub(amount);

    validateSufficientBalance(changeAmount);

    /// lamports gets decompressed
    if (changeAmount.eq(bn(0)) && inputLamports.eq(bn(0))) {
        return [];
    }

    validateSameOwner(
        inputCompressedTokenAccounts.map(acc => acc.compressedAccount),
    );
    validateSameTokenOwner(inputCompressedTokenAccounts);

    // Only a change account remains compressed; the decompressed portion
    // leaves the compressed state entirely.
    const tokenTransferOutputs: TokenTransferOutputData[] = [
        {
            owner: inputCompressedTokenAccounts[0].parsed.owner,
            amount: changeAmount,
            lamports: inputLamports,
            tlv: null,
        },
    ];
    return tokenTransferOutputs;
}

export class CompressedTokenProgram {
    /**
     * @internal
     */
    constructor() {}

    /**
     * Public key that identifies the CompressedPda program
     */
    static programId: PublicKey = new PublicKey(
        'cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m',
    );

    /**
     * Set a custom programId via PublicKey or base58 encoded string.
     * This method is not required for regular usage.
     *
     * Use this only if you know what you are doing.
     */
    static setProgramId(programId: PublicKey | string) {
        this.programId =
            typeof programId === 'string'
                ? new PublicKey(programId)
                : programId;
        // Reset program when programId changes
        this._program = null;
    }

    // Lazily-created anchor Program instance, cached across calls.
    private static _program: Program<LightCompressedToken> | null = null;

    /** @internal */
    static get program(): Program<LightCompressedToken> {
        if (!this._program) {
            this.initializeProgram();
        }
        return this._program!;
    }

    /**
     * @internal
     * Initializes the program statically if not already initialized.
     */
    private static initializeProgram() {
        if (!this._program) {
            /// Note: We can use a mock connection because we're using the
            /// program only for serde and building instructions, not for
            /// interacting with the network.
            const mockKeypair = Keypair.generate();
            const mockConnection = new Connection(
                'http://127.0.0.1:8899',
                'confirmed',
            );
            const mockProvider = new AnchorProvider(
                mockConnection,
                useWallet(mockKeypair),
                confirmConfig,
            );
            setProvider(mockProvider);
            this._program = new Program(IDL, this.programId, mockProvider);
        }
    }

    /** @internal */
    static deriveTokenPoolPda(mint: PublicKey): PublicKey {
        const seeds = [POOL_SEED, mint.toBuffer()];
        const [address, _] = PublicKey.findProgramAddressSync(
            seeds,
            this.programId,
        );
        return address;
    }

    /** @internal */
    static get deriveCpiAuthorityPda(): PublicKey {
        const [address, _] = PublicKey.findProgramAddressSync(
            [CPI_AUTHORITY_SEED],
            this.programId,
        );
        return address;
    }

    /**
     * Construct createMint instruction for compressed tokens
     *
     * Returns three instructions: create the SPL mint account, initialize it,
     * and register the token pool with the compressed token program.
     */
    static async createMint(
        params: CreateMintParams,
    ): Promise<TransactionInstruction[]> {
        const { mint, authority, feePayer, rentExemptBalance, tokenProgramId } =
            params;

        const tokenProgram = tokenProgramId ?? TOKEN_PROGRAM_ID;

        /// Create and initialize SPL Mint account
        const createMintAccountInstruction = SystemProgram.createAccount({
            fromPubkey: feePayer,
            lamports: rentExemptBalance,
            newAccountPubkey: mint,
            programId: tokenProgram,
            space: MINT_SIZE,
        });
        const initializeMintInstruction = createInitializeMint2Instruction(
            mint,
            params.decimals,
            authority,
            params.freezeAuthority,
            tokenProgram,
        );
        const ix = await this.createTokenPool({
            feePayer,
            mint,
            tokenProgramId: tokenProgram,
        });

        return [createMintAccountInstruction, initializeMintInstruction, ix];
    }

    /**
     * Enable compression for an existing SPL mint, creating an omnibus account.
     * For new mints, use `CompressedTokenProgram.createMint`.
     */
    static async createTokenPool(
        params: RegisterMintParams,
    ): Promise<TransactionInstruction> {
        const { mint, feePayer, tokenProgramId } = params;

        const tokenProgram = tokenProgramId ?? TOKEN_PROGRAM_ID;

        const tokenPoolPda = this.deriveTokenPoolPda(mint);

        const ix = await this.program.methods
            .createTokenPool()
            .accounts({
                mint,
                feePayer,
                tokenPoolPda,
                systemProgram: SystemProgram.programId,
                tokenProgram,
                cpiAuthorityPda: this.deriveCpiAuthorityPda,
            })
            .instruction();
        return ix;
    }

    /**
     * Construct mintTo instruction for compressed tokens
     */
    static async mintTo(params: MintToParams): Promise<TransactionInstruction> {
        const systemKeys = defaultStaticAccountsStruct();

        const {
            mint,
            feePayer,
            authority,
            merkleTree,
            toPubkey,
            amount,
            tokenProgramId,
        } = params;

        const tokenProgram = tokenProgramId ?? TOKEN_PROGRAM_ID;

        const tokenPoolPda = this.deriveTokenPoolPda(mint);

        // Normalize scalar-or-array params into parallel arrays.
        const amounts = toArray<BN | number>(amount).map(amount => bn(amount));
        const toPubkeys = toArray(toPubkey);

        if (amounts.length !== toPubkeys.length) {
            throw new Error(
                'Amount and toPubkey arrays must have the same length',
            );
        }

        const instruction = await this.program.methods
            .mintTo(toPubkeys, amounts, null)
            .accounts({
                feePayer,
                authority,
                cpiAuthorityPda: this.deriveCpiAuthorityPda,
                mint,
                tokenPoolPda,
                tokenProgram,
                lightSystemProgram: LightSystemProgram.programId,
                registeredProgramPda: systemKeys.registeredProgramPda,
                noopProgram: systemKeys.noopProgram,
                accountCompressionAuthority:
                    systemKeys.accountCompressionAuthority,
                accountCompressionProgram: systemKeys.accountCompressionProgram,
                merkleTree: merkleTree ??
defaultTestStateTreeAccounts().merkleTree,
                selfProgram: this.programId,
                solPoolPda: null,
            })
            .instruction();

        return instruction;
    }

    /// TODO: add compressBatch functionality for batch minting
    /**
     * Mint tokens from registered SPL mint account to a compressed account
     *
     * Returns two instructions: a regular SPL `mintTo` into the authority's
     * token account, followed by a compress from that account to `toPubkey`.
     */
    static async approveAndMintTo(params: ApproveAndMintToParams) {
        const {
            mint,
            feePayer,
            authorityTokenAccount,
            authority,
            merkleTree,
            toPubkey,
            tokenProgramId,
        } = params;

        // spl-token expects bigint amounts.
        const amount: bigint = BigInt(params.amount.toString());

        /// 1. Mint to existing ATA of mintAuthority.
        const splMintToInstruction = createMintToInstruction(
            mint,
            authorityTokenAccount,
            authority,
            amount,
            [],
            tokenProgramId,
        );

        /// 2. Compress from mint authority ATA to recipient compressed account
        const compressInstruction = await this.compress({
            payer: feePayer,
            owner: authority,
            source: authorityTokenAccount,
            toAddress: toPubkey,
            mint,
            amount: params.amount,
            outputStateTree: merkleTree,
            tokenProgramId,
        });

        return [splMintToInstruction, compressInstruction];
    }

    /**
     * Construct transfer instruction for compressed tokens
     */
    static async transfer(
        params: TransferParams,
    ): Promise<TransactionInstruction> {
        const {
            payer,
            inputCompressedTokenAccounts,
            recentInputStateRootIndices,
            recentValidityProof,
            amount,
            outputStateTrees,
            toAddress,
        } = params;

        const tokenTransferOutputs: TokenTransferOutputData[] =
            createTransferOutputState(
                inputCompressedTokenAccounts,
                toAddress,
                amount,
            );

        const {
            inputTokenDataWithContext,
            packedOutputTokenData,
            remainingAccountMetas,
        } = packCompressedTokenAccounts({
            inputCompressedTokenAccounts,
            outputStateTrees,
            rootIndices: recentInputStateRootIndices,
            tokenTransferOutputs,
        });

        const { mint, currentOwner } = parseTokenData(
            inputCompressedTokenAccounts,
        );

        const data: CompressedTokenInstructionDataTransfer = {
            proof: recentValidityProof,
            mint,
            delegatedTransfer: null, // TODO: implement
            inputTokenDataWithContext,
            outputCompressedAccounts: packedOutputTokenData,
            compressOrDecompressAmount: null,
            isCompress: false,
            cpiContext: null,
            lamportsChangeAccountMerkleTreeIndex: null,
        };

        const encodedData = this.program.coder.types.encode(
            'CompressedTokenInstructionDataTransfer',
            data,
        );

        const {
            accountCompressionAuthority,
            noopProgram,
            registeredProgramPda,
            accountCompressionProgram,
        } = defaultStaticAccountsStruct();

        const instruction = await this.program.methods
            .transfer(encodedData)
            .accounts({
                // both values are non-null by type; no assertions needed
                feePayer: payer,
                authority: currentOwner,
                cpiAuthorityPda: this.deriveCpiAuthorityPda,
                lightSystemProgram: LightSystemProgram.programId,
                registeredProgramPda: registeredProgramPda,
                noopProgram: noopProgram,
                accountCompressionAuthority: accountCompressionAuthority,
                accountCompressionProgram: accountCompressionProgram,
                selfProgram: this.programId,
                tokenPoolPda: null,
                compressOrDecompressTokenAccount: null,
                tokenProgram: null,
            })
            .remainingAccounts(remainingAccountMetas)
            .instruction();

        return instruction;
    }

    /**
     * Create lookup table instructions for the token program's default accounts.
     */
    static async createTokenProgramLookupTable(
        params: CreateTokenProgramLookupTableParams,
    ) {
        const { authority, mints, recentSlot, payer, remainingAccounts } =
            params;

        const [createInstruction, lookupTableAddress] =
            AddressLookupTableProgram.createLookupTable({
                authority,
                // FIX: previously `payer: authority`, which ignored the
                // documented `payer` param and made the authority pay the
                // creation fee; use the same fee payer as extendLookupTable.
                payer,
                recentSlot,
            });

        let optionalMintKeys: PublicKey[] = [];
        if (mints) {
            optionalMintKeys = [
                ...mints,
                ...mints.map(mint => this.deriveTokenPoolPda(mint)),
            ];
        }

        const extendInstruction = AddressLookupTableProgram.extendLookupTable({
            payer,
            authority,
            lookupTable: lookupTableAddress,
            addresses: [
                this.deriveCpiAuthorityPda,
                LightSystemProgram.programId,
                defaultStaticAccountsStruct().registeredProgramPda,
                defaultStaticAccountsStruct().noopProgram,
                defaultStaticAccountsStruct().accountCompressionAuthority,
                defaultStaticAccountsStruct().accountCompressionProgram,
                defaultTestStateTreeAccounts().merkleTree,
                defaultTestStateTreeAccounts().nullifierQueue,
                defaultTestStateTreeAccounts().addressTree,
                defaultTestStateTreeAccounts().addressQueue,
                this.programId,
                TOKEN_PROGRAM_ID,
                TOKEN_2022_PROGRAM_ID,
                authority,
                ...optionalMintKeys,
                ...(remainingAccounts ?? []),
            ],
        });

        return {
            instructions: [createInstruction, extendInstruction],
            address: lookupTableAddress,
        };
    }

    /**
     * Create compress instruction
     * @returns compressInstruction
     */
    static async compress(
        params: CompressParams,
    ): Promise<TransactionInstruction> {
        const {
            payer,
            owner,
            source,
            toAddress,
            mint,
            outputStateTree,
            tokenProgramId,
        } = params;

        // amount and toAddress must be either both batched or both scalar.
        if (Array.isArray(params.amount) !== Array.isArray(params.toAddress)) {
            throw new Error(
                'Both amount and toAddress must be arrays or both must be single values',
            );
        }

        let tokenTransferOutputs: TokenTransferOutputData[];
        if (Array.isArray(params.amount) && Array.isArray(params.toAddress)) {
            if (params.amount.length !== params.toAddress.length) {
                throw new Error(
                    'Amount and toAddress arrays must have the same length',
                );
            }
            tokenTransferOutputs = params.amount.map((amt, index) => {
                const amount = bn(amt);
                return {
                    owner: (params.toAddress as PublicKey[])[index],
                    amount,
                    lamports: bn(0),
                    tlv: null,
                };
            });
        } else {
            tokenTransferOutputs = [
                {
                    owner: toAddress as PublicKey,
                    amount: bn(params.amount as number | BN),
                    lamports: bn(0),
                    tlv: null,
                },
            ];
        }

        const {
            inputTokenDataWithContext,
            packedOutputTokenData,
            remainingAccountMetas,
        } = packCompressedTokenAccounts({
            inputCompressedTokenAccounts: [],
            outputStateTrees: outputStateTree,
            rootIndices: [],
            tokenTransferOutputs,
        });

        const data: CompressedTokenInstructionDataTransfer = {
            proof: null,
            mint,
            delegatedTransfer: null, // TODO: implement
            inputTokenDataWithContext,
            outputCompressedAccounts: packedOutputTokenData,
            compressOrDecompressAmount: Array.isArray(params.amount) ?
params.amount
                      // batch: compressed amount is the sum of all outputs
                      .map(amt => new BN(amt))
                      .reduce((sum, amt) => sum.add(amt), new BN(0))
                : new BN(params.amount),
            isCompress: true,
            cpiContext: null,
            lamportsChangeAccountMerkleTreeIndex: null,
        };

        const encodedData = this.program.coder.types.encode(
            'CompressedTokenInstructionDataTransfer',
            data,
        );

        const tokenProgram = tokenProgramId ?? TOKEN_PROGRAM_ID;

        const instruction = await this.program.methods
            .transfer(encodedData)
            .accounts({
                feePayer: payer,
                authority: owner,
                cpiAuthorityPda: this.deriveCpiAuthorityPda,
                lightSystemProgram: LightSystemProgram.programId,
                registeredProgramPda:
                    defaultStaticAccountsStruct().registeredProgramPda,
                noopProgram: defaultStaticAccountsStruct().noopProgram,
                accountCompressionAuthority:
                    defaultStaticAccountsStruct().accountCompressionAuthority,
                accountCompressionProgram:
                    defaultStaticAccountsStruct().accountCompressionProgram,
                selfProgram: this.programId,
                tokenPoolPda: this.deriveTokenPoolPda(mint),
                compressOrDecompressTokenAccount: source, // token
                tokenProgram,
            })
            .remainingAccounts(remainingAccountMetas)
            .instruction();

        return instruction;
    }

    /**
     * Construct decompress instruction
     *
     * Consumes compressed input state and pays out `amount` to the
     * *uncompressed* SPL token account `toAddress`; any change stays
     * compressed.
     */
    static async decompress(
        params: DecompressParams,
    ): Promise<TransactionInstruction> {
        const {
            payer,
            inputCompressedTokenAccounts,
            toAddress,
            outputStateTree,
            recentValidityProof,
            recentInputStateRootIndices,
            tokenProgramId,
        } = params;
        const amount = bn(params.amount);

        const tokenTransferOutputs = createDecompressOutputState(
            inputCompressedTokenAccounts,
            amount,
        );

        /// Pack
        const {
            inputTokenDataWithContext,
            packedOutputTokenData,
            remainingAccountMetas,
        } = packCompressedTokenAccounts({
            inputCompressedTokenAccounts,
            outputStateTrees: outputStateTree,
            rootIndices: recentInputStateRootIndices,
            tokenTransferOutputs: tokenTransferOutputs,
        });

        const { mint, currentOwner } = parseTokenData(
            inputCompressedTokenAccounts,
        );

        const data: CompressedTokenInstructionDataTransfer = {
            proof: recentValidityProof,
            mint,
            delegatedTransfer: null, // TODO: implement
            inputTokenDataWithContext,
            outputCompressedAccounts: packedOutputTokenData,
            compressOrDecompressAmount: amount,
            isCompress: false,
            cpiContext: null,
            lamportsChangeAccountMerkleTreeIndex: null,
        };

        const encodedData = this.program.coder.types.encode(
            'CompressedTokenInstructionDataTransfer',
            data,
        );

        const {
            accountCompressionAuthority,
            noopProgram,
            registeredProgramPda,
            accountCompressionProgram,
        } = defaultStaticAccountsStruct();

        const tokenProgram = tokenProgramId ?? TOKEN_PROGRAM_ID;

        const instruction = await this.program.methods
            .transfer(encodedData)
            .accounts({
                feePayer: payer,
                authority: currentOwner,
                cpiAuthorityPda: this.deriveCpiAuthorityPda,
                lightSystemProgram: LightSystemProgram.programId,
                registeredProgramPda: registeredProgramPda,
                noopProgram: noopProgram,
                accountCompressionAuthority: accountCompressionAuthority,
                accountCompressionProgram: accountCompressionProgram,
                selfProgram: this.programId,
                tokenPoolPda: this.deriveTokenPoolPda(mint),
                // recipient *uncompressed* SPL token account
                compressOrDecompressTokenAccount: toAddress,
                tokenProgram,
            })
            .remainingAccounts(remainingAccountMetas)
            .instruction();

        return instruction;
    }

    /**
     * Merge multiple compressed token accounts of the same mint/owner into
     * one, implemented as a self-transfer of the summed amount.
     */
    static async mergeTokenAccounts(
        params: MergeTokenAccountsParams,
    ): Promise<TransactionInstruction[]> {
        const {
            payer,
            owner,
            inputCompressedTokenAccounts,
            outputStateTree,
            recentValidityProof,
            recentInputStateRootIndices,
        } = params;

        // NOTE(review): cap of 3 inputs per instruction — presumably bounded
        // by transaction size / proof limits; confirm before raising.
        if (inputCompressedTokenAccounts.length > 3) {
            throw new Error('Cannot merge more than 3 token accounts at once');
        }

        const ix = await this.transfer({
            payer,
            inputCompressedTokenAccounts,
            toAddress: owner,
            amount: inputCompressedTokenAccounts.reduce(
                (sum, account) => sum.add(account.parsed.amount),
                new BN(0),
            ),
            outputStateTrees: outputStateTree,
            recentInputStateRootIndices,
            recentValidityProof,
        });

        return [ix];
    }

    /**
     * Compress the full balance of an existing SPL token account (minus
     * `remainingAmount`, if given) into a compressed token account owned by
     * `authority`.
     */
    static async compressSplTokenAccount(
        params: CompressSplTokenAccountParams,
    ): Promise<TransactionInstruction> {
        const {
            feePayer,
            authority,
            tokenAccount,
            mint,
            remainingAmount,
            outputStateTree,
            tokenProgramId,
        } = params;

        const tokenProgram = tokenProgramId ?? TOKEN_PROGRAM_ID;

        const remainingAccountMetas: AccountMeta[] = [
            {
                pubkey: outputStateTree,
                isSigner: false,
                isWritable: true,
            },
        ];

        const instruction = await this.program.methods
            .compressSplTokenAccount(authority, remainingAmount ?? null, null)
            .accounts({
                feePayer,
                authority,
                cpiAuthorityPda: this.deriveCpiAuthorityPda,
                lightSystemProgram: LightSystemProgram.programId,
                registeredProgramPda:
                    defaultStaticAccountsStruct().registeredProgramPda,
                noopProgram: defaultStaticAccountsStruct().noopProgram,
                accountCompressionAuthority:
                    defaultStaticAccountsStruct().accountCompressionAuthority,
                accountCompressionProgram:
                    defaultStaticAccountsStruct().accountCompressionProgram,
                selfProgram: this.programId,
                tokenPoolPda: this.deriveTokenPoolPda(mint),
                compressOrDecompressTokenAccount: tokenAccount,
                tokenProgram,
                systemProgram: SystemProgram.programId,
            })
            .remainingAccounts(remainingAccountMetas)
            .instruction();

        return instruction;
    }

    /**
     * Return the owning program of `mint` (Token vs Token-2022), or undefined
     * if the account does not exist.
     *
     * NOTE(review): snake_case name breaks the class's camelCase convention,
     * but renaming would break callers — keep, or deprecate in favor of a
     * camelCase alias.
     */
    static async get_mint_program_id(
        mint: PublicKey,
        connection: Connection,
    ): Promise<PublicKey | undefined> {
        return (await connection.getAccountInfo(mint))?.owner;
    }
}
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/types.ts
import { PublicKey } from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import { CompressedProof } from '@lightprotocol/stateless.js';

/// TODO: remove index_mt_account on-chain. passed as part of
/// CompressedTokenInstructionDataInvoke

/** Output token account of a compressed-token transfer (unpacked form). */
export type TokenTransferOutputData = {
    /** The owner of the output token account */
    owner: PublicKey;
    /** The amount of tokens of the output token account */
    amount: BN;
    /** Lamports associated with the output token account */
    lamports: BN | null;
    /** TokenExtension tlv */
    tlv: Buffer | null;
};

/**
 * Output token account in packed form: the Merkle tree is referenced by its
 * index in the instruction's remaining accounts instead of by pubkey.
 */
export type PackedTokenTransferOutputData = {
    /** The owner of the output token account */
    owner: PublicKey;
    /** The amount of tokens of the output token account */
    amount: BN;
    /** Lamports associated with the output token account */
    lamports: BN | null;
    /** Merkle tree pubkey index in remaining accounts */
    merkleTreeIndex: number;
    /** TokenExtension tlv */
    tlv: Buffer | null;
};

/**
 * Input compressed token account with its packed Merkle context.
 *
 * NOTE(review): this layout differs from the on-chain IDL's
 * `InputTokenDataWithContext` (which nests a `PackedMerkleContext` and
 * carries a `rootIndex`) — confirm this type is still in sync with the
 * program.
 */
export type InputTokenDataWithContext = {
    /** The amount of tokens to transfer */
    amount: BN;
    /** Optional: The index of the delegate in remaining accounts */
    delegateIndex: number | null;
    /** The index of the merkle tree address in remaining accounts */
    merkleTreePubkeyIndex: number;
    /** The index of the nullifier queue address in remaining accounts */
    nullifierQueuePubkeyIndex: number;
    /** The index of the leaf in the merkle tree */
    leafIndex: number;
    /** Lamports in the input token account. */
    lamports: BN | null;
    /** TokenExtension tlv */
    tlv: Buffer | null;
};

/** Instruction data of a compressed-token invoke (transfer). */
export type CompressedTokenInstructionDataInvoke = {
    /** Validity proof */
    proof: CompressedProof | null;
    /** The root indices of the transfer */
    rootIndices: number[];
    /** The mint of the transfer */
    mint: PublicKey;
    /**
     * Whether the signer is a delegate
     * TODO: implement delegated transfer struct
     */
    delegatedTransfer: null;
    /** Input token data with packed merkle context */
    inputTokenDataWithContext: InputTokenDataWithContext[];
    /** Data of the output token accounts */
    outputCompressedAccounts: TokenTransferOutputData[];
    /**
     * The indices of the output state merkle tree accounts in 'remaining
     * accounts'
     */
    outputStateMerkleTreeAccountIndices: Buffer;
    /** The index of the Merkle tree for a lamport change account. */
    lamportsChangeAccountMerkleTreeIndex: number | null;
};

/** Parsed contents of a compressed token account. */
export type TokenData = {
    /** The mint associated with this account */
    mint: PublicKey;
    /** The owner of this account */
    owner: PublicKey;
    /**
     * If `delegate` is `Some` then `delegated_amount` represents the amount
     * authorized by the delegate
     */
    amount: BN;
    /** The delegate authorized on this account, if any */
    delegate: PublicKey | null;
    /**
     * The account's state.
     * NOTE(review): the IDL models this as the `AccountState` enum
     * (Initialized / Frozen) — confirm the numeric mapping.
     */
    state: number;
    /** TokenExtension tlv */
    tlv: Buffer | null;
};
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/constants.ts
export const POOL_SEED = Buffer.from('pool'); export const CPI_AUTHORITY_SEED = Buffer.from('cpi_authority'); export const SPL_TOKEN_MINT_RENT_EXEMPT_BALANCE = 1461600;
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/index.ts
export * from './idl'; export * from './instructions'; export * from './constants'; export * from './program'; export * from './types'; export * from './actions';
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/idl/index.ts
export * from './light_compressed_token';
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/idl/light_compressed_token.ts
export type LightCompressedToken = { version: '1.2.0'; name: 'light_compressed_token'; instructions: [ { name: 'createTokenPool'; docs: [ 'This instruction creates a token pool for a given mint. Every spl mint', 'can have one token pool. When a token is compressed the tokens are', 'transferrred to the token pool, and their compressed equivalent is', 'minted into a Merkle tree.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: true; isSigner: false; }, { name: 'tokenProgram'; isMut: false; isSigner: false; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, ]; args: []; }, { name: 'mintTo'; docs: [ 'Mints tokens from an spl token mint to a list of compressed accounts.', 'Minted tokens are transferred to a pool account owned by the compressed', 'token program. The instruction creates one compressed output account for', 'every amount and pubkey input pair. A constant amount of lamports can be', 'transferred to each output account to enable. A use case to add lamports', 'to a compressed token account is to prevent spam. 
This is the only way', 'to add lamports to a compressed token account.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: true; isSigner: false; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; }, { name: 'tokenProgram'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; docs: ['programs']; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'merkleTree'; isMut: true; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'solPoolPda'; isMut: true; isSigner: false; isOptional: true; }, ]; args: [ { name: 'publicKeys'; type: { vec: 'publicKey'; }; }, { name: 'amounts'; type: { vec: 'u64'; }; }, { name: 'lamports'; type: { option: 'u64'; }; }, ]; }, { name: 'compressSplTokenAccount'; docs: [ 'Compresses the balance of an spl token account sub an optional remaining', 'amount. This instruction does not close the spl token account. 
To close', 'the account bundle a close spl account instruction in your transaction.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'compressOrDecompressTokenAccount'; isMut: true; isSigner: false; isOptional: true; }, { name: 'tokenProgram'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'owner'; type: 'publicKey'; }, { name: 'remainingAmount'; type: { option: 'u64'; }; }, { name: 'cpiContext'; type: { option: { defined: 'CompressedCpiContext'; }; }; }, ]; }, { name: 'transfer'; docs: [ 'Transfers compressed tokens from one account to another. All accounts', 'must be of the same mint. Additional spl tokens can be compressed or', 'decompressed. In one transaction only compression or decompression is', 'possible. Lamports can be transferred alongside tokens. If output token', 'accounts specify less lamports than inputs the remaining lamports are', 'transferred to an output compressed account. Signer must be owner or', 'delegate. 
If a delegated token account is transferred the delegate is', 'not preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'compressOrDecompressTokenAccount'; isMut: true; isSigner: false; isOptional: true; }, { name: 'tokenProgram'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'approve'; docs: [ 'Delegates an amount to a delegate. A compressed token account is either', 'completely delegated or not. Prior delegates are not preserved. Cannot', 'be called by a delegate.', 'The instruction creates two output accounts:', '1. one account with delegated amount', '2. 
one account with remaining(change) amount', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'revoke'; docs: [ 'Revokes a delegation. The instruction merges all inputs into one output', 'account. Cannot be called by a delegate. 
Delegates are not preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'freeze'; docs: [ 'Freezes compressed token accounts. Inputs must not be frozen. Creates as', 'many outputs as inputs. 
Balances and delegates are preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['that this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'thaw'; docs: [ 'Thaws frozen compressed token accounts. Inputs must be frozen. Creates', 'as many outputs as inputs. Balances and delegates are preserved.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['that this program is the signer of the cpi.']; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'burn'; docs: [ 'Burns compressed tokens and spl tokens from the pool account. Delegates', 'can burn tokens. 
The output compressed token account remains delegated.', 'Creates one output compressed token account.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'mint'; isMut: true; isSigner: false; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; }, { name: 'tokenProgram'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs'; type: 'bytes'; }, ]; }, { name: 'stubIdlBuild'; docs: [ 'This function is a stub to allow Anchor to include the input types in', 'the IDL. 
It should not be included in production builds nor be called in', 'practice.', ]; accounts: [ { name: 'feePayer'; isMut: true; isSigner: true; docs: ['UNCHECKED: only pays fees.']; }, { name: 'authority'; isMut: false; isSigner: true; docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ]; }, { name: 'cpiAuthorityPda'; isMut: false; isSigner: false; }, { name: 'lightSystemProgram'; isMut: false; isSigner: false; }, { name: 'registeredProgramPda'; isMut: false; isSigner: false; }, { name: 'noopProgram'; isMut: false; isSigner: false; }, { name: 'accountCompressionAuthority'; isMut: false; isSigner: false; }, { name: 'accountCompressionProgram'; isMut: false; isSigner: false; }, { name: 'selfProgram'; isMut: false; isSigner: false; docs: ['this program is the signer of the cpi.']; }, { name: 'tokenPoolPda'; isMut: true; isSigner: false; isOptional: true; }, { name: 'compressOrDecompressTokenAccount'; isMut: true; isSigner: false; isOptional: true; }, { name: 'tokenProgram'; isMut: false; isSigner: false; isOptional: true; }, { name: 'systemProgram'; isMut: false; isSigner: false; }, ]; args: [ { name: 'inputs1'; type: { defined: 'CompressedTokenInstructionDataTransfer'; }; }, { name: 'inputs2'; type: { defined: 'TokenData'; }; }, ]; }, ]; types: [ { name: 'AccessMetadata'; type: { kind: 'struct'; fields: [ { name: 'owner'; docs: ['Owner of the Merkle tree.']; type: 'publicKey'; }, { name: 'programOwner'; docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ]; type: 'publicKey'; }, { name: 'forester'; docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. 
The registry program', 'implements access control to prevent contention during forester. The', 'forester pubkey specified in this struct can bypass contention checks.', ]; type: 'publicKey'; }, ]; }; }, { name: 'AccountState'; type: { kind: 'enum'; variants: [ { name: 'Initialized'; }, { name: 'Frozen'; }, ]; }; }, { name: 'CompressedAccount'; type: { kind: 'struct'; fields: [ { name: 'owner'; type: 'publicKey'; }, { name: 'lamports'; type: 'u64'; }, { name: 'address'; type: { option: { array: ['u8', 32]; }; }; }, { name: 'data'; type: { option: { defined: 'CompressedAccountData'; }; }; }, ]; }; }, { name: 'CompressedAccountData'; type: { kind: 'struct'; fields: [ { name: 'discriminator'; type: { array: ['u8', 8]; }; }, { name: 'data'; type: 'bytes'; }, { name: 'dataHash'; type: { array: ['u8', 32]; }; }, ]; }; }, { name: 'CompressedCpiContext'; type: { kind: 'struct'; fields: [ { name: 'setContext'; docs: [ 'Is set by the program that is invoking the CPI to signal that is should', 'set the cpi context.', ]; type: 'bool'; }, { name: 'firstSetContext'; docs: [ 'Is set to wipe the cpi context since someone could have set it before', 'with unrelated data.', ]; type: 'bool'; }, { name: 'cpiContextAccountIndex'; docs: [ 'Index of cpi context account in remaining accounts.', ]; type: 'u8'; }, ]; }; }, { name: 'CompressedProof'; type: { kind: 'struct'; fields: [ { name: 'a'; type: { array: ['u8', 32]; }; }, { name: 'b'; type: { array: ['u8', 64]; }; }, { name: 'c'; type: { array: ['u8', 32]; }; }, ]; }; }, { name: 'CompressedTokenInstructionDataTransfer'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'mint'; type: 'publicKey'; }, { name: 'delegatedTransfer'; docs: [ 'Is required if the signer is delegate,', '-> delegate is authority account,', 'owner = Some(owner) is the owner of the token account.', ]; type: { option: { defined: 'DelegatedTransfer'; }; }; }, { name: 'inputTokenDataWithContext'; type: { 
vec: { defined: 'InputTokenDataWithContext'; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'PackedTokenTransferOutputData'; }; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'compressOrDecompressAmount'; type: { option: 'u64'; }; }, { name: 'cpiContext'; type: { option: { defined: 'CompressedCpiContext'; }; }; }, { name: 'lamportsChangeAccountMerkleTreeIndex'; type: { option: 'u8'; }; }, ]; }; }, { name: 'DelegatedTransfer'; docs: [ 'Struct to provide the owner when the delegate is signer of the transaction.', ]; type: { kind: 'struct'; fields: [ { name: 'owner'; type: 'publicKey'; }, { name: 'delegateChangeAccountIndex'; docs: [ 'Index of change compressed account in output compressed accounts. In', "case that the delegate didn't spend the complete delegated compressed", 'account balance the change compressed account will be delegated to her', 'as well.', ]; type: { option: 'u8'; }; }, ]; }; }, { name: 'InputTokenDataWithContext'; type: { kind: 'struct'; fields: [ { name: 'amount'; type: 'u64'; }, { name: 'delegateIndex'; type: { option: 'u8'; }; }, { name: 'merkleContext'; type: { defined: 'PackedMerkleContext'; }; }, { name: 'rootIndex'; type: 'u16'; }, { name: 'lamports'; type: { option: 'u64'; }; }, { name: 'tlv'; docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ]; type: { option: 'bytes'; }; }, ]; }; }, { name: 'InstructionDataInvoke'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'inputCompressedAccountsWithMerkleContext'; type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext'; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'newAddressParams'; type: { vec: { defined: 'NewAddressParamsPacked'; }; }; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; 
}, ]; }; }, { name: 'InstructionDataInvokeCpi'; type: { kind: 'struct'; fields: [ { name: 'proof'; type: { option: { defined: 'CompressedProof'; }; }; }, { name: 'newAddressParams'; type: { vec: { defined: 'NewAddressParamsPacked'; }; }; }, { name: 'inputCompressedAccountsWithMerkleContext'; type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext'; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'cpiContext'; type: { option: { defined: 'CompressedCpiContext'; }; }; }, ]; }; }, { name: 'MerkleTreeMetadata'; type: { kind: 'struct'; fields: [ { name: 'accessMetadata'; type: { defined: 'AccessMetadata'; }; }, { name: 'rolloverMetadata'; type: { defined: 'RolloverMetadata'; }; }, { name: 'associatedQueue'; type: 'publicKey'; }, { name: 'nextMerkleTree'; type: 'publicKey'; }, ]; }; }, { name: 'MerkleTreeSequenceNumber'; type: { kind: 'struct'; fields: [ { name: 'pubkey'; type: 'publicKey'; }, { name: 'seq'; type: 'u64'; }, ]; }; }, { name: 'NewAddressParamsPacked'; type: { kind: 'struct'; fields: [ { name: 'seed'; type: { array: ['u8', 32]; }; }, { name: 'addressQueueAccountIndex'; type: 'u8'; }, { name: 'addressMerkleTreeAccountIndex'; type: 'u8'; }, { name: 'addressMerkleTreeRootIndex'; type: 'u16'; }, ]; }; }, { name: 'OutputCompressedAccountWithPackedContext'; type: { kind: 'struct'; fields: [ { name: 'compressedAccount'; type: { defined: 'CompressedAccount'; }; }, { name: 'merkleTreeIndex'; type: 'u8'; }, ]; }; }, { name: 'PackedCompressedAccountWithMerkleContext'; type: { kind: 'struct'; fields: [ { name: 'compressedAccount'; type: { defined: 'CompressedAccount'; }; }, { name: 'merkleContext'; type: { defined: 'PackedMerkleContext'; }; }, { name: 'rootIndex'; docs: [ 'Index of root used in inclusion validity proof.', ]; 
type: 'u16'; }, { name: 'readOnly'; docs: [ 'Placeholder to mark accounts read-only unimplemented set to false.', ]; type: 'bool'; }, ]; }; }, { name: 'PackedMerkleContext'; type: { kind: 'struct'; fields: [ { name: 'merkleTreePubkeyIndex'; type: 'u8'; }, { name: 'nullifierQueuePubkeyIndex'; type: 'u8'; }, { name: 'leafIndex'; type: 'u32'; }, { name: 'queueIndex'; docs: [ 'Index of leaf in queue. Placeholder of batched Merkle tree updates', 'currently unimplemented.', ]; type: { option: { defined: 'QueueIndex'; }; }; }, ]; }; }, { name: 'PackedTokenTransferOutputData'; type: { kind: 'struct'; fields: [ { name: 'owner'; type: 'publicKey'; }, { name: 'amount'; type: 'u64'; }, { name: 'lamports'; type: { option: 'u64'; }; }, { name: 'merkleTreeIndex'; type: 'u8'; }, { name: 'tlv'; docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ]; type: { option: 'bytes'; }; }, ]; }; }, { name: 'PublicTransactionEvent'; type: { kind: 'struct'; fields: [ { name: 'inputCompressedAccountHashes'; type: { vec: { array: ['u8', 32]; }; }; }, { name: 'outputCompressedAccountHashes'; type: { vec: { array: ['u8', 32]; }; }; }, { name: 'outputCompressedAccounts'; type: { vec: { defined: 'OutputCompressedAccountWithPackedContext'; }; }; }, { name: 'outputLeafIndices'; type: { vec: 'u32'; }; }, { name: 'sequenceNumbers'; type: { vec: { defined: 'MerkleTreeSequenceNumber'; }; }; }, { name: 'relayFee'; type: { option: 'u64'; }; }, { name: 'isCompress'; type: 'bool'; }, { name: 'compressOrDecompressLamports'; type: { option: 'u64'; }; }, { name: 'pubkeyArray'; type: { vec: 'publicKey'; }; }, { name: 'message'; type: { option: 'bytes'; }; }, ]; }; }, { name: 'QueueIndex'; type: { kind: 'struct'; fields: [ { name: 'queueId'; docs: ['Id of queue in queue account.']; type: 'u8'; }, { name: 'index'; docs: ['Index of compressed account hash in queue.']; type: 'u16'; }, ]; }; }, { name: 'RolloverMetadata'; type: { kind: 'struct'; fields: [ { name: 'index'; docs: ['Unique index.']; type: 
'u64'; }, { name: 'rolloverFee'; docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ]; type: 'u64'; }, { name: 'rolloverThreshold'; docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ]; type: 'u64'; }, { name: 'networkFee'; docs: ['Tip for maintaining the account.']; type: 'u64'; }, { name: 'rolledoverSlot'; docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ]; type: 'u64'; }, { name: 'closeThreshold'; docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. No 'close' functionality has been", 'implemented yet.', ]; type: 'u64'; }, { name: 'additionalBytes'; docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ]; type: 'u64'; }, ]; }; }, { name: 'TokenData'; type: { kind: 'struct'; fields: [ { name: 'mint'; docs: ['The mint associated with this account']; type: 'publicKey'; }, { name: 'owner'; docs: ['The owner of this account.']; type: 'publicKey'; }, { name: 'amount'; docs: ['The amount of tokens this account holds.']; type: 'u64'; }, { name: 'delegate'; docs: [ 'If `delegate` is `Some` then `delegated_amount` represents', 'the amount authorized by the delegate', ]; type: { option: 'publicKey'; }; }, { name: 'state'; docs: ["The account's state"]; type: { defined: 'AccountState'; }; }, { name: 'tlv'; docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ]; type: { option: 'bytes'; }; }, ]; }; }, ]; errors: [ { code: 6000; name: 'SignerCheckFailed'; msg: 'Signer check failed'; }, { code: 6001; name: 'CreateTransferInstructionFailed'; msg: 'Create transfer instruction failed'; }, { code: 6002; name: 'AccountNotFound'; msg: 'Account not found'; }, { code: 6003; name: 
'SerializationError'; msg: 'Serialization error'; }, ]; }; export const IDL: LightCompressedToken = { version: '1.2.0', name: 'light_compressed_token', instructions: [ { name: 'createTokenPool', docs: [ 'This instruction creates a token pool for a given mint. Every spl mint', 'can have one token pool. When a token is compressed the tokens are', 'transferrred to the token pool, and their compressed equivalent is', 'minted into a Merkle tree.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'mint', isMut: true, isSigner: false, }, { name: 'tokenProgram', isMut: false, isSigner: false, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, ], args: [], }, { name: 'mintTo', docs: [ 'Mints tokens from an spl token mint to a list of compressed accounts.', 'Minted tokens are transferred to a pool account owned by the compressed', 'token program. The instruction creates one compressed output account for', 'every amount and pubkey input pair. A constant amount of lamports can be', 'transferred to each output account to enable. A use case to add lamports', 'to a compressed token account is to prevent spam. 
This is the only way', 'to add lamports to a compressed token account.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'mint', isMut: true, isSigner: false, }, { name: 'tokenPoolPda', isMut: true, isSigner: false, }, { name: 'tokenProgram', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, docs: ['programs'], }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'merkleTree', isMut: true, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'solPoolPda', isMut: true, isSigner: false, isOptional: true, }, ], args: [ { name: 'publicKeys', type: { vec: 'publicKey', }, }, { name: 'amounts', type: { vec: 'u64', }, }, { name: 'lamports', type: { option: 'u64', }, }, ], }, { name: 'compressSplTokenAccount', docs: [ 'Compresses the balance of an spl token account sub an optional remaining', 'amount. This instruction does not close the spl token account. 
To close', 'the account bundle a close spl account instruction in your transaction.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'compressOrDecompressTokenAccount', isMut: true, isSigner: false, isOptional: true, }, { name: 'tokenProgram', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'owner', type: 'publicKey', }, { name: 'remainingAmount', type: { option: 'u64', }, }, { name: 'cpiContext', type: { option: { defined: 'CompressedCpiContext', }, }, }, ], }, { name: 'transfer', docs: [ 'Transfers compressed tokens from one account to another. All accounts', 'must be of the same mint. Additional spl tokens can be compressed or', 'decompressed. In one transaction only compression or decompression is', 'possible. Lamports can be transferred alongside tokens. If output token', 'accounts specify less lamports than inputs the remaining lamports are', 'transferred to an output compressed account. Signer must be owner or', 'delegate. 
If a delegated token account is transferred the delegate is', 'not preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'compressOrDecompressTokenAccount', isMut: true, isSigner: false, isOptional: true, }, { name: 'tokenProgram', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'approve', docs: [ 'Delegates an amount to a delegate. A compressed token account is either', 'completely delegated or not. Prior delegates are not preserved. Cannot', 'be called by a delegate.', 'The instruction creates two output accounts:', '1. one account with delegated amount', '2. 
one account with remaining(change) amount', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'revoke', docs: [ 'Revokes a delegation. The instruction merges all inputs into one output', 'account. Cannot be called by a delegate. 
Delegates are not preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'freeze', docs: [ 'Freezes compressed token accounts. Inputs must not be frozen. Creates as', 'many outputs as inputs. 
Balances and delegates are preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['that this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'mint', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'thaw', docs: [ 'Thaws frozen compressed token accounts. Inputs must be frozen. Creates', 'as many outputs as inputs. Balances and delegates are preserved.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['that this program is the signer of the cpi.'], }, { name: 'systemProgram', isMut: false, isSigner: false, }, { name: 'mint', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'burn', docs: [ 'Burns compressed tokens and spl tokens from the pool account. Delegates', 'can burn tokens. 
The output compressed token account remains delegated.', 'Creates one output compressed token account.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'mint', isMut: true, isSigner: false, }, { name: 'tokenPoolPda', isMut: true, isSigner: false, }, { name: 'tokenProgram', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs', type: 'bytes', }, ], }, { name: 'stubIdlBuild', docs: [ 'This function is a stub to allow Anchor to include the input types in', 'the IDL. 
It should not be included in production builds nor be called in', 'practice.', ], accounts: [ { name: 'feePayer', isMut: true, isSigner: true, docs: ['UNCHECKED: only pays fees.'], }, { name: 'authority', isMut: false, isSigner: true, docs: [ 'Authority is verified through proof since both owner and delegate', 'are included in the token data hash, which is a public input to the', 'validity proof.', ], }, { name: 'cpiAuthorityPda', isMut: false, isSigner: false, }, { name: 'lightSystemProgram', isMut: false, isSigner: false, }, { name: 'registeredProgramPda', isMut: false, isSigner: false, }, { name: 'noopProgram', isMut: false, isSigner: false, }, { name: 'accountCompressionAuthority', isMut: false, isSigner: false, }, { name: 'accountCompressionProgram', isMut: false, isSigner: false, }, { name: 'selfProgram', isMut: false, isSigner: false, docs: ['this program is the signer of the cpi.'], }, { name: 'tokenPoolPda', isMut: true, isSigner: false, isOptional: true, }, { name: 'compressOrDecompressTokenAccount', isMut: true, isSigner: false, isOptional: true, }, { name: 'tokenProgram', isMut: false, isSigner: false, isOptional: true, }, { name: 'systemProgram', isMut: false, isSigner: false, }, ], args: [ { name: 'inputs1', type: { defined: 'CompressedTokenInstructionDataTransfer', }, }, { name: 'inputs2', type: { defined: 'TokenData', }, }, ], }, ], types: [ { name: 'AccessMetadata', type: { kind: 'struct', fields: [ { name: 'owner', docs: ['Owner of the Merkle tree.'], type: 'publicKey', }, { name: 'programOwner', docs: [ 'Program owner of the Merkle tree. This will be used for program owned Merkle trees.', ], type: 'publicKey', }, { name: 'forester', docs: [ 'Optional privileged forester pubkey, can be set for custom Merkle trees', 'without a network fee. Merkle trees without network fees are not', 'forested by light foresters. The variable is not used in the account', 'compression program but the registry program. 
The registry program', 'implements access control to prevent contention during forester. The', 'forester pubkey specified in this struct can bypass contention checks.', ], type: 'publicKey', }, ], }, }, { name: 'AccountState', type: { kind: 'enum', variants: [ { name: 'Initialized', }, { name: 'Frozen', }, ], }, }, { name: 'CompressedAccount', type: { kind: 'struct', fields: [ { name: 'owner', type: 'publicKey', }, { name: 'lamports', type: 'u64', }, { name: 'address', type: { option: { array: ['u8', 32], }, }, }, { name: 'data', type: { option: { defined: 'CompressedAccountData', }, }, }, ], }, }, { name: 'CompressedAccountData', type: { kind: 'struct', fields: [ { name: 'discriminator', type: { array: ['u8', 8], }, }, { name: 'data', type: 'bytes', }, { name: 'dataHash', type: { array: ['u8', 32], }, }, ], }, }, { name: 'CompressedCpiContext', type: { kind: 'struct', fields: [ { name: 'setContext', docs: [ 'Is set by the program that is invoking the CPI to signal that is should', 'set the cpi context.', ], type: 'bool', }, { name: 'firstSetContext', docs: [ 'Is set to wipe the cpi context since someone could have set it before', 'with unrelated data.', ], type: 'bool', }, { name: 'cpiContextAccountIndex', docs: [ 'Index of cpi context account in remaining accounts.', ], type: 'u8', }, ], }, }, { name: 'CompressedProof', type: { kind: 'struct', fields: [ { name: 'a', type: { array: ['u8', 32], }, }, { name: 'b', type: { array: ['u8', 64], }, }, { name: 'c', type: { array: ['u8', 32], }, }, ], }, }, { name: 'CompressedTokenInstructionDataTransfer', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'mint', type: 'publicKey', }, { name: 'delegatedTransfer', docs: [ 'Is required if the signer is delegate,', '-> delegate is authority account,', 'owner = Some(owner) is the owner of the token account.', ], type: { option: { defined: 'DelegatedTransfer', }, }, }, { name: 'inputTokenDataWithContext', type: { 
vec: { defined: 'InputTokenDataWithContext', }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'PackedTokenTransferOutputData', }, }, }, { name: 'isCompress', type: 'bool', }, { name: 'compressOrDecompressAmount', type: { option: 'u64', }, }, { name: 'cpiContext', type: { option: { defined: 'CompressedCpiContext', }, }, }, { name: 'lamportsChangeAccountMerkleTreeIndex', type: { option: 'u8', }, }, ], }, }, { name: 'DelegatedTransfer', docs: [ 'Struct to provide the owner when the delegate is signer of the transaction.', ], type: { kind: 'struct', fields: [ { name: 'owner', type: 'publicKey', }, { name: 'delegateChangeAccountIndex', docs: [ 'Index of change compressed account in output compressed accounts. In', "case that the delegate didn't spend the complete delegated compressed", 'account balance the change compressed account will be delegated to her', 'as well.', ], type: { option: 'u8', }, }, ], }, }, { name: 'InputTokenDataWithContext', type: { kind: 'struct', fields: [ { name: 'amount', type: 'u64', }, { name: 'delegateIndex', type: { option: 'u8', }, }, { name: 'merkleContext', type: { defined: 'PackedMerkleContext', }, }, { name: 'rootIndex', type: 'u16', }, { name: 'lamports', type: { option: 'u64', }, }, { name: 'tlv', docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ], type: { option: 'bytes', }, }, ], }, }, { name: 'InstructionDataInvoke', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'inputCompressedAccountsWithMerkleContext', type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext', }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'newAddressParams', type: { vec: { defined: 'NewAddressParamsPacked', }, }, }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', 
}, ], }, }, { name: 'InstructionDataInvokeCpi', type: { kind: 'struct', fields: [ { name: 'proof', type: { option: { defined: 'CompressedProof', }, }, }, { name: 'newAddressParams', type: { vec: { defined: 'NewAddressParamsPacked', }, }, }, { name: 'inputCompressedAccountsWithMerkleContext', type: { vec: { defined: 'PackedCompressedAccountWithMerkleContext', }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', }, { name: 'cpiContext', type: { option: { defined: 'CompressedCpiContext', }, }, }, ], }, }, { name: 'MerkleTreeMetadata', type: { kind: 'struct', fields: [ { name: 'accessMetadata', type: { defined: 'AccessMetadata', }, }, { name: 'rolloverMetadata', type: { defined: 'RolloverMetadata', }, }, { name: 'associatedQueue', type: 'publicKey', }, { name: 'nextMerkleTree', type: 'publicKey', }, ], }, }, { name: 'MerkleTreeSequenceNumber', type: { kind: 'struct', fields: [ { name: 'pubkey', type: 'publicKey', }, { name: 'seq', type: 'u64', }, ], }, }, { name: 'NewAddressParamsPacked', type: { kind: 'struct', fields: [ { name: 'seed', type: { array: ['u8', 32], }, }, { name: 'addressQueueAccountIndex', type: 'u8', }, { name: 'addressMerkleTreeAccountIndex', type: 'u8', }, { name: 'addressMerkleTreeRootIndex', type: 'u16', }, ], }, }, { name: 'OutputCompressedAccountWithPackedContext', type: { kind: 'struct', fields: [ { name: 'compressedAccount', type: { defined: 'CompressedAccount', }, }, { name: 'merkleTreeIndex', type: 'u8', }, ], }, }, { name: 'PackedCompressedAccountWithMerkleContext', type: { kind: 'struct', fields: [ { name: 'compressedAccount', type: { defined: 'CompressedAccount', }, }, { name: 'merkleContext', type: { defined: 'PackedMerkleContext', }, }, { name: 'rootIndex', docs: [ 'Index of root used in inclusion validity proof.', ], 
type: 'u16', }, { name: 'readOnly', docs: [ 'Placeholder to mark accounts read-only unimplemented set to false.', ], type: 'bool', }, ], }, }, { name: 'PackedMerkleContext', type: { kind: 'struct', fields: [ { name: 'merkleTreePubkeyIndex', type: 'u8', }, { name: 'nullifierQueuePubkeyIndex', type: 'u8', }, { name: 'leafIndex', type: 'u32', }, { name: 'queueIndex', docs: [ 'Index of leaf in queue. Placeholder of batched Merkle tree updates', 'currently unimplemented.', ], type: { option: { defined: 'QueueIndex', }, }, }, ], }, }, { name: 'PackedTokenTransferOutputData', type: { kind: 'struct', fields: [ { name: 'owner', type: 'publicKey', }, { name: 'amount', type: 'u64', }, { name: 'lamports', type: { option: 'u64', }, }, { name: 'merkleTreeIndex', type: 'u8', }, { name: 'tlv', docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ], type: { option: 'bytes', }, }, ], }, }, { name: 'PublicTransactionEvent', type: { kind: 'struct', fields: [ { name: 'inputCompressedAccountHashes', type: { vec: { array: ['u8', 32], }, }, }, { name: 'outputCompressedAccountHashes', type: { vec: { array: ['u8', 32], }, }, }, { name: 'outputCompressedAccounts', type: { vec: { defined: 'OutputCompressedAccountWithPackedContext', }, }, }, { name: 'outputLeafIndices', type: { vec: 'u32', }, }, { name: 'sequenceNumbers', type: { vec: { defined: 'MerkleTreeSequenceNumber', }, }, }, { name: 'relayFee', type: { option: 'u64', }, }, { name: 'isCompress', type: 'bool', }, { name: 'compressOrDecompressLamports', type: { option: 'u64', }, }, { name: 'pubkeyArray', type: { vec: 'publicKey', }, }, { name: 'message', type: { option: 'bytes', }, }, ], }, }, { name: 'QueueIndex', type: { kind: 'struct', fields: [ { name: 'queueId', docs: ['Id of queue in queue account.'], type: 'u8', }, { name: 'index', docs: ['Index of compressed account hash in queue.'], type: 'u16', }, ], }, }, { name: 'RolloverMetadata', type: { kind: 'struct', fields: [ { name: 'index', docs: ['Unique index.'], type: 
'u64', }, { name: 'rolloverFee', docs: [ 'This fee is used for rent for the next account.', 'It accumulates in the account so that once the corresponding Merkle tree account is full it can be rolled over', ], type: 'u64', }, { name: 'rolloverThreshold', docs: [ 'The threshold in percentage points when the account should be rolled over (95 corresponds to 95% filled).', ], type: 'u64', }, { name: 'networkFee', docs: ['Tip for maintaining the account.'], type: 'u64', }, { name: 'rolledoverSlot', docs: [ 'The slot when the account was rolled over, a rolled over account should not be written to.', ], type: 'u64', }, { name: 'closeThreshold', docs: [ 'If current slot is greater than rolledover_slot + close_threshold and', "the account is empty it can be closed. No 'close' functionality has been", 'implemented yet.', ], type: 'u64', }, { name: 'additionalBytes', docs: [ 'Placeholder for bytes of additional accounts which are tied to the', 'Merkle trees operation and need to be rolled over as well.', ], type: 'u64', }, ], }, }, { name: 'TokenData', type: { kind: 'struct', fields: [ { name: 'mint', docs: ['The mint associated with this account'], type: 'publicKey', }, { name: 'owner', docs: ['The owner of this account.'], type: 'publicKey', }, { name: 'amount', docs: ['The amount of tokens this account holds.'], type: 'u64', }, { name: 'delegate', docs: [ 'If `delegate` is `Some` then `delegated_amount` represents', 'the amount authorized by the delegate', ], type: { option: 'publicKey', }, }, { name: 'state', docs: ["The account's state"], type: { defined: 'AccountState', }, }, { name: 'tlv', docs: [ 'Placeholder for TokenExtension tlv data (unimplemented)', ], type: { option: 'bytes', }, }, ], }, }, ], errors: [ { code: 6000, name: 'SignerCheckFailed', msg: 'Signer check failed', }, { code: 6001, name: 'CreateTransferInstructionFailed', msg: 'Create transfer instruction failed', }, { code: 6002, name: 'AccountNotFound', msg: 'Account not found', }, { code: 6003, name: 
'SerializationError', msg: 'Serialization error', }, ], };
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/instructions/pack-compressed-token-accounts.ts
import { ParsedTokenAccount, InputTokenDataWithContext, getIndexOrAdd, bn, padOutputStateMerkleTrees, TokenTransferOutputData, } from '@lightprotocol/stateless.js'; import { PublicKey, AccountMeta } from '@solana/web3.js'; import { PackedTokenTransferOutputData } from '../types'; export type PackCompressedTokenAccountsParams = { /** Input state to be consumed */ inputCompressedTokenAccounts: ParsedTokenAccount[]; /** * State trees that the output should be inserted into. Defaults to the 0th * state tree of the input state. Gets padded to the length of * outputCompressedAccounts. */ outputStateTrees?: PublicKey[] | PublicKey; /** Optional remaining accounts to append to */ remainingAccounts?: PublicKey[]; /** * Root indices that are used on-chain to fetch the correct root * from the state Merkle tree account for validity proof verification. */ rootIndices: number[]; tokenTransferOutputs: TokenTransferOutputData[]; }; // TODO: include owner and lamports in packing. /** * Packs Compressed Token Accounts. 
*/ export function packCompressedTokenAccounts( params: PackCompressedTokenAccountsParams, ): { inputTokenDataWithContext: InputTokenDataWithContext[]; remainingAccountMetas: AccountMeta[]; packedOutputTokenData: PackedTokenTransferOutputData[]; } { const { inputCompressedTokenAccounts, outputStateTrees, remainingAccounts = [], rootIndices, tokenTransferOutputs, } = params; const _remainingAccounts = remainingAccounts.slice(); let delegateIndex: number | null = null; if ( inputCompressedTokenAccounts.length > 0 && inputCompressedTokenAccounts[0].parsed.delegate ) { delegateIndex = getIndexOrAdd( _remainingAccounts, inputCompressedTokenAccounts[0].parsed.delegate, ); } /// TODO: move pubkeyArray to remainingAccounts /// Currently just packs 'delegate' to pubkeyArray const packedInputTokenData: InputTokenDataWithContext[] = []; /// pack inputs inputCompressedTokenAccounts.forEach( (account: ParsedTokenAccount, index) => { const merkleTreePubkeyIndex = getIndexOrAdd( _remainingAccounts, account.compressedAccount.merkleTree, ); const nullifierQueuePubkeyIndex = getIndexOrAdd( _remainingAccounts, account.compressedAccount.nullifierQueue, ); packedInputTokenData.push({ amount: account.parsed.amount, delegateIndex, merkleContext: { merkleTreePubkeyIndex, nullifierQueuePubkeyIndex, leafIndex: account.compressedAccount.leafIndex, queueIndex: null, }, rootIndex: rootIndices[index], lamports: account.compressedAccount.lamports.eq(bn(0)) ? 
null : account.compressedAccount.lamports, tlv: null, }); }, ); /// pack output state trees const paddedOutputStateMerkleTrees = padOutputStateMerkleTrees( outputStateTrees, tokenTransferOutputs.length, inputCompressedTokenAccounts.map(acc => acc.compressedAccount), ); const packedOutputTokenData: PackedTokenTransferOutputData[] = []; paddedOutputStateMerkleTrees.forEach((account, index) => { const merkleTreeIndex = getIndexOrAdd(_remainingAccounts, account); packedOutputTokenData.push({ owner: tokenTransferOutputs[index].owner, amount: tokenTransferOutputs[index].amount, lamports: tokenTransferOutputs[index].lamports?.eq(bn(0)) ? null : tokenTransferOutputs[index].lamports, merkleTreeIndex, tlv: null, }); }); // to meta const remainingAccountMetas = _remainingAccounts.map( (account): AccountMeta => ({ pubkey: account, isWritable: true, isSigner: false, }), ); return { inputTokenDataWithContext: packedInputTokenData, remainingAccountMetas, packedOutputTokenData, }; }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/instructions/index.ts
export * from './pack-compressed-token-accounts';
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/create-token-pool.ts
import { ConfirmOptions, PublicKey, Signer, TransactionSignature, } from '@solana/web3.js'; import { CompressedTokenProgram } from '../program'; import { Rpc, buildAndSignTx, sendAndConfirmTx, } from '@lightprotocol/stateless.js'; /** * Register an existing mint with the CompressedToken program * * @param rpc RPC to use * @param payer Payer of the transaction and initialization fees * @param mintAuthority Account or multisig that will control minting. Is signer. * @param mintAddress Address of the existing mint * @param confirmOptions Options for confirming the transaction * * @return transaction signature */ export async function createTokenPool( rpc: Rpc, payer: Signer, mint: PublicKey, confirmOptions?: ConfirmOptions, tokenProgramId?: PublicKey, ): Promise<TransactionSignature> { tokenProgramId = tokenProgramId ? tokenProgramId : await CompressedTokenProgram.get_mint_program_id(mint, rpc); const ix = await CompressedTokenProgram.createTokenPool({ feePayer: payer.publicKey, mint, tokenProgramId, }); const { blockhash } = await rpc.getLatestBlockhash(); const tx = buildAndSignTx([ix], payer, blockhash); const txId = await sendAndConfirmTx(rpc, tx, confirmOptions); return txId; }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/create-mint.ts
import { ConfirmOptions, Keypair, PublicKey, Signer, TransactionSignature, } from '@solana/web3.js'; import { CompressedTokenProgram } from '../program'; import { MINT_SIZE, TOKEN_2022_PROGRAM_ID, TOKEN_PROGRAM_ID, } from '@solana/spl-token'; import { Rpc, buildAndSignTx, sendAndConfirmTx, dedupeSigner, } from '@lightprotocol/stateless.js'; /** * Create and initialize a new compressed token mint * * @param rpc RPC to use * @param payer Payer of the transaction and initialization fees * @param mintAuthority Account or multisig that will control minting * @param decimals Location of the decimal place * @param keypair Optional keypair, defaulting to a new random one * @param confirmOptions Options for confirming the transaction * * @return Address of the new mint and the transaction signature */ export async function createMint( rpc: Rpc, payer: Signer, mintAuthority: PublicKey, decimals: number, keypair = Keypair.generate(), confirmOptions?: ConfirmOptions, isToken22 = false, ): Promise<{ mint: PublicKey; transactionSignature: TransactionSignature }> { const rentExemptBalance = await rpc.getMinimumBalanceForRentExemption(MINT_SIZE); const tokenProgramId = isToken22 ? TOKEN_2022_PROGRAM_ID : TOKEN_PROGRAM_ID; const ixs = await CompressedTokenProgram.createMint({ feePayer: payer.publicKey, mint: keypair.publicKey, decimals, authority: mintAuthority, freezeAuthority: null, // TODO: add feature rentExemptBalance, tokenProgramId, }); const { blockhash } = await rpc.getLatestBlockhash(); const additionalSigners = dedupeSigner(payer, [keypair]); const tx = buildAndSignTx(ixs, payer, blockhash, additionalSigners); const txId = await sendAndConfirmTx(rpc, tx, confirmOptions); return { mint: keypair.publicKey, transactionSignature: txId }; }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/create-token-program-lookup-table.ts
import { PublicKey, Signer, TransactionSignature } from '@solana/web3.js'; import { sendAndConfirmTx, buildAndSignTx, Rpc, dedupeSigner, } from '@lightprotocol/stateless.js'; import { CompressedTokenProgram } from '../program'; /** * Create a lookup table for the token program's default accounts * * @param rpc Rpc connection to use * @param payer Payer of the transaction fees * @param authority Authority of the lookup table * @param mints Optional array of mint public keys to include in * the lookup table * @param additionalAccounts Optional array of additional account public keys * to include in the lookup table * * @return Transaction signatures and the address of the created lookup table */ export async function createTokenProgramLookupTable( rpc: Rpc, payer: Signer, authority: Signer, mints?: PublicKey[], additionalAccounts?: PublicKey[], ): Promise<{ txIds: TransactionSignature[]; address: PublicKey }> { const recentSlot = await rpc.getSlot('finalized'); const { instructions, address } = await CompressedTokenProgram.createTokenProgramLookupTable({ payer: payer.publicKey, authority: authority.publicKey, mints, remainingAccounts: additionalAccounts, recentSlot, }); const additionalSigners = dedupeSigner(payer, [authority]); const blockhashCtx = await rpc.getLatestBlockhash(); const signedTx = buildAndSignTx( [instructions[0]], payer, blockhashCtx.blockhash, additionalSigners, ); /// Must wait for the first instruction to be finalized. const txId = await sendAndConfirmTx( rpc, signedTx, { commitment: 'finalized' }, blockhashCtx, ); const blockhashCtx2 = await rpc.getLatestBlockhash(); const signedTx2 = buildAndSignTx( [instructions[1]], payer, blockhashCtx2.blockhash, additionalSigners, ); const txId2 = await sendAndConfirmTx( rpc, signedTx2, { commitment: 'finalized' }, blockhashCtx2, ); return { txIds: [txId, txId2], address }; }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/mint-to.ts
import { ComputeBudgetProgram, ConfirmOptions, PublicKey, Signer, TransactionSignature, } from '@solana/web3.js'; import { BN } from '@coral-xyz/anchor'; import { sendAndConfirmTx, buildAndSignTx, Rpc, dedupeSigner, } from '@lightprotocol/stateless.js'; import { CompressedTokenProgram } from '../program'; /** * Mint compressed tokens to a solana address * * @param rpc Rpc to use * @param payer Payer of the transaction fees * @param mint Mint for the account * @param destination Address of the account to mint to. Can be an array of * addresses if the amount is an array of amounts. * @param authority Minting authority * @param amount Amount to mint. Can be an array of amounts if the * destination is an array of addresses. * @param merkleTree State tree account that the compressed tokens should be * part of. Defaults to the default state tree account. * @param confirmOptions Options for confirming the transaction * * @return Signature of the confirmed transaction */ export async function mintTo( rpc: Rpc, payer: Signer, mint: PublicKey, destination: PublicKey | PublicKey[], authority: Signer, amount: number | BN | number[] | BN[], merkleTree?: PublicKey, confirmOptions?: ConfirmOptions, tokenProgramId?: PublicKey, ): Promise<TransactionSignature> { tokenProgramId = tokenProgramId ? tokenProgramId : await CompressedTokenProgram.get_mint_program_id(mint, rpc); const additionalSigners = dedupeSigner(payer, [authority]); const ix = await CompressedTokenProgram.mintTo({ feePayer: payer.publicKey, mint, authority: authority.publicKey, amount: amount, toPubkey: destination, merkleTree, tokenProgramId, }); const { blockhash } = await rpc.getLatestBlockhash(); const tx = buildAndSignTx( [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix], payer, blockhash, additionalSigners, ); const txId = await sendAndConfirmTx(rpc, tx, confirmOptions); return txId; }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/compress.ts
import { ConfirmOptions, PublicKey, Signer, TransactionSignature, ComputeBudgetProgram, } from '@solana/web3.js'; import { sendAndConfirmTx, buildAndSignTx, Rpc, dedupeSigner, } from '@lightprotocol/stateless.js'; import { BN } from '@coral-xyz/anchor'; import { CompressedTokenProgram } from '../program'; /** * Compress SPL tokens * * @param rpc Rpc connection to use * @param payer Payer of the transaction fees * @param mint Mint of the compressed token * @param amount Number of tokens to transfer * @param owner Owner of the compressed tokens. * @param sourceTokenAccount Source (associated) token account * @param toAddress Destination address of the recipient * @param merkleTree State tree account that the compressed tokens * should be inserted into. Defaults to a default * state tree account. * @param confirmOptions Options for confirming the transaction * * * @return Signature of the confirmed transaction */ export async function compress( rpc: Rpc, payer: Signer, mint: PublicKey, amount: number | BN | number[] | BN[], owner: Signer, sourceTokenAccount: PublicKey, toAddress: PublicKey | Array<PublicKey>, merkleTree?: PublicKey, confirmOptions?: ConfirmOptions, tokenProgramId?: PublicKey, ): Promise<TransactionSignature> { tokenProgramId = tokenProgramId ? tokenProgramId : await CompressedTokenProgram.get_mint_program_id(mint, rpc); const compressIx = await CompressedTokenProgram.compress({ payer: payer.publicKey, owner: owner.publicKey, source: sourceTokenAccount, toAddress, amount, mint, outputStateTree: merkleTree, tokenProgramId, }); const blockhashCtx = await rpc.getLatestBlockhash(); const additionalSigners = dedupeSigner(payer, [owner]); const signedTx = buildAndSignTx( [ ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000, }), compressIx, ], payer, blockhashCtx.blockhash, additionalSigners, ); const txId = await sendAndConfirmTx( rpc, signedTx, confirmOptions, blockhashCtx, ); return txId; }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/merge-token-accounts.ts
import {
    ComputeBudgetProgram,
    ConfirmOptions,
    PublicKey,
    Signer,
    TransactionSignature,
} from '@solana/web3.js';
import {
    Rpc,
    dedupeSigner,
    buildAndSignTx,
    sendAndConfirmTx,
    bn,
} from '@lightprotocol/stateless.js';
import { CompressedTokenProgram } from '../program';

/**
 * Merge multiple compressed token accounts for a given mint into a single
 * account
 *
 * @param rpc            RPC to use
 * @param payer          Payer of the transaction fees
 * @param mint           Public key of the token's mint
 * @param owner          Owner of the token accounts to be merged
 * @param merkleTree     Optional merkle tree for compressed tokens
 * @param confirmOptions Options for confirming the transaction
 *
 * @return Signature of the confirmed transaction
 */
export async function mergeTokenAccounts(
    rpc: Rpc,
    payer: Signer,
    mint: PublicKey,
    owner: Signer,
    merkleTree?: PublicKey,
    confirmOptions?: ConfirmOptions,
): Promise<TransactionSignature> {
    const compressedTokenAccounts =
        await rpc.getCompressedTokenAccountsByOwner(owner.publicKey, { mint });

    if (compressedTokenAccounts.items.length === 0) {
        throw new Error(
            `No compressed token accounts found for mint ${mint.toBase58()}`,
        );
    }
    // All merge instructions are packed into one transaction, which bounds
    // the number of input accounts that can be consumed at once.
    if (compressedTokenAccounts.items.length >= 6) {
        throw new Error(
            `Too many compressed token accounts used for mint ${mint.toBase58()}`,
        );
    }

    const instructions = [
        ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
    ];

    // Merge in batches of up to 3 input accounts per instruction.
    // (The previous `.slice(0, 6)` in the loop bound was dead code: the
    // guard above already caps the list below 6 entries.)
    const items = compressedTokenAccounts.items;
    for (let i = 0; i < items.length; i += 3) {
        const batch = items.slice(i, i + 3);

        const proof = await rpc.getValidityProof(
            batch.map(account => bn(account.compressedAccount.hash)),
        );

        const batchInstructions =
            await CompressedTokenProgram.mergeTokenAccounts({
                payer: payer.publicKey,
                owner: owner.publicKey,
                mint,
                inputCompressedTokenAccounts: batch,
                // NOTE(review): non-null assertion on an optional param is a
                // type lie (undefined is passed through when omitted) —
                // confirm outputStateTree accepts undefined upstream.
                outputStateTree: merkleTree!,
                recentValidityProof: proof.compressedProof,
                recentInputStateRootIndices: proof.rootIndices,
            });

        instructions.push(...batchInstructions);
    }

    const { blockhash } = await rpc.getLatestBlockhash();
    const additionalSigners = dedupeSigner(payer, [owner]);

    const signedTx = buildAndSignTx(
        instructions,
        payer,
        blockhash,
        additionalSigners,
    );

    return await sendAndConfirmTx(rpc, signedTx, confirmOptions);
}
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/transfer.ts
import { ComputeBudgetProgram, ConfirmOptions, PublicKey, Signer, TransactionSignature, } from '@solana/web3.js'; import { bn, sendAndConfirmTx, buildAndSignTx, Rpc, ParsedTokenAccount, dedupeSigner, } from '@lightprotocol/stateless.js'; import { BN } from '@coral-xyz/anchor'; import { CompressedTokenProgram } from '../program'; /** * Transfer compressed tokens from one owner to another * * @param rpc Rpc to use * @param payer Payer of the transaction fees * @param mint Mint of the compressed token * @param amount Number of tokens to transfer * @param owner Owner of the compressed tokens * @param toAddress Destination address of the recipient * @param merkleTree State tree account that the compressed tokens should be * inserted into. Defaults to the default state tree * account. * @param confirmOptions Options for confirming the transaction * * * @return Signature of the confirmed transaction */ export async function transfer( rpc: Rpc, payer: Signer, mint: PublicKey, amount: number | BN, owner: Signer, toAddress: PublicKey, /// TODO: allow multiple merkleTree?: PublicKey, confirmOptions?: ConfirmOptions, ): Promise<TransactionSignature> { amount = bn(amount); const compressedTokenAccounts = await rpc.getCompressedTokenAccountsByOwner( owner.publicKey, { mint, }, ); const [inputAccounts] = selectMinCompressedTokenAccountsForTransfer( compressedTokenAccounts.items, amount, ); const proof = await rpc.getValidityProof( inputAccounts.map(account => bn(account.compressedAccount.hash)), ); const ix = await CompressedTokenProgram.transfer({ payer: payer.publicKey, inputCompressedTokenAccounts: inputAccounts, toAddress, amount, recentInputStateRootIndices: proof.rootIndices, recentValidityProof: proof.compressedProof, outputStateTrees: merkleTree, }); const { blockhash } = await rpc.getLatestBlockhash(); const additionalSigners = dedupeSigner(payer, [owner]); const signedTx = buildAndSignTx( [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix], payer, 
blockhash, additionalSigners, ); const txId = await sendAndConfirmTx(rpc, signedTx, confirmOptions); return txId; } /** * Selects the minimal number of compressed token accounts for a transfer. * * 1. Sorts the accounts by amount in descending order * 2. Accumulates the amount until it is greater than or equal to the transfer * amount */ export function selectMinCompressedTokenAccountsForTransfer( accounts: ParsedTokenAccount[], transferAmount: BN, ): [ selectedAccounts: ParsedTokenAccount[], total: BN, totalLamports: BN | null, ] { let accumulatedAmount = bn(0); let accumulatedLamports = bn(0); const selectedAccounts: ParsedTokenAccount[] = []; accounts.sort((a, b) => b.parsed.amount.cmp(a.parsed.amount)); for (const account of accounts) { if (accumulatedAmount.gte(bn(transferAmount))) break; accumulatedAmount = accumulatedAmount.add(account.parsed.amount); accumulatedLamports = accumulatedLamports.add( account.compressedAccount.lamports, ); selectedAccounts.push(account); } if (accumulatedAmount.lt(bn(transferAmount))) { throw new Error( `Not enough balance for transfer. Required: ${transferAmount.toString()}, available: ${accumulatedAmount.toString()}`, ); } return [ selectedAccounts, accumulatedAmount, accumulatedLamports.lt(bn(0)) ? accumulatedLamports : null, ]; }
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/approve-and-mint-to.ts
import {
    ComputeBudgetProgram,
    ConfirmOptions,
    PublicKey,
    Signer,
    TransactionSignature,
} from '@solana/web3.js';
import { BN } from '@coral-xyz/anchor';
import {
    sendAndConfirmTx,
    buildAndSignTx,
    Rpc,
    dedupeSigner,
} from '@lightprotocol/stateless.js';
import { CompressedTokenProgram } from '../program';
import { getOrCreateAssociatedTokenAccount } from '@solana/spl-token';

/**
 * Mint compressed tokens to a solana address from an external mint authority
 *
 * @param rpc            Rpc to use
 * @param payer          Payer of the transaction fees
 * @param mint           Mint for the account
 * @param destination    Address of the account to mint to
 * @param authority      Minting authority
 * @param amount         Amount to mint
 * @param merkleTree     State tree account that the compressed tokens should be
 *                       part of. Defaults to the default state tree account.
 * @param confirmOptions Options for confirming the transaction
 * @param tokenProgramId Program that owns the mint. Resolved from the mint
 *                       account when omitted.
 *
 * @return Signature of the confirmed transaction
 */
export async function approveAndMintTo(
    rpc: Rpc,
    payer: Signer,
    mint: PublicKey,
    destination: PublicKey,
    authority: Signer,
    amount: number | BN,
    merkleTree?: PublicKey,
    confirmOptions?: ConfirmOptions,
    tokenProgramId?: PublicKey,
): Promise<TransactionSignature> {
    const resolvedProgramId =
        tokenProgramId ??
        (await CompressedTokenProgram.get_mint_program_id(mint, rpc));

    // Ensure the authority owns an associated token account to receive the
    // minted tokens before they are compressed.
    const authorityTokenAccount = await getOrCreateAssociatedTokenAccount(
        rpc,
        payer,
        mint,
        authority.publicKey,
        undefined,
        undefined,
        confirmOptions,
        resolvedProgramId,
    );

    const instructions = await CompressedTokenProgram.approveAndMintTo({
        feePayer: payer.publicKey,
        mint,
        authority: authority.publicKey,
        authorityTokenAccount: authorityTokenAccount.address,
        amount,
        toPubkey: destination,
        merkleTree,
        tokenProgramId: resolvedProgramId,
    });

    const { blockhash } = await rpc.getLatestBlockhash();
    const signers = dedupeSigner(payer, [authority]);

    const signedTx = buildAndSignTx(
        [
            ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
            ...instructions,
        ],
        payer,
        blockhash,
        signers,
    );

    return await sendAndConfirmTx(rpc, signedTx, confirmOptions);
}
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/compress-spl-token-account.ts
import {
    ConfirmOptions,
    PublicKey,
    Signer,
    TransactionSignature,
    ComputeBudgetProgram,
} from '@solana/web3.js';
import {
    sendAndConfirmTx,
    buildAndSignTx,
    Rpc,
    dedupeSigner,
} from '@lightprotocol/stateless.js';
import { BN } from '@coral-xyz/anchor';
import { CompressedTokenProgram } from '../program';

/**
 * Compress SPL tokens into compressed token format
 *
 * @param rpc             Rpc connection to use
 * @param payer           Payer of the transaction fees
 * @param mint            Mint of the token to compress
 * @param owner           Owner of the token account
 * @param tokenAccount    Token account to compress
 * @param outputStateTree State tree to insert the compressed token account into
 * @param remainingAmount Optional: amount to leave in token account. Default: 0
 * @param confirmOptions  Options for confirming the transaction
 * @param tokenProgramId  Program that owns the mint. Resolved from the mint
 *                        account when omitted.
 *
 * @return Signature of the confirmed transaction
 */
export async function compressSplTokenAccount(
    rpc: Rpc,
    payer: Signer,
    mint: PublicKey,
    owner: Signer,
    tokenAccount: PublicKey,
    outputStateTree: PublicKey,
    remainingAmount?: BN,
    confirmOptions?: ConfirmOptions,
    tokenProgramId?: PublicKey,
): Promise<TransactionSignature> {
    // Resolve the owning token program unless the caller supplied it.
    const resolvedProgramId =
        tokenProgramId ??
        (await CompressedTokenProgram.get_mint_program_id(mint, rpc));

    const compressInstruction =
        await CompressedTokenProgram.compressSplTokenAccount({
            feePayer: payer.publicKey,
            authority: owner.publicKey,
            tokenAccount,
            mint,
            remainingAmount,
            outputStateTree,
            tokenProgramId: resolvedProgramId,
        });

    const blockhashCtx = await rpc.getLatestBlockhash();
    const signers = dedupeSigner(payer, [owner]);

    const signedTx = buildAndSignTx(
        [
            ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
            compressInstruction,
        ],
        payer,
        blockhashCtx.blockhash,
        signers,
    );

    return await sendAndConfirmTx(rpc, signedTx, confirmOptions, blockhashCtx);
}
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/index.ts
// Barrel file: re-exports every compressed-token action so consumers can
// import them from a single entry point.
export * from './approve-and-mint-to';
export * from './compress';
export * from './decompress';
export * from './create-mint';
export * from './mint-to';
export * from './merge-token-accounts';
export * from './create-token-pool';
export * from './transfer';
export * from './create-token-program-lookup-table';
export * from './compress-spl-token-account';
0
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src
solana_public_repos/Lightprotocol/light-protocol/js/compressed-token/src/actions/decompress.ts
import { ComputeBudgetProgram, ConfirmOptions, PublicKey, Signer, TransactionSignature, } from '@solana/web3.js'; import { bn, sendAndConfirmTx, buildAndSignTx, Rpc, dedupeSigner, } from '@lightprotocol/stateless.js'; import { BN } from '@coral-xyz/anchor'; import { CompressedTokenProgram } from '../program'; import { selectMinCompressedTokenAccountsForTransfer } from './transfer'; /** * Decompress compressed tokens * * @param rpc Rpc to use * @param payer Payer of the transaction fees * @param mint Mint of the compressed token * @param amount Number of tokens to transfer * @param owner Owner of the compressed tokens * @param toAddress Destination **uncompressed** (associated) token account * address. * @param merkleTree State tree account that any change compressed tokens should be * inserted into. Defaults to a default state tree * account. * @param confirmOptions Options for confirming the transaction * * * @return Signature of the confirmed transaction */ export async function decompress( rpc: Rpc, payer: Signer, mint: PublicKey, amount: number | BN, owner: Signer, toAddress: PublicKey, /// TODO: allow multiple merkleTree?: PublicKey, confirmOptions?: ConfirmOptions, tokenProgramId?: PublicKey, ): Promise<TransactionSignature> { tokenProgramId = tokenProgramId ? 
tokenProgramId : await CompressedTokenProgram.get_mint_program_id(mint, rpc); amount = bn(amount); const compressedTokenAccounts = await rpc.getCompressedTokenAccountsByOwner( owner.publicKey, { mint, }, ); /// TODO: consider using a different selection algorithm const [inputAccounts] = selectMinCompressedTokenAccountsForTransfer( compressedTokenAccounts.items, amount, ); const proof = await rpc.getValidityProof( inputAccounts.map(account => bn(account.compressedAccount.hash)), ); const ix = await CompressedTokenProgram.decompress({ payer: payer.publicKey, inputCompressedTokenAccounts: inputAccounts, toAddress, // TODO: add explicit check that it is a token account amount, outputStateTree: merkleTree, recentInputStateRootIndices: proof.rootIndices, recentValidityProof: proof.compressedProof, tokenProgramId, }); const { blockhash } = await rpc.getLatestBlockhash(); const additionalSigners = dedupeSigner(payer, [owner]); const signedTx = buildAndSignTx( [ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }), ix], payer, blockhash, additionalSigners, ); const txId = await sendAndConfirmTx(rpc, signedTx, confirmOptions); return txId; }
0
solana_public_repos/Lightprotocol/light-protocol
solana_public_repos/Lightprotocol/light-protocol/tsconfig/README.md
# `tsconfig` These are the base shared `tsconfig.json` files from which all other `tsconfig.json` files inherit.
0
solana_public_repos/Lightprotocol/light-protocol
solana_public_repos/Lightprotocol/light-protocol/tsconfig/package.json
{ "name": "@lightprotocol/tsconfig", "version": "0.0.0", "private": true, "files": [ "base.json" ] }
0
solana_public_repos/Lightprotocol/light-protocol
solana_public_repos/Lightprotocol/light-protocol/tsconfig/base.json
{ "$schema": "https://json.schemastore.org/tsconfig", "display": "Default", "compilerOptions": { "composite": false, "declaration": true, "declarationMap": true, "forceConsistentCasingInFileNames": true, "inlineSources": false, "isolatedModules": true, "moduleResolution": "node", "module": "CommonJS", "esModuleInterop": true, "noFallthroughCasesInSwitch": true, "preserveWatchOutput": true, "noUnusedLocals": false, "noUnusedParameters": false, "noImplicitAny": false, "skipLibCheck": true, "strict": true, "target": "ES2022" }, "exclude": ["node_modules"] }
0
solana_public_repos/Lightprotocol/light-protocol
solana_public_repos/Lightprotocol/light-protocol/utils/Cargo.toml
[package] name = "light-utils" version = "1.1.0" description = "Common utility functions used in Light Protocol" repository = "https://github.com/Lightprotocol/light-protocol" license = "Apache-2.0" edition = "2021" [dependencies] anyhow = "1.0" ark-ff = "0.4" light-bounded-vec = { version = "1.1.0", path = "../merkle-tree/bounded-vec" } num-bigint = { version = "0.4", features = ["rand"] } thiserror = "1.0" solana-program = { workspace = true } ark-bn254 = "0.4.0" rand = "0.8" [dev-dependencies] bytemuck = "1.17" memoffset = "0.9"
0
solana_public_repos/Lightprotocol/light-protocol/utils
solana_public_repos/Lightprotocol/light-protocol/utils/src/bigint.rs
use num_bigint::BigUint; use crate::UtilsError; /// Converts the given [`num_bigint::BigUint`](num_bigint::BigUint) into a little-endian /// byte array. pub fn bigint_to_le_bytes_array<const BYTES_SIZE: usize>( bigint: &BigUint, ) -> Result<[u8; BYTES_SIZE], UtilsError> { let mut array = [0u8; BYTES_SIZE]; let bytes = bigint.to_bytes_le(); if bytes.len() > BYTES_SIZE { return Err(UtilsError::InputTooLarge(BYTES_SIZE)); } array[..bytes.len()].copy_from_slice(bytes.as_slice()); Ok(array) } /// Converts the given [`ark_ff::BigUint`](ark_ff::BigUint) into a big-endian /// byte array. pub fn bigint_to_be_bytes_array<const BYTES_SIZE: usize>( bigint: &BigUint, ) -> Result<[u8; BYTES_SIZE], UtilsError> { let mut array = [0u8; BYTES_SIZE]; let bytes = bigint.to_bytes_be(); if bytes.len() > BYTES_SIZE { return Err(UtilsError::InputTooLarge(BYTES_SIZE)); } let start_pos = BYTES_SIZE - bytes.len(); array[start_pos..].copy_from_slice(bytes.as_slice()); Ok(array) } #[cfg(test)] mod test { use num_bigint::{RandBigInt, ToBigUint}; use rand::thread_rng; use super::*; const ITERATIONS: usize = 64; #[test] fn test_bigint_conversion_rand() { let mut rng = thread_rng(); for _ in 0..ITERATIONS { let b64 = rng.gen_biguint(32); let b64_converted: [u8; 8] = bigint_to_be_bytes_array(&b64).unwrap(); let b64_converted = BigUint::from_bytes_be(&b64_converted); assert_eq!(b64, b64_converted); let b64_converted: [u8; 8] = bigint_to_le_bytes_array(&b64).unwrap(); let b64_converted = BigUint::from_bytes_le(&b64_converted); assert_eq!(b64, b64_converted); let b128 = rng.gen_biguint(128); let b128_converted: [u8; 16] = bigint_to_be_bytes_array(&b128).unwrap(); let b128_converted = BigUint::from_bytes_be(&b128_converted); assert_eq!(b128, b128_converted); let b128_converted: [u8; 16] = bigint_to_le_bytes_array(&b128).unwrap(); let b128_converted = BigUint::from_bytes_le(&b128_converted); assert_eq!(b128, b128_converted); let b256 = rng.gen_biguint(256); let b256_converted: [u8; 32] = 
bigint_to_be_bytes_array(&b256).unwrap(); let b256_converted = BigUint::from_bytes_be(&b256_converted); assert_eq!(b256, b256_converted); let b256_converted: [u8; 32] = bigint_to_le_bytes_array(&b256).unwrap(); let b256_converted = BigUint::from_bytes_le(&b256_converted); assert_eq!(b256, b256_converted); let b320 = rng.gen_biguint(320); let b320_converted: [u8; 40] = bigint_to_be_bytes_array(&b320).unwrap(); let b320_converted = BigUint::from_bytes_be(&b320_converted); assert_eq!(b320, b320_converted); let b320_converted: [u8; 40] = bigint_to_le_bytes_array(&b320).unwrap(); let b320_converted = BigUint::from_bytes_le(&b320_converted); assert_eq!(b320, b320_converted); let b384 = rng.gen_biguint(384); let b384_converted: [u8; 48] = bigint_to_be_bytes_array(&b384).unwrap(); let b384_converted = BigUint::from_bytes_be(&b384_converted); assert_eq!(b384, b384_converted); let b384_converted: [u8; 48] = bigint_to_le_bytes_array(&b384).unwrap(); let b384_converted = BigUint::from_bytes_le(&b384_converted); assert_eq!(b384, b384_converted); let b448 = rng.gen_biguint(448); let b448_converted: [u8; 56] = bigint_to_be_bytes_array(&b448).unwrap(); let b448_converted = BigUint::from_bytes_be(&b448_converted); assert_eq!(b448, b448_converted); let b448_converted: [u8; 56] = bigint_to_le_bytes_array(&b448).unwrap(); let b448_converted = BigUint::from_bytes_le(&b448_converted); assert_eq!(b448, b448_converted); let b768 = rng.gen_biguint(768); let b768_converted: [u8; 96] = bigint_to_be_bytes_array(&b768).unwrap(); let b768_converted = BigUint::from_bytes_be(&b768_converted); assert_eq!(b768, b768_converted); let b768_converted: [u8; 96] = bigint_to_le_bytes_array(&b768).unwrap(); let b768_converted = BigUint::from_bytes_le(&b768_converted); assert_eq!(b768, b768_converted); let b832 = rng.gen_biguint(832); let b832_converted: [u8; 104] = bigint_to_be_bytes_array(&b832).unwrap(); let b832_converted = BigUint::from_bytes_be(&b832_converted); assert_eq!(b832, b832_converted); let 
b832_converted: [u8; 104] = bigint_to_le_bytes_array(&b832).unwrap(); let b832_converted = BigUint::from_bytes_le(&b832_converted); assert_eq!(b832, b832_converted); } } #[test] fn test_bigint_conversion_zero() { let zero = 0_u32.to_biguint().unwrap(); let b64_converted: [u8; 8] = bigint_to_be_bytes_array(&zero).unwrap(); let b64_converted = BigUint::from_bytes_be(&b64_converted); assert_eq!(zero, b64_converted); let b64_converted: [u8; 8] = bigint_to_le_bytes_array(&zero).unwrap(); let b64_converted = BigUint::from_bytes_le(&b64_converted); assert_eq!(zero, b64_converted); let b128_converted: [u8; 16] = bigint_to_be_bytes_array(&zero).unwrap(); let b128_converted = BigUint::from_bytes_be(&b128_converted); assert_eq!(zero, b128_converted); let b128_converted: [u8; 16] = bigint_to_le_bytes_array(&zero).unwrap(); let b128_converted = BigUint::from_bytes_le(&b128_converted); assert_eq!(zero, b128_converted); let b256_converted: [u8; 32] = bigint_to_be_bytes_array(&zero).unwrap(); let b256_converted = BigUint::from_bytes_be(&b256_converted); assert_eq!(zero, b256_converted); let b256_converted: [u8; 32] = bigint_to_le_bytes_array(&zero).unwrap(); let b256_converted = BigUint::from_bytes_le(&b256_converted); assert_eq!(zero, b256_converted); let b320_converted: [u8; 40] = bigint_to_be_bytes_array(&zero).unwrap(); let b320_converted = BigUint::from_bytes_be(&b320_converted); assert_eq!(zero, b320_converted); let b320_converted: [u8; 40] = bigint_to_le_bytes_array(&zero).unwrap(); let b320_converted = BigUint::from_bytes_le(&b320_converted); assert_eq!(zero, b320_converted); let b384_converted: [u8; 48] = bigint_to_be_bytes_array(&zero).unwrap(); let b384_converted = BigUint::from_bytes_be(&b384_converted); assert_eq!(zero, b384_converted); let b384_converted: [u8; 48] = bigint_to_le_bytes_array(&zero).unwrap(); let b384_converted = BigUint::from_bytes_le(&b384_converted); assert_eq!(zero, b384_converted); let b448_converted: [u8; 56] = 
bigint_to_be_bytes_array(&zero).unwrap(); let b448_converted = BigUint::from_bytes_be(&b448_converted); assert_eq!(zero, b448_converted); let b448_converted: [u8; 56] = bigint_to_le_bytes_array(&zero).unwrap(); let b448_converted = BigUint::from_bytes_le(&b448_converted); assert_eq!(zero, b448_converted); let b768_converted: [u8; 96] = bigint_to_be_bytes_array(&zero).unwrap(); let b768_converted = BigUint::from_bytes_be(&b768_converted); assert_eq!(zero, b768_converted); let b768_converted: [u8; 96] = bigint_to_le_bytes_array(&zero).unwrap(); let b768_converted = BigUint::from_bytes_le(&b768_converted); assert_eq!(zero, b768_converted); let b832_converted: [u8; 104] = bigint_to_be_bytes_array(&zero).unwrap(); let b832_converted = BigUint::from_bytes_be(&b832_converted); assert_eq!(zero, b832_converted); let b832_converted: [u8; 104] = bigint_to_le_bytes_array(&zero).unwrap(); let b832_converted = BigUint::from_bytes_le(&b832_converted); assert_eq!(zero, b832_converted); } #[test] fn test_bigint_conversion_one() { let one = 1_u32.to_biguint().unwrap(); let b64_converted: [u8; 8] = bigint_to_be_bytes_array(&one).unwrap(); let b64_converted = BigUint::from_bytes_be(&b64_converted); assert_eq!(one, b64_converted); let b64_converted: [u8; 8] = bigint_to_le_bytes_array(&one).unwrap(); let b64_converted = BigUint::from_bytes_le(&b64_converted); assert_eq!(one, b64_converted); let b64 = BigUint::from_bytes_be(&[0, 0, 0, 0, 0, 0, 0, 1]); assert_eq!(one, b64); let b64 = BigUint::from_bytes_le(&[1, 0, 0, 0, 0, 0, 0, 0]); assert_eq!(one, b64); let b128_converted: [u8; 16] = bigint_to_be_bytes_array(&one).unwrap(); let b128_converted = BigUint::from_bytes_be(&b128_converted); assert_eq!(one, b128_converted); let b128_converted: [u8; 16] = bigint_to_le_bytes_array(&one).unwrap(); let b128_converted = BigUint::from_bytes_le(&b128_converted); assert_eq!(one, b128_converted); let b256_converted: [u8; 32] = bigint_to_be_bytes_array(&one).unwrap(); let b256_converted = 
BigUint::from_bytes_be(&b256_converted); assert_eq!(one, b256_converted); let b256_converted: [u8; 32] = bigint_to_le_bytes_array(&one).unwrap(); let b256_converted = BigUint::from_bytes_le(&b256_converted); assert_eq!(one, b256_converted); let b320_converted: [u8; 40] = bigint_to_be_bytes_array(&one).unwrap(); let b320_converted = BigUint::from_bytes_be(&b320_converted); assert_eq!(one, b320_converted); let b320_converted: [u8; 40] = bigint_to_le_bytes_array(&one).unwrap(); let b320_converted = BigUint::from_bytes_le(&b320_converted); assert_eq!(one, b320_converted); let b384_converted: [u8; 48] = bigint_to_be_bytes_array(&one).unwrap(); let b384_converted = BigUint::from_bytes_be(&b384_converted); assert_eq!(one, b384_converted); let b384_converted: [u8; 48] = bigint_to_le_bytes_array(&one).unwrap(); let b384_converted = BigUint::from_bytes_le(&b384_converted); assert_eq!(one, b384_converted); let b448_converted: [u8; 56] = bigint_to_be_bytes_array(&one).unwrap(); let b448_converted = BigUint::from_bytes_be(&b448_converted); assert_eq!(one, b448_converted); let b448_converted: [u8; 56] = bigint_to_le_bytes_array(&one).unwrap(); let b448_converted = BigUint::from_bytes_le(&b448_converted); assert_eq!(one, b448_converted); let b768_converted: [u8; 96] = bigint_to_be_bytes_array(&one).unwrap(); let b768_converted = BigUint::from_bytes_be(&b768_converted); assert_eq!(one, b768_converted); let b768_converted: [u8; 96] = bigint_to_le_bytes_array(&one).unwrap(); let b768_converted = BigUint::from_bytes_le(&b768_converted); assert_eq!(one, b768_converted); let b832_converted: [u8; 104] = bigint_to_be_bytes_array(&one).unwrap(); let b832_converted = BigUint::from_bytes_be(&b832_converted); assert_eq!(one, b832_converted); let b832_converted: [u8; 104] = bigint_to_le_bytes_array(&one).unwrap(); let b832_converted = BigUint::from_bytes_le(&b832_converted); assert_eq!(one, b832_converted); } #[test] fn test_bigint_conversion_invalid_size() { let b8 = 
BigUint::from_bytes_be(&[1; 8]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b8); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 7], UtilsError> = bigint_to_be_bytes_array(&b8); assert!(matches!(res, Err(UtilsError::InputTooLarge(7)))); let res: Result<[u8; 8], UtilsError> = bigint_to_be_bytes_array(&b8); assert!(res.is_ok()); let b8 = BigUint::from_bytes_le(&[1; 8]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b8); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 7], UtilsError> = bigint_to_le_bytes_array(&b8); assert!(matches!(res, Err(UtilsError::InputTooLarge(7)))); let res: Result<[u8; 8], UtilsError> = bigint_to_le_bytes_array(&b8); assert!(res.is_ok()); let b16 = BigUint::from_bytes_be(&[1; 16]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b16); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 15], UtilsError> = bigint_to_be_bytes_array(&b16); assert!(matches!(res, Err(UtilsError::InputTooLarge(15)))); let res: Result<[u8; 16], UtilsError> = bigint_to_be_bytes_array(&b16); assert!(res.is_ok()); let b16 = BigUint::from_bytes_le(&[1; 16]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b16); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 15], UtilsError> = bigint_to_le_bytes_array(&b16); assert!(matches!(res, Err(UtilsError::InputTooLarge(15)))); let res: Result<[u8; 16], UtilsError> = bigint_to_le_bytes_array(&b16); assert!(res.is_ok()); let b32 = BigUint::from_bytes_be(&[1; 32]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b32); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 31], UtilsError> = bigint_to_be_bytes_array(&b32); assert!(matches!(res, Err(UtilsError::InputTooLarge(31)))); let res: Result<[u8; 32], UtilsError> = bigint_to_be_bytes_array(&b32); assert!(res.is_ok()); let b32 = 
BigUint::from_bytes_le(&[1; 32]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b32); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 31], UtilsError> = bigint_to_le_bytes_array(&b32); assert!(matches!(res, Err(UtilsError::InputTooLarge(31)))); let res: Result<[u8; 32], UtilsError> = bigint_to_le_bytes_array(&b32); assert!(res.is_ok()); let b64 = BigUint::from_bytes_be(&[1; 64]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b64); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 63], UtilsError> = bigint_to_be_bytes_array(&b64); assert!(matches!(res, Err(UtilsError::InputTooLarge(63)))); let res: Result<[u8; 64], UtilsError> = bigint_to_be_bytes_array(&b64); assert!(res.is_ok()); let b64 = BigUint::from_bytes_le(&[1; 64]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b64); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 63], UtilsError> = bigint_to_le_bytes_array(&b64); assert!(matches!(res, Err(UtilsError::InputTooLarge(63)))); let res: Result<[u8; 64], UtilsError> = bigint_to_le_bytes_array(&b64); assert!(res.is_ok()); let b128 = BigUint::from_bytes_be(&[1; 128]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b128); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 127], UtilsError> = bigint_to_be_bytes_array(&b128); assert!(matches!(res, Err(UtilsError::InputTooLarge(127)))); let res: Result<[u8; 128], UtilsError> = bigint_to_be_bytes_array(&b128); assert!(res.is_ok()); let b128 = BigUint::from_bytes_le(&[1; 128]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b128); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 127], UtilsError> = bigint_to_le_bytes_array(&b128); assert!(matches!(res, Err(UtilsError::InputTooLarge(127)))); let res: Result<[u8; 128], UtilsError> = bigint_to_le_bytes_array(&b128); 
assert!(res.is_ok()); let b256 = BigUint::from_bytes_be(&[1; 256]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b256); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 255], UtilsError> = bigint_to_be_bytes_array(&b256); assert!(matches!(res, Err(UtilsError::InputTooLarge(255)))); let res: Result<[u8; 256], UtilsError> = bigint_to_be_bytes_array(&b256); assert!(res.is_ok()); let b256 = BigUint::from_bytes_le(&[1; 256]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b256); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 255], UtilsError> = bigint_to_le_bytes_array(&b256); assert!(matches!(res, Err(UtilsError::InputTooLarge(255)))); let res: Result<[u8; 256], UtilsError> = bigint_to_le_bytes_array(&b256); assert!(res.is_ok()); let b512 = BigUint::from_bytes_be(&[1; 512]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b512); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 511], UtilsError> = bigint_to_be_bytes_array(&b512); assert!(matches!(res, Err(UtilsError::InputTooLarge(511)))); let res: Result<[u8; 512], UtilsError> = bigint_to_be_bytes_array(&b512); assert!(res.is_ok()); let b512 = BigUint::from_bytes_le(&[1; 512]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b512); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 511], UtilsError> = bigint_to_le_bytes_array(&b512); assert!(matches!(res, Err(UtilsError::InputTooLarge(511)))); let res: Result<[u8; 512], UtilsError> = bigint_to_le_bytes_array(&b512); assert!(res.is_ok()); let b768 = BigUint::from_bytes_be(&[1; 768]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b768); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 767], UtilsError> = bigint_to_be_bytes_array(&b768); assert!(matches!(res, Err(UtilsError::InputTooLarge(767)))); let res: Result<[u8; 768], 
UtilsError> = bigint_to_be_bytes_array(&b768); assert!(res.is_ok()); let b768 = BigUint::from_bytes_le(&[1; 768]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b768); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 767], UtilsError> = bigint_to_le_bytes_array(&b768); assert!(matches!(res, Err(UtilsError::InputTooLarge(767)))); let res: Result<[u8; 768], UtilsError> = bigint_to_le_bytes_array(&b768); assert!(res.is_ok()); let b1024 = BigUint::from_bytes_be(&[1; 1024]); let res: Result<[u8; 1], UtilsError> = bigint_to_be_bytes_array(&b1024); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 1023], UtilsError> = bigint_to_be_bytes_array(&b1024); assert!(matches!(res, Err(UtilsError::InputTooLarge(1023)))); let res: Result<[u8; 1024], UtilsError> = bigint_to_be_bytes_array(&b1024); assert!(res.is_ok()); let b1024 = BigUint::from_bytes_le(&[1; 1024]); let res: Result<[u8; 1], UtilsError> = bigint_to_le_bytes_array(&b1024); assert!(matches!(res, Err(UtilsError::InputTooLarge(1)))); let res: Result<[u8; 1023], UtilsError> = bigint_to_le_bytes_array(&b1024); assert!(matches!(res, Err(UtilsError::InputTooLarge(1023)))); let res: Result<[u8; 1024], UtilsError> = bigint_to_le_bytes_array(&b1024); assert!(res.is_ok()); } }
0
solana_public_repos/Lightprotocol/light-protocol/utils
solana_public_repos/Lightprotocol/light-protocol/utils/src/fee.rs
use crate::UtilsError; pub fn compute_rollover_fee( rollover_threshold: u64, tree_height: u32, rent: u64, ) -> Result<u64, UtilsError> { let number_of_transactions = 1 << tree_height; if rollover_threshold > 100 { return Err(UtilsError::InvalidRolloverThreshold); } if rollover_threshold == 0 { return Ok(rent); } // rent / (total_number_of_leaves * (rollover_threshold / 100)) // (with ceil division) Ok((rent * 100).div_ceil(number_of_transactions * rollover_threshold)) } #[test] fn test_compute_rollover_fee() { let rollover_threshold = 100; let tree_height = 26; let rent = 1392890880; let total_number_of_leaves = 1 << tree_height; let fee = compute_rollover_fee(rollover_threshold, tree_height, rent).unwrap(); // assert_ne!(fee, 0u64); assert!((fee + 1) * (total_number_of_leaves * 100 / rollover_threshold) > rent); let rollover_threshold = 50; let fee = compute_rollover_fee(rollover_threshold, tree_height, rent).unwrap(); assert!((fee + 1) * (total_number_of_leaves * 100 / rollover_threshold) > rent); let rollover_threshold: u64 = 95; let fee = compute_rollover_fee(rollover_threshold, tree_height, rent).unwrap(); assert!((fee + 1) * (total_number_of_leaves * 100 / rollover_threshold) > rent); } #[test] fn test_concurrent_tree_compute_rollover_fee() { let merkle_tree_lamports = 9496836480; let queue_lamports = 2293180800; let cpi_context_lamports = 143487360; let rollover_threshold = 95; let height = 26; let rollover_fee = compute_rollover_fee( rollover_threshold, height, merkle_tree_lamports + cpi_context_lamports, ) .unwrap() + compute_rollover_fee(rollover_threshold, height, queue_lamports).unwrap(); let lifetime_lamports = rollover_fee * ((2u64.pow(height)) as f64 * 0.95) as u64; println!("rollover_fee: {}", rollover_fee); println!("lifetime_lamports: {}", lifetime_lamports); println!( "lifetime_lamports < total lamports: {}", lifetime_lamports > merkle_tree_lamports + queue_lamports + cpi_context_lamports ); println!( "lifetime_lamports - total lamports: {}", 
lifetime_lamports - (merkle_tree_lamports + queue_lamports + cpi_context_lamports) ); assert!(lifetime_lamports > (merkle_tree_lamports + queue_lamports + cpi_context_lamports)); } #[test] fn test_address_tree_compute_rollover_fee() { let merkle_tree_lamports = 9639711360; let queue_lamports = 2293180800; let rollover_threshold = 95; let height = 26; let rollover_fee = compute_rollover_fee(rollover_threshold, height, merkle_tree_lamports) .unwrap() + compute_rollover_fee(rollover_threshold, height, queue_lamports).unwrap(); let lifetime_lamports = rollover_fee * ((2u64.pow(height)) as f64 * 0.95) as u64; println!("rollover_fee: {}", rollover_fee); println!("lifetime_lamports: {}", lifetime_lamports); println!( "lifetime_lamports < total lamports: {}", lifetime_lamports > merkle_tree_lamports + queue_lamports ); println!( "lifetime_lamports - total lamports: {}", lifetime_lamports - (merkle_tree_lamports + queue_lamports) ); assert!(lifetime_lamports > (merkle_tree_lamports + queue_lamports)); }
0
solana_public_repos/Lightprotocol/light-protocol/utils
solana_public_repos/Lightprotocol/light-protocol/utils/src/lib.rs
use std::{ env, io::{self, prelude::*}, process::{Command, Stdio}, thread::spawn, }; use ark_ff::PrimeField; use num_bigint::BigUint; use solana_program::keccak::hashv; use thiserror::Error; pub mod bigint; pub mod fee; pub mod offset; pub mod prime; pub mod rand; #[derive(Debug, Error, PartialEq)] pub enum UtilsError { #[error("Invalid input size, expected at most {0}")] InputTooLarge(usize), #[error("Invalid chunk size")] InvalidChunkSize, #[error("Invalid seeds")] InvalidSeeds, #[error("Invalid rollover thresold")] InvalidRolloverThreshold, } // NOTE(vadorovsky): Unfortunately, we need to do it by hand. // `num_derive::ToPrimitive` doesn't support data-carrying enums. impl From<UtilsError> for u32 { fn from(e: UtilsError) -> u32 { match e { UtilsError::InputTooLarge(_) => 12001, UtilsError::InvalidChunkSize => 12002, UtilsError::InvalidSeeds => 12003, UtilsError::InvalidRolloverThreshold => 12004, } } } impl From<UtilsError> for solana_program::program_error::ProgramError { fn from(e: UtilsError) -> Self { solana_program::program_error::ProgramError::Custom(e.into()) } } pub fn is_smaller_than_bn254_field_size_be(bytes: &[u8; 32]) -> bool { let bigint = BigUint::from_bytes_be(bytes); bigint < ark_bn254::Fr::MODULUS.into() } pub fn hash_to_bn254_field_size_be(bytes: &[u8]) -> Option<([u8; 32], u8)> { let mut bump_seed = [u8::MAX]; // Loops with decreasing bump seed to find a valid hash which is less than // bn254 Fr modulo field size. for _ in 0..u8::MAX { { let mut hashed_value: [u8; 32] = hashv(&[bytes, bump_seed.as_ref()]).to_bytes(); // Truncates to 31 bytes so that value is less than bn254 Fr modulo // field size. 
hashed_value[0] = 0; if is_smaller_than_bn254_field_size_be(&hashed_value) { return Some((hashed_value, bump_seed[0])); } } bump_seed[0] -= 1; } None } /// Hashes the provided `bytes` with Keccak256 and ensures the result fits /// in the BN254 prime field by repeatedly hashing the inputs with various /// "bump seeds" and truncating the resulting hash to 31 bytes. /// /// The attempted "bump seeds" are bytes from 255 to 0. /// /// # Examples /// /// ``` /// use light_utils::hashv_to_bn254_field_size_be; /// /// hashv_to_bn254_field_size_be(&[b"foo", b"bar"]); /// ``` pub fn hashv_to_bn254_field_size_be(bytes: &[&[u8]]) -> [u8; 32] { let mut hashed_value: [u8; 32] = hashv(bytes).to_bytes(); // Truncates to 31 bytes so that value is less than bn254 Fr modulo // field size. hashed_value[0] = 0; hashed_value } /// Applies `rustfmt` on the given string containing Rust code. The purpose of /// this function is to be able to format autogenerated code (e.g. with `quote` /// macro). pub fn rustfmt(code: String) -> Result<Vec<u8>, anyhow::Error> { let mut cmd = match env::var_os("RUSTFMT") { Some(r) => Command::new(r), None => Command::new("rustfmt"), }; let mut cmd = cmd .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn()?; let mut stdin = cmd.stdin.take().unwrap(); let mut stdout = cmd.stdout.take().unwrap(); let stdin_handle = spawn(move || { stdin.write_all(code.as_bytes()).unwrap(); }); let mut formatted_code = vec![]; io::copy(&mut stdout, &mut formatted_code)?; let _ = cmd.wait(); stdin_handle.join().unwrap(); Ok(formatted_code) } #[cfg(test)] mod tests { use num_bigint::ToBigUint; use solana_program::pubkey::Pubkey; use crate::bigint::bigint_to_be_bytes_array; use super::*; #[test] fn test_is_smaller_than_bn254_field_size_be() { let modulus: BigUint = ark_bn254::Fr::MODULUS.into(); let modulus_bytes: [u8; 32] = bigint_to_be_bytes_array(&modulus).unwrap(); assert!(!is_smaller_than_bn254_field_size_be(&modulus_bytes)); let bigint = 
modulus.clone() - 1.to_biguint().unwrap(); let bigint_bytes: [u8; 32] = bigint_to_be_bytes_array(&bigint).unwrap(); assert!(is_smaller_than_bn254_field_size_be(&bigint_bytes)); let bigint = modulus + 1.to_biguint().unwrap(); let bigint_bytes: [u8; 32] = bigint_to_be_bytes_array(&bigint).unwrap(); assert!(!is_smaller_than_bn254_field_size_be(&bigint_bytes)); } #[test] fn test_hash_to_bn254_field_size_be() { for _ in 0..10_000 { let input_bytes = Pubkey::new_unique().to_bytes(); // Sample input let (hashed_value, bump) = hash_to_bn254_field_size_be(input_bytes.as_slice()) .expect("Failed to find a hash within BN254 field size"); assert_eq!(bump, 255, "Bump seed should be 0"); assert!( is_smaller_than_bn254_field_size_be(&hashed_value), "Hashed value should be within BN254 field size" ); } let max_input = [u8::MAX; 32]; let (hashed_value, bump) = hash_to_bn254_field_size_be(max_input.as_slice()) .expect("Failed to find a hash within BN254 field size"); assert_eq!(bump, 255, "Bump seed should be 255"); assert!( is_smaller_than_bn254_field_size_be(&hashed_value), "Hashed value should be within BN254 field size" ); } #[test] fn test_hashv_to_bn254_field_size_be() { for _ in 0..10_000 { let input_bytes = [Pubkey::new_unique().to_bytes(); 4]; let input_bytes = input_bytes.iter().map(|x| x.as_slice()).collect::<Vec<_>>(); let hashed_value = hashv_to_bn254_field_size_be(input_bytes.as_slice()); assert!( is_smaller_than_bn254_field_size_be(&hashed_value), "Hashed value should be within BN254 field size" ); } let max_input = [[u8::MAX; 32]; 16]; let max_input = max_input.iter().map(|x| x.as_slice()).collect::<Vec<_>>(); let hashed_value = hashv_to_bn254_field_size_be(max_input.as_slice()); assert!( is_smaller_than_bn254_field_size_be(&hashed_value), "Hashed value should be within BN254 field size" ); } #[test] fn test_rustfmt() { let unformatted_code = "use std::mem; fn main() { println!(\"{}\", mem::size_of::<u64>()); } " .to_string(); let formatted_code = 
rustfmt(unformatted_code).unwrap(); assert_eq!( String::from_utf8_lossy(&formatted_code), "use std::mem; fn main() { println!(\"{}\", mem::size_of::<u64>()); } " ); } }
0
solana_public_repos/Lightprotocol/light-protocol/utils
solana_public_repos/Lightprotocol/light-protocol/utils/src/prime.rs
/// Finds the lowest prime number which is greater than the provided number
/// `n`.
pub fn find_next_prime(mut n: u32) -> u32 {
    // Small inputs are handled directly.
    if n <= 2 {
        return 2;
    }
    if n <= 3 {
        return 3;
    }

    // Every prime above 3 has the form 6k + 1 or 6k + 5 (equivalently
    // 6k - 1), because:
    //
    //   6k         is divisible by 2 and 3,
    //   6k + 2     is divisible by 2,
    //   6k + 3     is divisible by 3,
    //   6k + 4     is divisible by 2,
    //
    // leaving only 6k + 1 and 6k + 5 as possible primes.

    let rem = n % 6;
    if rem != 0 {
        // `n` itself might already match one of the prime-shaped residues.
        if (rem == 1 || rem == 5) && is_prime(n) {
            return n;
        }
        // Round `n` up to the next multiple of 6 ...
        n += 6 - rem;
        // ... and first try the 6k - 1 candidate we just skipped over.
        let below = n - 1;
        if is_prime(below) {
            return below;
        }
    }

    // Walk multiples of 6 upwards, probing 6k + 1 and then 6k + 5.
    loop {
        for candidate in [n + 1, n + 5] {
            if is_prime(candidate) {
                return candidate;
            }
        }
        n += 6;
    }
}

/// Like [`find_next_prime`], but first divides `n` by `load_factor` (useful
/// for sizing hash tables).
pub fn find_next_prime_with_load_factor(n: u32, load_factor: f64) -> u32 {
    // SAFETY: These type coercions should not cause any issues.
    //
    // * `f64` can precisely represent all integer values up to 2^53, which is
    //   more than `u32::MAX`. `u64` and `usize` would be too large though.
    // * We want to return and find an integer (prime number), so coercing
    //   `f64` back to `u32` is intentional here.
    let minimum = n as f64 / load_factor;
    find_next_prime(minimum as u32)
}

/// Checks whether the provided number `n` is a prime number.
pub fn is_prime(n: u32) -> bool {
    match n {
        0 | 1 => false,
        2 | 3 => true,
        _ if n % 2 == 0 || n % 3 == 0 => false,
        _ => {
            // Trial division by 6k ± 1 candidates up to sqrt(n).
            let mut divisor = 5;
            while divisor * divisor <= n {
                if n % divisor == 0 || n % (divisor + 2) == 0 {
                    return false;
                }
                divisor += 6;
            }
            true
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_find_next_prime() {
        assert_eq!(find_next_prime(0), 2);
        assert_eq!(find_next_prime(2), 2);
        assert_eq!(find_next_prime(3), 3);
        assert_eq!(find_next_prime(4), 5);
        assert_eq!(find_next_prime(10), 11);
        assert_eq!(find_next_prime(17), 17);
        assert_eq!(find_next_prime(19), 19);
        assert_eq!(find_next_prime(28), 29);
        assert_eq!(find_next_prime(100), 101);
        assert_eq!(find_next_prime(102), 103);
        assert_eq!(find_next_prime(105), 107);
        assert_eq!(find_next_prime(1000), 1009);
        assert_eq!(find_next_prime(2000), 2003);
        assert_eq!(find_next_prime(3000), 3001);
        assert_eq!(find_next_prime(4000), 4001);
        assert_eq!(find_next_prime(4800), 4801);
        assert_eq!(find_next_prime(5000), 5003);
        assert_eq!(find_next_prime(6000), 6007);
        assert_eq!(find_next_prime(6850), 6857);
        assert_eq!(find_next_prime(7000), 7001);
        assert_eq!(find_next_prime(7900), 7901);
        assert_eq!(find_next_prime(7907), 7907);
    }

    #[test]
    fn test_find_next_prime_with_load_factor() {
        assert_eq!(find_next_prime_with_load_factor(4800, 0.5), 9601);
        assert_eq!(find_next_prime_with_load_factor(4800, 0.7), 6857);
    }

    #[test]
    fn test_is_prime() {
        assert_eq!(is_prime(1), false);
        assert_eq!(is_prime(2), true);
        assert_eq!(is_prime(3), true);
        assert_eq!(is_prime(4), false);
        assert_eq!(is_prime(17), true);
        assert_eq!(is_prime(19), true);
    }
}
0
solana_public_repos/Lightprotocol/light-protocol/utils
solana_public_repos/Lightprotocol/light-protocol/utils/src/rand.rs
use std::ops::{Bound, RangeBounds}; use rand::{ distributions::uniform::{SampleRange, SampleUniform}, Rng, }; use crate::prime::find_next_prime; const PRIME_RETRIES: usize = 10; /// Generates a random prime number in the given range. It returns `None` when /// generating such number was not possible. pub fn gen_prime<N, R, T>(rng: &mut N, range: R) -> Option<T> where N: Rng, R: Clone + RangeBounds<T> + SampleRange<T>, T: Into<u32> + From<u32> + Copy + PartialOrd + SampleUniform, { for _ in 0..PRIME_RETRIES { let sample: T = rng.gen_range(range.clone()); let next_prime = find_next_prime(sample.into()); match range.end_bound() { Bound::Included(end) => { if next_prime > (*end).into() { continue; } } Bound::Excluded(end) => { if next_prime >= (*end).into() { continue; } } _ => {} }; return Some(T::from(next_prime)); } None } /// Generates a random value in the given range, excluding the values provided /// in `exclude`. pub fn gen_range_exclude<N, R, T>(rng: &mut N, range: R, exclude: &[T]) -> T where N: Rng, R: Clone + SampleRange<T>, T: PartialEq + SampleUniform, { loop { // This utility is supposed to be used only in unit tests. This `clone` // is harmless and necessary (can't pass a reference to range, it has // to be moved). 
let sample = rng.gen_range(range.clone()); if !exclude.contains(&sample) { return sample; } } } #[cfg(test)] mod test { use rand::Rng; use crate::prime::is_prime; use super::*; #[test] fn test_gen_prime() { let mut rng = rand::thread_rng(); let mut successful_gens = 0; for i in 0..10_000 { let sample: Option<u32> = gen_prime(&mut rng, 1..10_000); println!("sample {i}: {sample:?}"); if let Some(sample) = sample { successful_gens += 1; assert!(is_prime(sample)); } } println!("generated {successful_gens} prime numbers out of 10000 iterations"); } #[test] fn test_gen_range_exclude() { let mut rng = rand::thread_rng(); for n_excluded in 1..100 { let excluded: Vec<u64> = (0..n_excluded).map(|_| rng.gen_range(0..100)).collect(); for _ in 0..10_000 { let sample = gen_range_exclude(&mut rng, 0..100, excluded.as_slice()); for excluded in excluded.iter() { assert_ne!(&sample, excluded); } } } } }
0
solana_public_repos/Lightprotocol/light-protocol/utils/src
solana_public_repos/Lightprotocol/light-protocol/utils/src/offset/copy.rs
use std::{mem, ptr};

use light_bounded_vec::{
    BoundedVec, BoundedVecMetadata, CyclicBoundedVec, CyclicBoundedVecMetadata,
};

/// Creates a copy of value of type `T` based on the provided `bytes` buffer.
///
/// # Safety
///
/// This is highly unsafe. This function doesn't ensure alignment and
/// correctness of provided buffer. The responsibility of such checks is on
/// the caller.
pub unsafe fn read_value_at<T>(bytes: &[u8], offset: &mut usize) -> T
where
    T: Clone,
{
    let size = mem::size_of::<T>();
    // Slicing bounds-checks that `size` bytes exist at `offset`; alignment
    // is NOT checked (see Safety above).
    let ptr = bytes[*offset..*offset + size].as_ptr() as *const T;
    // Advance the cursor so sequential reads walk the buffer.
    *offset += size;
    ptr::read(ptr)
}

/// Creates a `BoundedVec` from the sequence of values provided in `bytes` buffer.
///
/// # Safety
///
/// This is highly unsafe. This function doesn't ensure alignment and
/// correctness of provided buffer. The responsibility of such checks is on
/// the caller.
///
/// The `T` type needs to be either a primitive or struct consisting of
/// primitives. It cannot contain any nested heap-backed structure (like vectors,
/// slices etc.).
pub unsafe fn read_bounded_vec_at<T>(
    bytes: &[u8],
    offset: &mut usize,
    metadata: &BoundedVecMetadata,
) -> BoundedVec<T>
where
    T: Clone,
{
    // The on-disk region covers the full capacity, even if fewer elements
    // are initialized.
    let size = mem::size_of::<T>() * metadata.capacity();
    let ptr = bytes[*offset..*offset + size].as_ptr() as *const T;

    let mut vec = BoundedVec::with_metadata(metadata);
    let dst_ptr: *mut T = vec.as_mut_ptr();

    // Copy only the `length()` initialized elements into the new vector.
    for i in 0..metadata.length() {
        let val = ptr::read(ptr.add(i));
        // SAFETY: We ensured the bounds.
        unsafe { ptr::write(dst_ptr.add(i), val) };
    }

    // The offset advances past the whole capacity-sized region.
    *offset += size;

    vec
}

/// Creates a `CyclicBoundedVec` from the sequence of values provided in
/// `bytes` buffer.
///
/// # Safety
///
/// This is highly unsafe. This function doesn't ensure alignment and
/// correctness of provided buffer. The responsibility of such checks is on
/// the caller.
pub unsafe fn read_cyclic_bounded_vec_at<T>(
    bytes: &[u8],
    offset: &mut usize,
    metadata: &CyclicBoundedVecMetadata,
) -> CyclicBoundedVec<T>
where
    T: Clone,
{
    let size = mem::size_of::<T>() * metadata.capacity();
    let src_ptr = bytes[*offset..*offset + size].as_ptr() as *const T;

    let mut vec = CyclicBoundedVec::with_metadata(metadata);
    let dst_ptr: *mut T = vec.as_mut_ptr();

    // Elements are copied by physical slot index; the cyclic first/last
    // indices come from `metadata`, not from the copy order.
    for i in 0..metadata.length() {
        let val = ptr::read(src_ptr.add(i));
        // SAFETY: We ensured the bounds.
        unsafe { ptr::write(dst_ptr.add(i), val) };
    }

    *offset += size;

    vec
}

#[cfg(test)]
mod test {
    use std::slice;

    use super::*;
    use bytemuck::{Pod, Zeroable};
    use memoffset::offset_of;

    #[test]
    fn test_value_at() {
        // Explicit padding keeps the struct's field offsets predictable
        // under `repr(C)`.
        #[derive(Clone, Copy, Pod, Zeroable)]
        #[repr(C)]
        struct TestStruct {
            a: isize,
            b: usize,
            c: i64,
            d: u64,
            e: i32,
            f: u32,
            g: i16,
            _padding_1: [u8; 2],
            h: u16,
            _padding_2: [u8; 2],
            i: i8,
            _padding_3: [i8; 3],
            j: u8,
            _padding_4: [i8; 3],
        }

        let mut buf = vec![0_u8; mem::size_of::<TestStruct>()];
        let s = buf.as_mut_ptr() as *mut TestStruct;

        unsafe {
            (*s).a = isize::MIN;
            (*s).b = usize::MAX;
            (*s).c = i64::MIN;
            (*s).d = u64::MAX;
            (*s).e = i32::MIN;
            (*s).f = u32::MAX;
            (*s).g = i16::MIN;
            (*s).h = u16::MAX;
            (*s).i = i8::MIN;
            (*s).j = u8::MAX;

            // Each read starts at the field's offset and must advance the
            // cursor by exactly `size_of` of the read type.
            let mut offset = offset_of!(TestStruct, a);
            assert_eq!(offset, 0);
            assert_eq!(read_value_at::<isize>(&buf, &mut offset), isize::MIN);
            assert_eq!(offset, 8);

            let mut offset = offset_of!(TestStruct, b);
            assert_eq!(offset, 8);
            assert_eq!(read_value_at::<usize>(&buf, &mut offset), usize::MAX);
            assert_eq!(offset, 16);

            let mut offset = offset_of!(TestStruct, c);
            assert_eq!(offset, 16);
            assert_eq!(read_value_at::<i64>(&buf, &mut offset), i64::MIN);
            assert_eq!(offset, 24);

            let mut offset = offset_of!(TestStruct, d);
            assert_eq!(offset, 24);
            assert_eq!(read_value_at::<u64>(&buf, &mut offset), u64::MAX);
            assert_eq!(offset, 32);

            let mut offset = offset_of!(TestStruct, e);
            assert_eq!(offset, 32);
            assert_eq!(read_value_at::<i32>(&buf, &mut offset), i32::MIN);
            assert_eq!(offset, 36);

            let mut offset = offset_of!(TestStruct, f);
            assert_eq!(offset, 36);
            assert_eq!(read_value_at::<u32>(&buf, &mut offset), u32::MAX);
            assert_eq!(offset, 40);

            let mut offset = offset_of!(TestStruct, g);
            assert_eq!(offset, 40);
            assert_eq!(read_value_at::<i16>(&buf, &mut offset), i16::MIN);
            assert_eq!(offset, 42);

            // The jump from 42 to 44 (and below from 46/49 onward) reflects
            // the explicit padding fields.
            let mut offset = offset_of!(TestStruct, h);
            assert_eq!(offset, 44);
            assert_eq!(read_value_at::<u16>(&buf, &mut offset), u16::MAX);
            assert_eq!(offset, 46);

            let mut offset = offset_of!(TestStruct, i);
            assert_eq!(offset, 48);
            assert_eq!(read_value_at::<i8>(&buf, &mut offset), i8::MIN);
            assert_eq!(offset, 49);

            let mut offset = offset_of!(TestStruct, j);
            assert_eq!(offset, 52);
            assert_eq!(read_value_at::<u8>(&buf, &mut offset), u8::MAX);
            assert_eq!(offset, 53);
        }
    }

    #[test]
    fn test_read_bounded_vec_at() {
        #[derive(Clone, Copy, Pod, Zeroable)]
        #[repr(C)]
        struct TestStruct {
            a: [i64; 32],
            b: [u64; 32],
            c: [[u8; 32]; 32],
        }

        let mut buf = vec![0_u8; mem::size_of::<TestStruct>()];
        let s = buf.as_mut_ptr() as *mut TestStruct;

        unsafe {
            for (i, element) in (*s).a.iter_mut().enumerate() {
                *element = -(i as i64);
            }
            for (i, element) in (*s).b.iter_mut().enumerate() {
                *element = i as u64;
            }
            for (i, element) in (*s).c.iter_mut().enumerate() {
                *element = [i as u8; 32];
            }

            let metadata = BoundedVecMetadata::new_with_length(32, 32);
            let mut offset = offset_of!(TestStruct, a);
            assert_eq!(offset, 0);
            let vec: BoundedVec<i64> = read_bounded_vec_at(&buf, &mut offset, &metadata);
            for (i, element) in vec.iter().enumerate() {
                assert_eq!(i as i64, -(*element as i64));
            }
            // 32 elements * 8 bytes consumed.
            assert_eq!(offset, 256);

            let metadata = BoundedVecMetadata::new_with_length(32, 32);
            let mut offset = offset_of!(TestStruct, b);
            assert_eq!(offset, 256);
            let vec: BoundedVec<u64> = read_bounded_vec_at(&buf, &mut offset, &metadata);
            for (i, element) in vec.iter().enumerate() {
                assert_eq!(i as u64, *element as u64);
            }
            assert_eq!(offset, 512);

            let metadata = BoundedVecMetadata::new_with_length(32, 32);
            let mut offset = offset_of!(TestStruct, c);
            assert_eq!(offset, 512);
            let vec: BoundedVec<[u8; 32]> = read_bounded_vec_at(&buf, &mut offset, &metadata);
            for (i, element) in vec.iter().enumerate() {
                assert_eq!(&[i as u8; 32], element);
            }
            assert_eq!(offset, 1536);
        }
    }

    #[test]
    fn test_read_cyclic_bounded_vec_at() {
        #[derive(Clone, Copy, Pod, Zeroable)]
        #[repr(C)]
        struct TestStruct {
            a: [i64; 32],
            b: [u64; 32],
            c: [[u8; 32]; 32],
        }

        let mut buf = vec![0_u8; mem::size_of::<TestStruct>()];
        let s = buf.as_mut_ptr() as *mut TestStruct;

        unsafe {
            for (i, element) in (*s).a.iter_mut().enumerate() {
                *element = -(i as i64);
            }
            for (i, element) in (*s).b.iter_mut().enumerate() {
                *element = i as u64;
            }
            for (i, element) in (*s).c.iter_mut().enumerate() {
                *element = [i as u8; 32];
            }

            // Start the cyclic vecs from the middle.
            let metadata = CyclicBoundedVecMetadata::new_with_indices(32, 32, 14, 13);
            let mut offset = offset_of!(TestStruct, a);
            assert_eq!(offset, 0);
            let vec: CyclicBoundedVec<i64> =
                read_cyclic_bounded_vec_at(&buf, &mut offset, &metadata);
            assert_eq!(vec.capacity(), 32);
            assert_eq!(vec.len(), 32);
            assert_eq!(vec.first_index(), 14);
            assert_eq!(vec.last_index(), 13);
            // Iteration starts at first_index (14), wraps at capacity.
            assert_eq!(
                vec.iter().collect::<Vec<_>>().as_slice(),
                &[
                    &-14, &-15, &-16, &-17, &-18, &-19, &-20, &-21, &-22, &-23, &-24, &-25,
                    &-26, &-27, &-28, &-29, &-30, &-31, &-0, &-1, &-2, &-3, &-4, &-5, &-6, &-7,
                    &-8, &-9, &-10, &-11, &-12, &-13
                ]
            );
            assert_eq!(offset, 256);

            let metadata = CyclicBoundedVecMetadata::new_with_indices(32, 32, 14, 13);
            let mut offset = offset_of!(TestStruct, b);
            assert_eq!(offset, 256);
            let vec: CyclicBoundedVec<u64> =
                read_cyclic_bounded_vec_at(&buf, &mut offset, &metadata);
            assert_eq!(vec.capacity(), 32);
            assert_eq!(vec.len(), 32);
            assert_eq!(vec.first_index(), 14);
            assert_eq!(vec.last_index(), 13);
            assert_eq!(
                vec.iter().collect::<Vec<_>>().as_slice(),
                &[
                    &14, &15, &16, &17, &18, &19, &20, &21, &22, &23, &24, &25, &26, &27, &28,
                    &29, &30, &31, &0, &1, &2, &3, &4, &5, &6, &7, &8, &9, &10, &11, &12, &13
                ]
            );
            assert_eq!(offset, 512);

            let metadata = CyclicBoundedVecMetadata::new_with_indices(32, 32, 14, 13);
            let mut offset = offset_of!(TestStruct, c);
            assert_eq!(offset, 512);
            let vec: CyclicBoundedVec<[u8; 32]> =
                read_cyclic_bounded_vec_at(&buf, &mut offset, &metadata);
            assert_eq!(vec.capacity(), 32);
            assert_eq!(vec.len(), 32);
            assert_eq!(vec.first_index(), 14);
            assert_eq!(vec.last_index(), 13);
            assert_eq!(
                vec.iter().collect::<Vec<_>>().as_slice(),
                &[
                    &[14_u8; 32],
                    &[15_u8; 32],
                    &[16_u8; 32],
                    &[17_u8; 32],
                    &[18_u8; 32],
                    &[19_u8; 32],
                    &[20_u8; 32],
                    &[21_u8; 32],
                    &[22_u8; 32],
                    &[23_u8; 32],
                    &[24_u8; 32],
                    &[25_u8; 32],
                    &[26_u8; 32],
                    &[27_u8; 32],
                    &[28_u8; 32],
                    &[29_u8; 32],
                    &[30_u8; 32],
                    &[31_u8; 32],
                    &[0_u8; 32],
                    &[1_u8; 32],
                    &[2_u8; 32],
                    &[3_u8; 32],
                    &[4_u8; 32],
                    &[5_u8; 32],
                    &[6_u8; 32],
                    &[7_u8; 32],
                    &[8_u8; 32],
                    &[9_u8; 32],
                    &[10_u8; 32],
                    &[11_u8; 32],
                    &[12_u8; 32],
                    &[13_u8; 32],
                ]
            );
            assert_eq!(offset, 1536);
        }
    }

    #[test]
    fn test_read_cyclic_bounded_vec_first_last() {
        // Capacity 2: the third push wraps around and overwrites slot 0,
        // so the logical first element becomes 37.
        let mut vec = CyclicBoundedVec::<u32>::with_capacity(2);
        vec.push(0);
        vec.push(37);
        vec.push(49);

        let metadata_bytes = vec.metadata().to_le_bytes();
        let metadata = CyclicBoundedVecMetadata::from_le_bytes(metadata_bytes);

        let bytes = unsafe {
            slice::from_raw_parts(
                vec.as_mut_ptr() as *mut u8,
                mem::size_of::<u32>() * vec.capacity(),
            )
        };

        let mut offset = 0;
        let vec_copy: CyclicBoundedVec<u32> =
            unsafe { read_cyclic_bounded_vec_at(bytes, &mut offset, &metadata) };

        // NOTE(review): the pre-existing "Fails" comments suggest these
        // assertions documented a past bug in the copy — confirm whether
        // they still hold.
        assert_eq!(*vec.first().unwrap(), 37);
        assert_eq!(vec.first(), vec_copy.first()); // Fails. Both should be 37
        assert_eq!(*vec.last().unwrap(), 49);
        assert_eq!(vec.last(), vec_copy.last()); // Fails. Both should be 49
    }
}
0
solana_public_repos/Lightprotocol/light-protocol/utils/src
solana_public_repos/Lightprotocol/light-protocol/utils/src/offset/zero_copy.rs
use std::mem; /// Casts a part of provided `bytes` buffer with the given `offset` to a /// mutable pointer to `T`. /// /// Should be used for single values. /// /// # Safety /// /// This is higly unsafe. This function doesn't ensure alignment and /// correctness of provided buffer. The responsibility of such checks is on /// the caller. pub unsafe fn read_ptr_at<T>(bytes: &[u8], offset: &mut usize) -> *mut T { let size = mem::size_of::<T>(); let ptr = bytes[*offset..*offset + size].as_ptr() as *mut T; *offset += size; ptr } /// Casts a part of provided `bytes` buffer with the given `offset` to a /// mutable pointer to `T`. /// /// Should be used for array-type sequences. /// /// # Safety /// /// This is higly unsafe. This function doesn't ensure alignment and /// correctness of provided buffer. The responsibility of such checks is on /// the caller. pub unsafe fn read_array_like_ptr_at<T>(bytes: &[u8], offset: &mut usize, len: usize) -> *mut T { let size = mem::size_of::<T>() * len; let ptr = bytes[*offset..*offset + size].as_ptr() as *mut T; *offset += size; ptr } /// Writes provided `data` into provided `bytes` buffer with the given /// `offset`. 
pub fn write_at<T>(bytes: &mut [u8], data: &[u8], offset: &mut usize) { let size = mem::size_of::<T>(); bytes[*offset..*offset + size].copy_from_slice(data); *offset += size; } #[cfg(test)] mod test { use super::*; use bytemuck::{Pod, Zeroable}; use memoffset::offset_of; #[test] fn test_read_ptr_at() { #[derive(Clone, Copy, Pod, Zeroable)] #[repr(C)] struct TestStruct { a: isize, b: usize, c: i64, d: u64, e: i32, f: u32, g: i16, _padding_1: [u8; 2], h: u16, _padding_2: [u8; 2], i: i8, _padding_3: [i8; 3], j: u8, _padding_4: [i8; 3], } let mut buf = vec![0_u8; mem::size_of::<TestStruct>()]; let s = buf.as_mut_ptr() as *mut TestStruct; unsafe { (*s).a = isize::MIN; (*s).b = usize::MAX; (*s).c = i64::MIN; (*s).d = u64::MAX; (*s).e = i32::MIN; (*s).f = u32::MAX; (*s).g = i16::MIN; (*s).h = u16::MAX; (*s).i = i8::MIN; (*s).j = u8::MAX; let mut offset = offset_of!(TestStruct, a); assert_eq!(offset, 0); assert_eq!(*read_ptr_at::<isize>(&buf, &mut offset), isize::MIN); assert_eq!(offset, 8); let mut offset = offset_of!(TestStruct, b); assert_eq!(offset, 8); assert_eq!(*read_ptr_at::<usize>(&buf, &mut offset), usize::MAX); assert_eq!(offset, 16); let mut offset = offset_of!(TestStruct, c); assert_eq!(offset, 16); assert_eq!(*read_ptr_at::<i64>(&buf, &mut offset), i64::MIN); assert_eq!(offset, 24); let mut offset = offset_of!(TestStruct, d); assert_eq!(offset, 24); assert_eq!(*read_ptr_at::<u64>(&buf, &mut offset), u64::MAX); assert_eq!(offset, 32); let mut offset = offset_of!(TestStruct, e); assert_eq!(offset, 32); assert_eq!(*read_ptr_at::<i32>(&buf, &mut offset), i32::MIN); assert_eq!(offset, 36); let mut offset = offset_of!(TestStruct, f); assert_eq!(offset, 36); assert_eq!(*read_ptr_at::<u32>(&buf, &mut offset), u32::MAX); assert_eq!(offset, 40); let mut offset = offset_of!(TestStruct, g); assert_eq!(offset, 40); assert_eq!(*read_ptr_at::<i16>(&buf, &mut offset), i16::MIN); assert_eq!(offset, 42); let mut offset = offset_of!(TestStruct, h); assert_eq!(offset, 44); 
assert_eq!(*read_ptr_at::<u16>(&buf, &mut offset), u16::MAX); assert_eq!(offset, 46); let mut offset = offset_of!(TestStruct, i); assert_eq!(offset, 48); assert_eq!(*read_ptr_at::<i8>(&buf, &mut offset), i8::MIN); assert_eq!(offset, 49); let mut offset = offset_of!(TestStruct, j); assert_eq!(offset, 52); assert_eq!(*read_ptr_at::<u8>(&buf, &mut offset), u8::MAX); assert_eq!(offset, 53); } } #[test] fn test_read_array_like_ptr_at() { #[derive(Clone, Copy, Pod, Zeroable)] #[repr(C)] struct TestStruct { a: [i64; 32], b: [u64; 32], } let mut buf = vec![0_u8; mem::size_of::<TestStruct>()]; let s = buf.as_mut_ptr() as *mut TestStruct; unsafe { for (i, element) in (*s).a.iter_mut().enumerate() { *element = -(i as i64); } for (i, element) in (*s).b.iter_mut().enumerate() { *element = i as u64; } let mut offset = offset_of!(TestStruct, a); assert_eq!(offset, 0); let ptr: *mut i64 = read_array_like_ptr_at(&buf, &mut offset, 32); for i in 0..32 { assert_eq!(*(ptr.add(i)), -(i as i64)); } assert_eq!(offset, 256); let mut offset = offset_of!(TestStruct, b); assert_eq!(offset, 256); let ptr: *mut u64 = read_array_like_ptr_at(&buf, &mut offset, 32); for i in 0..32 { assert_eq!(*(ptr.add(i)), i as u64); } assert_eq!(offset, 512); } } #[test] fn test_write_at() { #[derive(Clone, Copy, Pod, Zeroable)] #[repr(C)] struct TestStruct { a: isize, b: usize, c: i64, d: u64, e: i32, f: u32, g: i16, _padding_1: [u8; 2], h: u16, _padding_2: [u8; 2], i: i8, _padding_3: [i8; 3], j: u8, _padding_4: [i8; 3], } let mut buf = vec![0_u8; mem::size_of::<TestStruct>()]; let a: isize = isize::MIN; let b: usize = usize::MAX; let c: i64 = i64::MIN; let d: u64 = u64::MAX; let e: i32 = i32::MIN; let f: u32 = u32::MAX; let g: i16 = i16::MIN; let h: u16 = u16::MAX; let i: i8 = i8::MIN; let j: u8 = u8::MAX; let mut offset = offset_of!(TestStruct, a); assert_eq!(offset, 0); write_at::<isize>(&mut buf, &a.to_le_bytes(), &mut offset); assert_eq!(offset, 8); let mut offset = offset_of!(TestStruct, b); 
assert_eq!(offset, 8); write_at::<usize>(&mut buf, &b.to_le_bytes(), &mut offset); assert_eq!(offset, 16); let mut offset = offset_of!(TestStruct, c); assert_eq!(offset, 16); write_at::<i64>(&mut buf, &c.to_le_bytes(), &mut offset); assert_eq!(offset, 24); let mut offset = offset_of!(TestStruct, d); assert_eq!(offset, 24); write_at::<u64>(&mut buf, &d.to_le_bytes(), &mut offset); assert_eq!(offset, 32); let mut offset = offset_of!(TestStruct, e); assert_eq!(offset, 32); write_at::<i32>(&mut buf, &e.to_le_bytes(), &mut offset); assert_eq!(offset, 36); let mut offset = offset_of!(TestStruct, f); assert_eq!(offset, 36); write_at::<u32>(&mut buf, &f.to_le_bytes(), &mut offset); assert_eq!(offset, 40); let mut offset = offset_of!(TestStruct, g); assert_eq!(offset, 40); write_at::<i16>(&mut buf, &g.to_le_bytes(), &mut offset); assert_eq!(offset, 42); let mut offset = offset_of!(TestStruct, h); assert_eq!(offset, 44); write_at::<u16>(&mut buf, &h.to_le_bytes(), &mut offset); assert_eq!(offset, 46); let mut offset = offset_of!(TestStruct, i); assert_eq!(offset, 48); write_at::<i8>(&mut buf, &i.to_le_bytes(), &mut offset); assert_eq!(offset, 49); let mut offset = offset_of!(TestStruct, j); assert_eq!(offset, 52); write_at::<u8>(&mut buf, &j.to_le_bytes(), &mut offset); assert_eq!(offset, 53); let s = buf.as_mut_ptr() as *mut TestStruct; unsafe { assert_eq!((*s).a, a); assert_eq!((*s).b, b); assert_eq!((*s).c, c); assert_eq!((*s).d, d); assert_eq!((*s).e, e); assert_eq!((*s).f, f); assert_eq!((*s).g, g); assert_eq!((*s).h, h); assert_eq!((*s).i, i); assert_eq!((*s).j, j); } } }
0
solana_public_repos/Lightprotocol/light-protocol/utils/src
solana_public_repos/Lightprotocol/light-protocol/utils/src/offset/mod.rs
//! Offset-based readers and writers for raw byte buffers.

// Readers that copy values out of the buffer into owned structures.
pub mod copy;
// Readers/writers that reinterpret buffer memory in place via raw pointers.
pub mod zero_copy;
0
solana_public_repos/Lightprotocol/light-protocol/circuit-lib
solana_public_repos/Lightprotocol/light-protocol/circuit-lib/light-prover-client/Cargo.toml
[package] name = "light-prover-client" version = "1.2.0" description = "Crate for interacting with Light Protocol circuits" repository = "https://github.com/Lightprotocol/light-protocol" license = "Apache-2.0" edition = "2021" [features] gnark = ["tokio", "reqwest"] default = ["gnark", "devenv"] devenv = [] [dependencies] # light local deps light-merkle-tree-reference = { path = "../../merkle-tree/reference", version = "1.1.0" } light-hasher = { path = "../../merkle-tree/hasher", version = "1.1.0" } light-indexed-merkle-tree = { path = "../../merkle-tree/indexed", version = "1.1.0" } light-concurrent-merkle-tree = { path = "../../merkle-tree/concurrent", version = "1.1.0" } light-bounded-vec = { path = "../../merkle-tree/bounded-vec", version = "1.1.0" } light-utils = { path = "../../utils", version = "1.1.0" } # ark dependencies ark-serialize = "0.4.2" ark-ec = "0.5.0" ark-ff = "0.4.2" ark-relations = "0.4" ark-bn254 = { version = "0.4" } ark-std = { version = "0.4", default-features = false, features = ["parallel"] } ark-groth16 = { version = "0.4", default-features = false, features = ["parallel"] } ark-crypto-primitives = { version = "0.4" } bytemuck = "1.17.0" # solana groth16-solana = "0.0.3" solana-program = { workspace = true } num-bigint = { version = "0.4.6", features = ["serde"] } once_cell = "1.8" thiserror = "1.0" color-eyre = "=0.6.3" log = "0.4" env_logger = "0.11.2" # 1.3.0 required by package `aes-gcm-siv v0.10.3` zeroize = "=1.3.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.133" num-traits = "0.2.19" tokio = { workspace = true, optional = true } reqwest = { version = "0.11.24", features = ["json", "rustls-tls"], optional = true } sysinfo = "0.33" borsh = ">=0.9, <0.11" [dev-dependencies] duct = "0.13.7" serial_test = "3.2.0"
0
solana_public_repos/Lightprotocol/light-protocol/circuit-lib/light-prover-client
solana_public_repos/Lightprotocol/light-protocol/circuit-lib/light-prover-client/tests/gnark.rs
//! Integration tests that build circuit inputs with the local reference
//! Merkle-tree implementations, serialize them to the gnark JSON formats, and
//! POST them to a locally spawned prover server (`spawn_prover`), asserting
//! that proof generation succeeds. Tests are `#[serial]` because they share
//! the prover process/port.
use light_hasher::{Hasher, Poseidon};
use light_merkle_tree_reference::MerkleTree;
use light_prover_client::batch_append_with_proofs::get_batch_append_with_proofs_inputs;
use light_prover_client::batch_append_with_subtrees::{
    calculate_hash_chain, get_batch_append_with_subtrees_inputs,
};
use light_prover_client::batch_update::get_batch_update_inputs;
use light_prover_client::gnark::batch_append_with_proofs_json_formatter::BatchAppendWithProofsInputsJson;
use light_prover_client::gnark::batch_append_with_subtrees_json_formatter::append_inputs_string;
use light_prover_client::gnark::batch_update_json_formatter::update_inputs_string;
use light_prover_client::{
    batch_address_append::{
        get_batch_address_append_inputs_from_tree, get_test_batch_address_append_inputs,
    },
    gnark::batch_address_append_json_formatter::to_json,
};
use light_prover_client::gnark::helpers::{spawn_prover, ProofType, ProverConfig};
use light_prover_client::{
    gnark::{
        constants::{PROVE_PATH, SERVER_ADDRESS},
        inclusion_json_formatter::inclusion_inputs_string,
    },
    helpers::init_logger,
};
use light_utils::bigint::bigint_to_be_bytes_array;
use log::info;
use num_bigint::ToBigUint;
use reqwest::Client;
use serial_test::serial;

/// Inclusion proofs for several batch sizes. `#[ignore]`d because it spawns
/// the prover with `restart = false` and runs the full circuit set.
#[tokio::test]
#[ignore]
async fn prove_inclusion_full() {
    init_logger();
    spawn_prover(
        false,
        ProverConfig {
            run_mode: None,
            circuits: vec![ProofType::Inclusion, { ProofType::BatchUpdateTest }],
        },
    )
    .await;
    let client = Client::new();
    // Exercise each supported batch size of the inclusion circuit.
    for number_of_utxos in &[1, 2, 3, 4, 8] {
        let (inputs, _) = inclusion_inputs_string(*number_of_utxos as usize);
        let response_result = client
            .post(&format!("{}{}", SERVER_ADDRESS, PROVE_PATH))
            .header("Content-Type", "text/plain; charset=utf-8")
            .body(inputs)
            .send()
            .await
            .expect("Failed to execute request.");
        assert!(response_result.status().is_success());
    }
}

/// Smoke test: a single-UTXO inclusion proof round-trips through the server.
#[serial]
#[tokio::test]
async fn prove_inclusion() {
    init_logger();
    spawn_prover(
        true,
        ProverConfig {
            run_mode: None,
            circuits: vec![ProofType::Inclusion],
        },
    )
    .await;
    let client = Client::new();
    let (inputs, _) = inclusion_inputs_string(1);
    let response_result = client
        .post(&format!("{}{}", SERVER_ADDRESS, PROVE_PATH))
        .header("Content-Type", "text/plain; charset=utf-8")
        .body(inputs)
        .send()
        .await
        .expect("Failed to execute request.");
    assert!(response_result.status().is_success());
}

/// Batch-update (nullification) proof: append leaves, then prove a batch of
/// nullifier updates against the resulting root. Runs two rounds to cover a
/// non-empty starting tree on the second pass.
#[serial]
#[tokio::test]
async fn prove_batch_update() {
    init_logger();
    spawn_prover(
        true,
        ProverConfig {
            run_mode: None,
            circuits: vec![ProofType::BatchUpdateTest],
        },
    )
    .await;
    const HEIGHT: usize = 26;
    const CANOPY: usize = 0;
    let num_insertions = 10;
    let tx_hash = [0u8; 32];
    info!("initializing merkle tree");
    let mut merkle_tree = MerkleTree::<Poseidon>::new(HEIGHT, CANOPY);
    for _ in 0..2 {
        let mut leaves = vec![];
        let mut old_leaves = vec![];
        let mut nullifiers = vec![];
        for i in 0..num_insertions {
            // Deterministic leaf: Poseidon hash of a 32-byte value ending in i.
            let mut bn: [u8; 32] = [0; 32];
            bn[31] = i as u8;
            let leaf: [u8; 32] = Poseidon::hash(&bn).unwrap();
            leaves.push(leaf);
            old_leaves.push(leaf);
            merkle_tree.append(&leaf).unwrap();
            // Nullifier binds the leaf to its index and the (zeroed) tx hash.
            let nullifier =
                Poseidon::hashv(&[&leaf, &(i as usize).to_be_bytes(), &tx_hash]).unwrap();
            nullifiers.push(nullifier);
        }
        let mut merkle_proofs = vec![];
        let mut path_indices = vec![];
        for index in 0..leaves.len() {
            let proof = merkle_tree.get_proof_of_leaf(index, true).unwrap();
            merkle_proofs.push(proof.to_vec());
            path_indices.push(index as u32);
        }
        let root = merkle_tree.root();
        // The circuit's public-input hash chain is over the nullifiers.
        let leaves_hashchain = calculate_hash_chain(&nullifiers);
        let inputs = get_batch_update_inputs::<HEIGHT>(
            root,
            vec![tx_hash; num_insertions],
            leaves,
            leaves_hashchain,
            old_leaves,
            merkle_proofs,
            path_indices,
            num_insertions as u32,
        );
        let client = Client::new();
        let inputs = update_inputs_string(&inputs);
        let response_result = client
            .post(&format!("{}{}", SERVER_ADDRESS, PROVE_PATH))
            .header("Content-Type", "text/plain; charset=utf-8")
            .body(inputs)
            .send()
            .await
            .expect("Failed to execute request.");
        let status = response_result.status();
        let body = response_result.text().await.unwrap();
        assert!(
            status.is_success(),
            "Batch append proof generation failed. Status: {}, Body: {}",
            status,
            body
        );
    }
}

/// Batch-append-with-subtrees proof against a fresh (empty) tree, repeated
/// twice. Note the tree is re-created each round, so both rounds start empty.
#[serial]
#[tokio::test]
async fn prove_batch_append() {
    init_logger();
    println!("spawning prover");
    spawn_prover(
        true,
        ProverConfig {
            run_mode: None,
            circuits: vec![ProofType::BatchAppendWithSubtreesTest],
        },
    )
    .await;
    println!("prover spawned");
    const HEIGHT: usize = 26;
    const CANOPY: usize = 0;
    let num_insertions = 10;
    // Do multiple rounds of batch appends
    for _ in 0..2 {
        info!("initializing merkle tree for append.");
        let merkle_tree = MerkleTree::<Poseidon>::new(HEIGHT, CANOPY);
        let old_subtrees = merkle_tree.get_subtrees();
        let mut leaves = vec![];
        // Create leaves for this batch append
        for i in 0..num_insertions {
            let mut bn: [u8; 32] = [0; 32];
            bn[31] = i as u8;
            let leaf: [u8; 32] = Poseidon::hash(&bn).unwrap();
            leaves.push(leaf);
        }
        let leaves_hashchain = calculate_hash_chain(&leaves);
        // Generate inputs for batch append operation
        let inputs = get_batch_append_with_subtrees_inputs::<HEIGHT>(
            merkle_tree.layers[0].len(),
            old_subtrees.try_into().unwrap(),
            leaves,
            leaves_hashchain,
        );
        // Send proof request to the server
        let client = Client::new();
        let inputs = append_inputs_string(&inputs);
        let response_result = client
            .post(&format!("{}{}", SERVER_ADDRESS, PROVE_PATH))
            .header("Content-Type", "text/plain; charset=utf-8")
            .body(inputs)
            .send()
            .await
            .expect("Failed to execute request.");
        let status = response_result.status();
        let body = response_result.text().await.unwrap();
        assert!(
            status.is_success(),
            "Batch append proof generation failed. Status: {}, Body: {}",
            status,
            body
        );
    }
}

/// Batch-append-with-proofs: pre-populates alternating nullifier/zero leaves,
/// then proves appending a batch over them, advancing `current_index` between
/// the two rounds.
#[serial]
#[tokio::test]
async fn prove_batch_two_append() {
    init_logger();
    // Spawn the prover with specific configuration
    spawn_prover(
        true,
        ProverConfig {
            run_mode: None,
            circuits: vec![ProofType::BatchAppendWithProofsTest],
        },
    )
    .await;
    const HEIGHT: usize = 26;
    const CANOPY: usize = 0;
    let num_insertions = 10;
    info!("Initializing Merkle tree for append.");
    let mut merkle_tree = MerkleTree::<Poseidon>::new(HEIGHT, CANOPY);
    let mut current_index = 0;
    for i in 0..2 {
        let mut leaves = vec![];
        let mut old_leaves = vec![];
        // Create leaves and append them to the Merkle tree
        for i in 0..num_insertions {
            let mut bn: [u8; 32] = [0; 32];
            bn[31] = i as u8;
            let leaf: [u8; 32] = Poseidon::hash(&bn).unwrap();
            // assuming old leaves are all zero (not nullified)
            leaves.push(leaf);
            // Append nullifier or zero value
            if i % 2 == 0 {
                let nullifier = Poseidon::hashv(&[&leaf, &[0u8; 32]]).unwrap();
                merkle_tree.append(&nullifier).unwrap();
            } else {
                merkle_tree.append(&[0u8; 32]).unwrap();
            }
        }
        // Generate Merkle proofs and prepare path indices
        let mut merkle_proofs = vec![];
        for index in current_index..current_index + num_insertions {
            let proof = merkle_tree.get_proof_of_leaf(index, true).unwrap();
            let leaf = merkle_tree.get_leaf(index);
            old_leaves.push(leaf);
            merkle_proofs.push(proof.to_vec());
        }
        // Retrieve tree root and compute leaves hash chain
        let root = merkle_tree.root();
        let leaves_hashchain = calculate_hash_chain(&leaves);
        // Generate inputs for BatchAppendWithProofsCircuit
        let inputs = get_batch_append_with_proofs_inputs::<HEIGHT>(
            root,
            (i * num_insertions) as u32,
            leaves.clone(),
            leaves_hashchain,
            old_leaves.clone(),
            merkle_proofs.clone(),
            num_insertions as u32,
        );
        // Serialize inputs to JSON
        let client = Client::new();
        let inputs_json = BatchAppendWithProofsInputsJson::from_inputs(&inputs).to_string();
        // Send proof request to server
        let response_result = client
            .post(&format!("{}{}", SERVER_ADDRESS, PROVE_PATH))
            .header("Content-Type", "text/plain; charset=utf-8")
            .body(inputs_json)
            .send()
            .await
            .expect("Failed to execute request.");
        let status = response_result.status();
        let body = response_result.text().await.unwrap();
        assert!(
            status.is_success(),
            "Batch append proof generation failed. Status: {}, Body: {}",
            status,
            body
        );
        current_index += num_insertions;
    }
}

/// Prints synthetic batch-address-append circuit test data as JSON (for
/// generating circuit fixtures by hand).
#[test]
pub fn print_circuit_test_data_json_formatted() {
    let addresses = vec![31_u32.to_biguint().unwrap(), 30_u32.to_biguint().unwrap()];
    let start_index = 2;
    let tree_height = 4;
    let inputs = get_test_batch_address_append_inputs(addresses, start_index, tree_height);
    let json_output = to_json(&inputs);
    println!("{}", json_output);
}

/// Builds batch-address-append inputs from a real indexed Merkle tree and
/// checks the JSON serialization against a hard-coded reference fixture.
#[test]
pub fn print_circuit_test_data_with_existing_tree() {
    use light_hasher::Poseidon;
    use light_indexed_merkle_tree::{array::IndexedArray, reference::IndexedMerkleTree};
    const TREE_HEIGHT: usize = 4;
    let new_element_values = vec![31_u32.to_biguint().unwrap(), 30_u32.to_biguint().unwrap()];
    let mut relayer_indexing_array = IndexedArray::<Poseidon, usize>::default();
    relayer_indexing_array.init().unwrap();
    let mut relayer_merkle_tree =
        IndexedMerkleTree::<Poseidon, usize>::new(TREE_HEIGHT, 0).unwrap();
    relayer_merkle_tree.init().unwrap();
    let start_index = relayer_merkle_tree.merkle_tree.rightmost_index;
    let current_root = relayer_merkle_tree.root();
    // Low-element data: for each new address, the range element it splits.
    let mut low_element_values = Vec::new();
    let mut low_element_indices = Vec::new();
    let mut low_element_next_indices = Vec::new();
    let mut low_element_next_values = Vec::new();
    let mut low_element_proofs: Vec<Vec<[u8; 32]>> = Vec::new();
    for new_element_value in &new_element_values {
        let non_inclusion_proof = relayer_merkle_tree
            .get_non_inclusion_proof(new_element_value, &relayer_indexing_array)
            .unwrap();
        low_element_values.push(non_inclusion_proof.leaf_lower_range_value);
        low_element_indices.push(non_inclusion_proof.leaf_index);
        low_element_next_indices.push(non_inclusion_proof.next_index);
        low_element_next_values.push(non_inclusion_proof.leaf_higher_range_value);
        low_element_proofs.push(non_inclusion_proof.merkle_proof.as_slice().to_vec());
    }
    let new_element_values = new_element_values
        .iter()
        .map(|v| bigint_to_be_bytes_array::<32>(&v).unwrap())
        .collect();
    let inputs = get_batch_address_append_inputs_from_tree::<TREE_HEIGHT>(
        start_index,
        current_root,
        low_element_values,
        low_element_next_values,
        low_element_indices,
        low_element_next_indices,
        low_element_proofs,
        new_element_values,
        relayer_merkle_tree
            .merkle_tree
            .get_subtrees()
            .try_into()
            .unwrap(),
    );
    let json_output = to_json(&inputs);
    // Golden fixture: expected serialization for the inputs built above.
    let reference_output = r#"{ "BatchSize": 2, "HashchainHash": "0x1e94e9fed8440d50ff872bedcc6a6c460f9c6688ac167f68e288057e63109410", "LowElementIndices": [ "0x0", "0x0" ], "LowElementNextIndices": [ "0x1", "0x2" ], "LowElementNextValues": [ "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0x1f" ], "LowElementProofs": [ [ "0x1ea416eeb40218b540c1cfb8dbe91f6d54e8a29edc30a39e326b4057a7d963f5", "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864", "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1", "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238" ], [ "0x1ea416eeb40218b540c1cfb8dbe91f6d54e8a29edc30a39e326b4057a7d963f5", "0x864f3eb12bb83a5cdc9ff6fdc8b985aa4b87292c5eef49201065277170e8c51", "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1", "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238" ] ], "LowElementValues": [ "0x0", "0x0" ], "NewElementProofs": [ [ "0x0", "0x2cfd59ee6c304f7f1e82d9e7e857a380e991fb02728f09324baffef2807e74fa", "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1", "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238" ], [ "0x29794d28dddbdb020ec3974ecc41bcf64fb695eb222bde71f2a130e92852c0eb", "0x15920e98b921491171b9b2b0a8ac1545e10b58e9c058822b6de9f4179bbd2e7c", "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1", "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238" ] ], "NewElementValues": [ "0x1f", "0x1e" ], "NewRoot": "0x2a62d5241a6d3659df612b996ad729abe32f425bfec249f060983013ba2cfdb8", "OldRoot": "0x909e8762fb09c626001b19f6441a2cd2da21b1622c6970ec9c4863ec9c09855", "PublicInputHash": "0x31a64ce5adc664d1092fd7353a76b4fe0a3e63ad0cf313d66a6bc89e5e4a840", "StartIndex": 2, "TreeHeight": 4 }"#;
    println!("{}", json_output);
    assert_eq!(json_output, reference_output);
}

/// End-to-end batch-address-append proof for one new address against a
/// height-26 indexed tree.
#[serial]
#[tokio::test]
async fn prove_batch_address_append() {
    use light_hasher::Poseidon;
    use light_indexed_merkle_tree::{array::IndexedArray, reference::IndexedMerkleTree};
    init_logger();
    // Spawn the prover with specific configuration
    spawn_prover(
        true,
        ProverConfig {
            run_mode: None,
            circuits: vec![ProofType::BatchAddressAppendTest],
        },
    )
    .await;
    const TREE_HEIGHT: usize = 26;
    // Initialize test data
    let new_element_values = vec![31_u32.to_biguint().unwrap()];
    // Initialize indexing structures
    let mut relayer_indexing_array = IndexedArray::<Poseidon, usize>::default();
    relayer_indexing_array.init().unwrap();
    let mut relayer_merkle_tree =
        IndexedMerkleTree::<Poseidon, usize>::new(TREE_HEIGHT, 0).unwrap();
    relayer_merkle_tree.init().unwrap();
    let start_index = relayer_merkle_tree.merkle_tree.rightmost_index;
    let current_root = relayer_merkle_tree.root();
    // Prepare proof components
    let mut low_element_values = Vec::new();
    let mut low_element_indices = Vec::new();
    let mut low_element_next_indices = Vec::new();
    let mut low_element_next_values = Vec::new();
    let mut low_element_proofs: Vec<Vec<[u8; 32]>> = Vec::new();
    // Generate non-inclusion proofs for each element
    for new_element_value in &new_element_values {
        let non_inclusion_proof = relayer_merkle_tree
            .get_non_inclusion_proof(new_element_value, &relayer_indexing_array)
            .unwrap();
        low_element_values.push(non_inclusion_proof.leaf_lower_range_value);
        low_element_indices.push(non_inclusion_proof.leaf_index);
        low_element_next_indices.push(non_inclusion_proof.next_index);
        low_element_next_values.push(non_inclusion_proof.leaf_higher_range_value);
        low_element_proofs.push(non_inclusion_proof.merkle_proof.as_slice().to_vec());
    }
    // Convert big integers to byte arrays
    let new_element_values = new_element_values
        .iter()
        .map(|v| bigint_to_be_bytes_array::<32>(v).unwrap())
        .collect();
    // Generate circuit inputs
    let inputs = get_batch_address_append_inputs_from_tree::<TREE_HEIGHT>(
        start_index,
        current_root,
        low_element_values,
        low_element_next_values,
        low_element_indices,
        low_element_next_indices,
        low_element_proofs,
        new_element_values,
        relayer_merkle_tree
            .merkle_tree
            .get_subtrees()
            .try_into()
            .unwrap(),
    );
    // Convert inputs to JSON format
    let inputs_json = to_json(&inputs);
    // Send proof request to server
    let client = Client::new();
    let response_result = client
        .post(&format!("{}{}", SERVER_ADDRESS, PROVE_PATH))
        .header("Content-Type", "text/plain; charset=utf-8")
        .body(inputs_json)
        .send()
        .await
        .expect("Failed to execute request.");
    // Verify response
    let status = response_result.status();
    let body = response_result.text().await.unwrap();
    assert!(
        status.is_success(),
        "Batch address append proof generation failed. Status: {}, Body: {}",
        status,
        body
    );
}
0
solana_public_repos/Lightprotocol/light-protocol/circuit-lib/light-prover-client
solana_public_repos/Lightprotocol/light-protocol/circuit-lib/light-prover-client/scripts/rapidsnark_bench.sh
#!/bin/bash
# Benchmark the rapidsnark `prover` binary over the merkle22 test fixtures,
# appending each iteration's wall-clock time (in milliseconds) to a log file.

LOG_FILE="execution_times.log"

echo "Benchmarking started..." >> "$LOG_FILE"

for ((i = 1; i < 10; i++)); do
    echo "Running iteration $i"
    work_dir="test-data/merkle22_$i"

    # %s%6N = epoch seconds concatenated with the first 6 digits of the
    # nanosecond field, i.e. a microsecond-resolution timestamp (GNU date).
    start_time=$(date +%s%6N)
    prover "$work_dir/circuit.zkey" "$work_dir/22_$i.wtns" \
        "$work_dir/proof_merkle22_$i.json" "$work_dir/public_inputs_merkle22_$i.json"
    # Capture the end timestamp immediately after the prover returns. The
    # previous version slept inside the timed window and subtracted a nominal
    # 1000 ms afterwards, which skews results because `sleep 1` never takes
    # exactly one second.
    end_time=$(date +%s%6N)

    # Microseconds -> milliseconds, keeping 3 decimal places.
    execution_time=$(echo "scale=3; ($end_time - $start_time) / 1000" | bc)
    echo "Iteration $i took $execution_time milliseconds" >> "$LOG_FILE"

    # Cool-down pause between iterations, now outside the measurement.
    sleep 1
done
0
solana_public_repos/Lightprotocol/light-protocol/circuit-lib/light-prover-client
solana_public_repos/Lightprotocol/light-protocol/circuit-lib/light-prover-client/src/prove_utils.rs
use groth16_solana::decompression::{decompress_g1, decompress_g2};

use crate::errors::CircuitsError;

/// A compressed proof paired with the public inputs it was produced for.
pub struct ProofResult {
    pub proof: ProofCompressed,
    pub public_inputs: Vec<[u8; 32]>,
}

/// Groth16 proof in compressed form: G1 points (`a`, `c`) as 32 bytes each,
/// the G2 point (`b`) as 64 bytes.
#[derive(Debug)]
pub struct ProofCompressed {
    pub a: [u8; 32],
    pub b: [u8; 64],
    pub c: [u8; 32],
}

impl ProofCompressed {
    /// Decompresses all three curve points into a full [`Proof`].
    ///
    /// Returns a `CircuitsError` if any component fails decompression; `a` is
    /// attempted first, then `b`, then `c`, short-circuiting on the first
    /// failure.
    pub fn try_decompress(&self) -> Result<Proof, CircuitsError> {
        Ok(Proof {
            a: decompress_g1(&self.a)?,
            b: decompress_g2(&self.b)?,
            c: decompress_g1(&self.c)?,
        })
    }
}

/// Groth16 proof with uncompressed points: G1 as 64 bytes, G2 as 128 bytes.
pub struct Proof {
    pub a: [u8; 64],
    pub b: [u8; 128],
    pub c: [u8; 64],
}
0