index
int64
0
0
repo_id
stringlengths
16
181
file_path
stringlengths
28
270
content
stringlengths
1
11.6M
__index_level_0__
int64
0
10k
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/methods.test.ts
import { queryPaymentMethodStatus, queryPaymentMethods } from '@proton/shared/lib/api/payments'; import { BLACK_FRIDAY } from '@proton/shared/lib/constants'; import { PAYMENT_METHOD_TYPES } from './constants'; import { Autopay, PaymentMethodFlows, PaymentMethodStatus, SavedPaymentMethod } from './interface'; import { PaymentMethods, initializePaymentMethods } from './methods'; let status: PaymentMethodStatus; beforeEach(() => { status = { Card: true, Paypal: true, Apple: true, Cash: true, Bitcoin: true, }; }); describe('getNewMethods()', () => { it('should include card when card is available', () => { const methods = new PaymentMethods(status, [], 500, '', 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'card')).toBe(true); }); it('should not include card when card is not available', () => { status.Card = false; const methods = new PaymentMethods(status, [], 500, '', 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'card')).toBe(false); }); // tests for PayPal it('should include PayPal when PayPal is available', () => { const methods = new PaymentMethods(status, [], 500, '', 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'paypal')).toBe(true); }); it('should not include PayPal when PayPal is not available due to amount less than minimum', () => { const methods = new PaymentMethods(status, [], 50, '', 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'paypal')).toBe(false); }); it('should not include PayPal when already used as payment method', () => { const methods = new PaymentMethods( status, [ { ID: '1', Type: PAYMENT_METHOD_TYPES.PAYPAL, Order: 500, Details: { BillingAgreementID: 'BA-123', PayerID: '123', Payer: '123', }, }, ], 500, '', 'subscription' ); expect(methods.getNewMethods().some((method) => method.type === 'paypal')).toBe(false); }); it('should include Bitcoin when Bitcoin is available', () => { const methods = new 
PaymentMethods(status, [], 500, '', 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'bitcoin')).toBe(true); }); it('should not include Bitcoin when Bitcoin is not available due to coupon', () => { const methods = new PaymentMethods(status, [], 500, BLACK_FRIDAY.COUPON_CODE, 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'bitcoin')).toBe(false); }); it.each(['signup', 'human-verification'] as PaymentMethodFlows[])( 'should not include Bitcoin when Bitcoin is not available due to flow %s', (flow) => { const methods = new PaymentMethods(status, [], 500, '', flow); expect(methods.getNewMethods().some((method) => method.type === 'bitcoin')).toBe(false); } ); it('should not include bitcoin due to amount less than minimum', () => { const methods = new PaymentMethods(status, [], 50, '', 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'bitcoin')).toBe(false); }); it('should include Cash when Cash is available', () => { const methods = new PaymentMethods(status, [], 500, '', 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'cash')).toBe(true); }); it('should not include Cash when Cash is not available due to coupon', () => { const methods = new PaymentMethods(status, [], 500, BLACK_FRIDAY.COUPON_CODE, 'subscription'); expect(methods.getNewMethods().some((method) => method.type === 'cash')).toBe(false); }); it.each(['signup', 'signup-pass', 'human-verification'] as PaymentMethodFlows[])( 'should not include Cash when Cash is not available due to flow %s', (flow) => { const methods = new PaymentMethods(status, [], 500, '', flow); expect(methods.getNewMethods().some((method) => method.type === 'cash')).toBe(false); } ); }); describe('getUsedMethods()', () => { it('should return used methods: paypal and cards', () => { const methods = new PaymentMethods( status, [ { ID: '1', Type: PAYMENT_METHOD_TYPES.PAYPAL, Order: 500, Details: { 
BillingAgreementID: 'BA-123', PayerID: '123', Payer: '123', }, }, { ID: '2', Type: PAYMENT_METHOD_TYPES.CARD, Order: 501, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '12', ExpYear: '2030', ZIP: '12345', Country: 'US', Last4: '1234', Brand: 'Visa', }, }, // one more card { ID: '3', Type: PAYMENT_METHOD_TYPES.CARD, Order: 502, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '11', ExpYear: '2031', ZIP: '12345', Country: 'US', Last4: '4242', Brand: 'Visa', }, }, ], 500, '', 'subscription' ); expect(methods.getUsedMethods().some((method) => method.type === 'paypal')).toBe(true); expect(methods.getUsedMethods().some((method) => method.value === '1')).toBe(true); expect(methods.getUsedMethods().filter((method) => method.type === 'card').length).toBe(2); expect(methods.getUsedMethods().some((method) => method.value === '2')).toBe(true); expect(methods.getUsedMethods().some((method) => method.value === '3')).toBe(true); }); }); describe('getAvailablePaymentMethods()', () => { it('should return combination of new and used methods', () => { const methods = new PaymentMethods( status, [ { ID: '1', Type: PAYMENT_METHOD_TYPES.PAYPAL, Order: 500, Details: { BillingAgreementID: 'BA-123', PayerID: '123', Payer: '123', }, }, { ID: '2', Type: PAYMENT_METHOD_TYPES.CARD, Order: 501, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '12', ExpYear: '2030', ZIP: '12345', Country: 'US', Last4: '1234', Brand: 'Visa', }, }, // one more card { ID: '3', Type: PAYMENT_METHOD_TYPES.CARD, Order: 502, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '11', ExpYear: '2031', ZIP: '12345', Country: 'US', Last4: '4242', Brand: 'Visa', }, }, ], 500, '', 'subscription' ); const availableMethods = methods.getAvailablePaymentMethods(); expect(availableMethods.usedMethods.some((method) => method.type === 'paypal')).toBe(true); expect(availableMethods.usedMethods.some((method) => method.value === '1')).toBe(true); 
expect(availableMethods.usedMethods.filter((method) => method.type === 'card').length).toBe(2); expect(availableMethods.usedMethods.some((method) => method.value === '2')).toBe(true); expect(availableMethods.usedMethods.some((method) => method.value === '3')).toBe(true); // if paypal already saved, it can't be a new method too expect(availableMethods.methods.some((method) => method.type === 'paypal')).toBe(false); expect(availableMethods.methods.some((method) => method.type === 'card')).toBe(true); }); }); describe('getLastUsedMethod()', () => { it('should return last used method', () => { const methods = new PaymentMethods( status, [ { ID: '1', Type: PAYMENT_METHOD_TYPES.PAYPAL, Order: 500, Details: { BillingAgreementID: 'BA-123', PayerID: '123', Payer: '123', }, }, { ID: '2', Type: PAYMENT_METHOD_TYPES.CARD, Order: 501, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '12', ExpYear: '2030', ZIP: '12345', Country: 'US', Last4: '1234', Brand: 'Visa', }, }, // one more card { ID: '3', Type: PAYMENT_METHOD_TYPES.CARD, Order: 502, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '11', ExpYear: '2031', ZIP: '12345', Country: 'US', Last4: '4242', Brand: 'Visa', }, }, ], 500, '', 'subscription' ); const lastUsedMethod = methods.getLastUsedMethod(); expect(lastUsedMethod).toEqual({ type: PAYMENT_METHOD_TYPES.PAYPAL, paymentMethodId: '1', value: '1', isSaved: true, isExpired: false, }); }); }); describe('getSavedMethodById()', () => { it('should return the correct saved method by id', () => { const methods = new PaymentMethods( status, [ { ID: '1', Type: PAYMENT_METHOD_TYPES.PAYPAL, Order: 500, Details: { BillingAgreementID: 'BA-123', PayerID: '123', Payer: '123', }, }, { ID: '2', Type: PAYMENT_METHOD_TYPES.CARD, Order: 501, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '12', ExpYear: '2030', ZIP: '12345', Country: 'US', Last4: '1234', Brand: 'Visa', }, }, // one more card { ID: '3', Type: 
PAYMENT_METHOD_TYPES.CARD, Order: 502, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '11', ExpYear: '2031', ZIP: '12345', Country: 'US', Last4: '4242', Brand: 'Visa', }, }, ], 500, '', 'subscription' ); const savedMethod = methods.getSavedMethodById('2'); expect(savedMethod).toEqual({ ID: '2', Type: PAYMENT_METHOD_TYPES.CARD, Order: 501, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '12', ExpYear: '2030', ZIP: '12345', Country: 'US', Last4: '1234', Brand: 'Visa', }, }); }); }); describe('initializePaymentMethods()', () => { it('should correctly initialize payment methods', async () => { const apiMock = jest.fn(); const paymentMethodStatus: PaymentMethodStatus = { Card: true, Paypal: true, Apple: true, Cash: true, Bitcoin: true, }; const paymentMethods: SavedPaymentMethod[] = [ { ID: '1', Type: PAYMENT_METHOD_TYPES.CARD, Order: 500, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '12', ExpYear: '2030', ZIP: '12345', Country: 'US', Last4: '1234', Brand: 'Visa', }, }, ]; apiMock.mockImplementation(({ url }) => { if (url === queryPaymentMethods().url) { return { PaymentMethods: paymentMethods, }; } if (url === queryPaymentMethodStatus().url) { return paymentMethodStatus; } }); const methods = await initializePaymentMethods( apiMock, undefined, undefined, true, 500, 'coupon', 'subscription' as PaymentMethodFlows ); expect(methods).toBeDefined(); expect(methods.flow).toEqual('subscription'); expect(methods.amount).toEqual(500); expect(methods.coupon).toEqual('coupon'); expect(methods.getAvailablePaymentMethods().methods.length).toBeGreaterThan(0); }); it('should correctly initialize payment methods when user is not authenticated', async () => { const apiMock = jest.fn(); const paymentMethodStatus: PaymentMethodStatus = { Card: true, Paypal: true, Apple: true, Cash: true, Bitcoin: true, }; apiMock.mockImplementation(({ url }) => { if (url === queryPaymentMethodStatus().url) { return paymentMethodStatus; } 
}); const methods = await initializePaymentMethods( apiMock, undefined, undefined, false, 500, 'coupon', 'subscription' as PaymentMethodFlows ); expect(methods).toBeDefined(); expect(methods.flow).toEqual('subscription'); expect(methods.amount).toEqual(500); expect(methods.coupon).toEqual('coupon'); expect(methods.getAvailablePaymentMethods().methods.length).toBeGreaterThan(0); }); });
7,200
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/methods.ts
import { queryPaymentMethodStatus, queryPaymentMethods } from '@proton/shared/lib/api/payments'; import { BLACK_FRIDAY, MIN_BITCOIN_AMOUNT, MIN_PAYPAL_AMOUNT } from '@proton/shared/lib/constants'; import { Api } from '@proton/shared/lib/interfaces'; import { isExpired as getIsExpired } from './cardDetails'; import { PAYMENT_METHOD_TYPES } from './constants'; import { AvailablePaymentMethod, PaymentMethodFlows, PaymentMethodStatus, SavedPaymentMethod } from './interface'; export class PaymentMethods { public get amount(): number { return this._amount; } public set amount(value: number) { this._amount = value; } public get coupon(): string { return this._coupon; } public set coupon(value: string) { this._coupon = value; } public get flow(): PaymentMethodFlows { return this._flow; } public set flow(value: PaymentMethodFlows) { this._flow = value; } constructor( public paymentMethodStatus: PaymentMethodStatus, public paymentMethods: SavedPaymentMethod[], private _amount: number, private _coupon: string, private _flow: PaymentMethodFlows ) {} getAvailablePaymentMethods(): { usedMethods: AvailablePaymentMethod[]; methods: AvailablePaymentMethod[] } { const usedMethods = this.getUsedMethods(); const methods = this.getNewMethods(); return { usedMethods, methods, }; } /** * Formats the list of saved payment methods. It can be then used to render the list of payment methods. * Depending on your application, you might need to enrich the list with additional UI-specific information, e.g. * name of the payment method, or icon, etc. */ getUsedMethods(): AvailablePaymentMethod[] { const usedMethods: AvailablePaymentMethod[] = this.paymentMethods .filter((paymentMethod) => { const isExistingCard = paymentMethod.Type === PAYMENT_METHOD_TYPES.CARD && this.paymentMethodStatus.Card; const isExistingPaypal = paymentMethod.Type === PAYMENT_METHOD_TYPES.PAYPAL && this.paymentMethodStatus.Paypal; // Only Paypal and Card can be saved/used payment methods. // E.g. 
it's not possible to make Bitcoin/Cash a saved payment method. return isExistingCard || isExistingPaypal; }) .map((paymentMethod) => { const isExpired = paymentMethod.Type === PAYMENT_METHOD_TYPES.CARD ? getIsExpired(paymentMethod.Details) : false; const method: AvailablePaymentMethod = { type: paymentMethod.Type, paymentMethodId: paymentMethod.ID, value: paymentMethod.ID, isSaved: true, isExpired, }; return method; }); return usedMethods; } /** * @returns a list of new (i.e. non-saved) payment methods. Each method is individually checked for availability * and filtered out otherwise. The availability is controlled by the paymentMethodStatus object and by the selected * payment flow. */ getNewMethods(): AvailablePaymentMethod[] { const methods: AvailablePaymentMethod[] = [ { available: this.isCardAvailable(), type: PAYMENT_METHOD_TYPES.CARD, value: PAYMENT_METHOD_TYPES.CARD, isSaved: false, }, { available: this.isPaypalAvailable(), type: PAYMENT_METHOD_TYPES.PAYPAL, value: PAYMENT_METHOD_TYPES.PAYPAL, isSaved: false, }, { available: this.isBitcoinAvailable(), type: PAYMENT_METHOD_TYPES.BITCOIN, value: PAYMENT_METHOD_TYPES.BITCOIN, isSaved: false, }, { available: this.isCashAvailable(), type: PAYMENT_METHOD_TYPES.CASH, value: PAYMENT_METHOD_TYPES.CASH, isSaved: false, }, ] .filter(({ available }) => available) .map(({ type, value, isSaved }) => ({ type, value, isSaved })); return methods; } getLastUsedMethod(): AvailablePaymentMethod | undefined { const usedMethods = this.getUsedMethods(); return usedMethods.length ? 
usedMethods[0] : undefined; } getSavedMethodById(id: string): SavedPaymentMethod | undefined { return this.paymentMethods.find((paymentMethod) => paymentMethod.ID === id); } private isCashAvailable() { const isSignup = this.flow === 'signup' || this.flow === 'signup-pass' || this.flow === 'signup-vpn'; const isHumanVerification = this.flow === 'human-verification'; return !isSignup && !isHumanVerification && this.coupon !== BLACK_FRIDAY.COUPON_CODE; } private isBitcoinAvailable() { const isSignup = this.flow === 'signup' || this.flow === 'signup-vpn'; // for signup-pass, bitcoin IS available const isHumanVerification = this.flow === 'human-verification'; const isInvoice = this.flow === 'invoice'; return ( this.paymentMethodStatus.Bitcoin && !isSignup && !isHumanVerification && !isInvoice && this.coupon !== BLACK_FRIDAY.COUPON_CODE && this.amount >= MIN_BITCOIN_AMOUNT ); } private isPaypalAvailable() { const alreadyHasPayPal = this.paymentMethods.some(({ Type }) => Type === PAYMENT_METHOD_TYPES.PAYPAL); const isPaypalAmountValid = this.amount >= MIN_PAYPAL_AMOUNT; const isInvoice = this.flow === 'invoice'; return this.paymentMethodStatus.Paypal && !alreadyHasPayPal && (isPaypalAmountValid || isInvoice); } private isCardAvailable() { return this.paymentMethodStatus.Card; } } async function getPaymentMethods(api: Api): Promise<SavedPaymentMethod[]> { const response = await api<{ PaymentMethods: SavedPaymentMethod[] }>(queryPaymentMethods()); return response.PaymentMethods ?? []; } async function getPaymentMethodStatus(api: Api): Promise<PaymentMethodStatus> { return api<PaymentMethodStatus>(queryPaymentMethodStatus()); } /** * Initialize payment methods object. If user is authenticated, fetches saved payment methods. 
**/ export async function initializePaymentMethods( api: Api, maybePaymentMethodStatus: PaymentMethodStatus | undefined, maybePaymentMethods: SavedPaymentMethod[] | undefined, isAuthenticated: boolean, amount: number, coupon: string, flow: PaymentMethodFlows ) { const paymentMethodStatusPromise = maybePaymentMethodStatus ?? getPaymentMethodStatus(api); const paymentMethodsPromise = (() => { if (maybePaymentMethods) { return maybePaymentMethods; } if (isAuthenticated) { return getPaymentMethods(api); } return []; })(); const [paymentMethodStatus, paymentMethods] = await Promise.all([ paymentMethodStatusPromise, paymentMethodsPromise, ]); return new PaymentMethods(paymentMethodStatus, paymentMethods, amount, coupon, flow); }
7,201
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/utils.ts
import { PAYMENT_METHOD_TYPES } from './constants'; import { TokenPayment, TokenPaymentMethod } from './interface'; /** * Prepare parameters to be sent to API */ export const toTokenPaymentMethod = (Token: string): TokenPaymentMethod => { const Payment: TokenPayment = { Type: PAYMENT_METHOD_TYPES.TOKEN, Details: { Token, }, }; return { Payment, }; };
7,202
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/cardPayment.test.ts
import { MOCK_TOKEN_RESPONSE, addTokensResolver, addTokensResponse, apiMock } from '@proton/testing'; import { CardModel, getDefaultCard } from '../cardDetails'; import { PAYMENT_METHOD_TYPES, PAYMENT_TOKEN_STATUS } from '../constants'; import { AmountAndCurrency, ChargeablePaymentToken, TokenPaymentMethod } from '../interface'; import { CardPaymentProcessor } from './cardPayment'; describe('CardPaymentProcessor', () => { let paymentProcessor: CardPaymentProcessor; const mockVerifyPayment = jest.fn(); const mockHandler = jest.fn(); const amountAndCurrency: AmountAndCurrency = { Amount: 1000, Currency: 'USD', }; const onTokenIsChargeable = jest.fn().mockResolvedValue(null); let mockCard: CardModel; beforeEach(() => { addTokensResponse(); paymentProcessor = new CardPaymentProcessor( mockVerifyPayment, apiMock, amountAndCurrency, false, onTokenIsChargeable ); mockCard = { number: '4111111111111111', month: '01', year: '32', cvc: '123', zip: '12345', country: 'US', }; paymentProcessor.updateState({ card: mockCard, }); }); afterEach(() => { jest.clearAllMocks(); }); it('should update state correctly', () => { const newState = { cardSubmitted: true }; paymentProcessor.updateState(newState); expect(paymentProcessor.cardSubmitted).toEqual(true); }); it('should call handler when state is updated', () => { paymentProcessor.onStateUpdated(mockHandler); const newState = { cardSubmitted: true }; paymentProcessor.updateState(newState); expect(mockHandler).toHaveBeenCalledWith({ cardSubmitted: true, }); }); it('should remove handler correctly by id', () => { const id = paymentProcessor.onStateUpdated(mockHandler); paymentProcessor.removeHandler(id); const newState = { cardSubmitted: true }; paymentProcessor.updateState(newState); expect(mockHandler).not.toHaveBeenCalled(); }); it('should remove handler correctly by handler', () => { paymentProcessor.onStateUpdated(mockHandler); paymentProcessor.removeHandler(mockHandler); const newState = { cardSubmitted: true }; 
paymentProcessor.updateState(newState); expect(mockHandler).not.toHaveBeenCalled(); }); it('should clear all handlers', () => { const otherHandler = jest.fn(); paymentProcessor.onStateUpdated(mockHandler); paymentProcessor.onStateUpdated(otherHandler); paymentProcessor.clearHandlers(); const newState = { cardSubmitted: true }; paymentProcessor.updateState(newState); expect(mockHandler).not.toHaveBeenCalled(); expect(otherHandler).not.toHaveBeenCalled(); }); it('should return ChargeablePaymentToken right away when amount is 0', async () => { paymentProcessor.amountAndCurrency = { Amount: 0, Currency: 'USD', }; await paymentProcessor.fetchPaymentToken(); const result = await paymentProcessor.verifyPaymentToken(); expect(result).toEqual({ type: PAYMENT_METHOD_TYPES.CARD, chargeable: true, ...paymentProcessor.amountAndCurrency, }); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should throw error when method is card but the data is not valid', async () => { paymentProcessor.updateState({ card: getDefaultCard() }); await expect(paymentProcessor.fetchPaymentToken()).rejects.toThrowError(); }); it('should call onTokenIsChargeable when amount is 0 upon returing the token', async () => { paymentProcessor.amountAndCurrency = { Amount: 0, Currency: 'USD', }; await paymentProcessor.fetchPaymentToken(); await paymentProcessor.verifyPaymentToken(); expect(onTokenIsChargeable).toHaveBeenCalled(); }); it('should call onTokenIsChargeable when amount is not 0 upon returing the token', async () => { const expectedResult: ChargeablePaymentToken = { type: PAYMENT_METHOD_TYPES.CARD, ...paymentProcessor.amountAndCurrency, chargeable: true, Payment: { Type: PAYMENT_METHOD_TYPES.TOKEN, Details: { Token: MOCK_TOKEN_RESPONSE.Token, }, }, }; await paymentProcessor.fetchPaymentToken(); const result = await paymentProcessor.verifyPaymentToken(); expect(result).toEqual(expectedResult); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should set 
processingPayment to false when preparePaymentToken throws error', async () => { const { reject } = addTokensResolver(); const promise = paymentProcessor.fetchPaymentToken(); const error = new Error('error'); reject(error); await expect(promise).rejects.toThrowError(error); }); it('should call mockVerifyPayment', async () => { addTokensResponse({ ...MOCK_TOKEN_RESPONSE, Status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', } as any); const returnToken: TokenPaymentMethod = { Payment: { Type: PAYMENT_METHOD_TYPES.TOKEN, Details: { Token: 'token123', }, }, }; mockVerifyPayment.mockResolvedValue(returnToken); await paymentProcessor.fetchPaymentToken(); const result = await paymentProcessor.verifyPaymentToken(); expect(mockVerifyPayment).toHaveBeenCalledWith({ ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', Token: 'token123', Payment: { Details: { CVC: '123', Country: 'US', ExpMonth: '01', ExpYear: '2032', Number: '4111111111111111', ZIP: '12345', }, Type: 'card', }, addCardMode: false, }); expect(result).toEqual( expect.objectContaining({ type: PAYMENT_METHOD_TYPES.CARD, chargeable: true, ...paymentProcessor.amountAndCurrency, Payment: { Details: { Token: 'token123', }, Type: 'token', }, }) ); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should throw when verifyPaymentToken is called before fetchPaymentToken', async () => { await expect(paymentProcessor.verifyPaymentToken()).rejects.toThrowError(); }); it('should throw when verification failed', async () => { addTokensResponse({ ...MOCK_TOKEN_RESPONSE, Status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', } as any); const error = new Error('error'); mockVerifyPayment.mockRejectedValue(error); await paymentProcessor.fetchPaymentToken(); await expect(paymentProcessor.verifyPaymentToken()).rejects.toThrowError(error); }); it('should remove the 
payment token', async () => { await paymentProcessor.fetchPaymentToken(); expect(paymentProcessor.fetchedPaymentToken).toBeTruthy(); paymentProcessor.reset(); expect(paymentProcessor.fetchedPaymentToken).toEqual(null); }); });
7,203
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/cardPayment.ts
import { Api } from '@proton/shared/lib/interfaces'; import { CardModel, getDefaultCard, isValid, toDetails } from '../cardDetails'; import { PAYMENT_METHOD_TYPES } from '../constants'; import { PaymentVerificator, createPaymentTokenForCard } from '../createPaymentToken'; import { AmountAndCurrency, ChargeablePaymentParameters, ChargeablePaymentToken, NonChargeablePaymentToken, TokenPaymentMethod, WrappedCardPayment, } from '../interface'; import { InvalidDataError, PaymentProcessor } from './paymentProcessor'; export interface CardPaymentProcessorState { cardSubmitted: boolean; card: CardModel; } export class InvalidCardDataError extends InvalidDataError { constructor(message?: string) { super(message); this.name = 'InvalidCardDataError'; // Flag to ignore this error and not send it to Sentry (this as any).ignore = true; } } export class CardPaymentProcessor extends PaymentProcessor<CardPaymentProcessorState> { public fetchedPaymentToken: ChargeablePaymentToken | NonChargeablePaymentToken | null = null; get cardSubmitted() { return this.state.cardSubmitted; } get card() { return this.state.card; } constructor( /** * A function that is called when the payment token is non-chargeable. It's supposed to be used to show the * confirmation dialog to the user. The function is supposed to return a promise that resolves when the user * confirms the payment, and rejects when the user cancels the payment. */ public verifyPayment: PaymentVerificator, public api: Api, /** * The payment token will be fetched for the specific amount and currency. Once it's changes, the * pre-fetched payment must be reset, and the token must be fetched again. */ amountAndCurrency: AmountAndCurrency, /** * If this flag is set to `true`, then the payment token will be fetched without specifying the amount and * currency. This is useful when you want to verify the card without charging it. For example, when user adds a * card to the account, but not use it right away for subscription or top-up. 
*/ private verifyOnly: boolean, onTokenIsChargeable?: (data: ChargeablePaymentParameters) => Promise<unknown> ) { super( { card: getDefaultCard(), cardSubmitted: false, }, amountAndCurrency, onTokenIsChargeable ); } async fetchPaymentToken(): Promise<ChargeablePaymentToken | NonChargeablePaymentToken | null> { if (this.amountAndCurrency.Amount === 0 && !this.verifyOnly) { return null; } if (!this.handleCardSubmit()) { throw new InvalidCardDataError(); } this.fetchedPaymentToken = await createPaymentTokenForCard( this.getPaymentParameters(), this.api, this.verifyOnly ? undefined : this.amountAndCurrency ); return this.fetchedPaymentToken; } async verifyPaymentToken(): Promise<ChargeablePaymentParameters> { if (this.amountAndCurrency.Amount === 0 && !this.verifyOnly) { // The amount is 0, so there is no payment token to verify. // We can just return the payment parameters, and they can be charged right away. return this.tokenCreated(); } if (this.fetchedPaymentToken === null) { throw new Error('Payment token was not fetched. Please call fetchPaymentToken() first.'); } if (this.fetchedPaymentToken.chargeable) { // Is it already chargeable? Great! Then format it, mark is as ChargeablePaymentParameters, and return. return this.tokenCreated(this.fetchedPaymentToken); } // Otherwise, actually call the payment verificator. It will typically open a modal and/or a new tab // where user needs to confirm the payment. 
const token: TokenPaymentMethod = await this.verifyPayment({ Payment: this.getPaymentParameters().Payment, Token: this.fetchedPaymentToken.Payment.Details.Token, ApprovalURL: this.fetchedPaymentToken.approvalURL, ReturnHost: this.fetchedPaymentToken.returnHost, addCardMode: this.verifyOnly, }); return this.tokenCreated(token); } reset() { this.fetchedPaymentToken = null; } updateCardProperty(key: keyof CardModel, value: string) { this.updateState({ card: { ...this.state.card, [key]: value, }, }); } private handleCardSubmit(): boolean { if (this.amountAndCurrency.Amount === 0 && !this.verifyOnly) { return true; } this.updateState({ cardSubmitted: true }); if (isValid(this.state.card)) { return true; } return false; } private tokenCreated(token?: TokenPaymentMethod): ChargeablePaymentParameters { const result: ChargeablePaymentParameters = { type: PAYMENT_METHOD_TYPES.CARD, chargeable: true, ...this.amountAndCurrency, ...token, }; this.onTokenIsChargeable?.(result); return result; } private getPaymentParameters(): WrappedCardPayment { return { Payment: { Type: PAYMENT_METHOD_TYPES.CARD, Details: toDetails(this.state.card), }, } as WrappedCardPayment; } }
7,204
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/paymentProcessor.test.ts
import { CardModel, getDefaultCard } from '../cardDetails'; import { AmountAndCurrency, ChargeablePaymentParameters, ChargeablePaymentToken, NonChargeablePaymentToken, } from '../interface'; import { PaymentProcessor } from './paymentProcessor'; class PaymentProcessorTest extends PaymentProcessor<{ card: CardModel }> { fetchPaymentToken(): Promise<ChargeablePaymentToken | NonChargeablePaymentToken> { throw new Error('Method not implemented.'); } verifyPaymentToken(): Promise<ChargeablePaymentParameters> { throw new Error('Method not implemented.'); } } describe('PaymentProcessor', () => { let paymentProcessor: PaymentProcessor<{ card: CardModel }>; const amountAndCurrency: AmountAndCurrency = { Amount: 1000, Currency: 'USD', }; const mockHandler = jest.fn(); beforeEach(() => { paymentProcessor = new PaymentProcessorTest({ card: getDefaultCard() }, amountAndCurrency); }); afterEach(() => { jest.clearAllMocks(); }); it('should call the handler when the state is updated', () => { paymentProcessor.onStateUpdated(mockHandler); const newState = { card: { ...getDefaultCard(), number: '4242424242424242' } }; paymentProcessor.updateState(newState); expect(mockHandler).toHaveBeenCalledWith(newState); }); it('should return an id when a new handler is added', () => { const id = paymentProcessor.onStateUpdated(mockHandler); expect(typeof id).toEqual('string'); }); it('should not call the handler when the state is updated after the processor was destroyed', () => { paymentProcessor.onStateUpdated(mockHandler); paymentProcessor.destroy(); const newState = { card: { ...getDefaultCard(), number: '4242424242424242' } }; paymentProcessor.updateState(newState); expect(mockHandler).not.toHaveBeenCalled(); }); it('should not call the handler when the state is updated after the handler was removed', () => { const id = paymentProcessor.onStateUpdated(mockHandler); paymentProcessor.removeHandler(id); const newState = { card: { ...getDefaultCard(), number: '4242424242424242' } }; 
paymentProcessor.updateState(newState); expect(mockHandler).not.toHaveBeenCalled(); }); it('should not call the handler when the state is updated after the handler was removed by handler instance', () => { paymentProcessor.onStateUpdated(mockHandler); paymentProcessor.removeHandler(mockHandler); const newState = { card: { ...getDefaultCard(), number: '4242424242424242' } }; paymentProcessor.updateState(newState); expect(mockHandler).not.toHaveBeenCalled(); }); });
7,205
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/paymentProcessor.ts
import {
    AmountAndCurrency,
    ChargeablePaymentParameters,
    ChargeablePaymentToken,
    NonChargeablePaymentToken,
} from '../interface';

/**
 * Callback invoked with the partial state that was just applied via
 * {@link PaymentProcessor.updateState}. Note: it receives the diff only,
 * not the merged full state.
 */
type UpdateHandler<T> = (stateDiff: Partial<T>) => void;

/**
 * Error thrown when user-provided payment data fails validation.
 */
export class InvalidDataError extends Error {
    constructor(message?: string) {
        super(message);
        this.name = 'InvalidDataError';
    }
}

/**
 * Base class for payment processors. Defines the interfaces for the most important methods:
 * `fetchPaymentToken` and `verifyPaymentToken`. Defines a concept of state, which is implemented by the subclasses.
 * Provides a set of common utils to update the state and subscribe to state changes.
 */
export abstract class PaymentProcessor<T = any> {
    // Monotonic counter for handler ids. The previous Math.random()-based ids
    // could (rarely) collide, which would make removeHandler(id) silently
    // remove the wrong subscription; a counter makes ids provably unique.
    private static handlerIdCounter = 0;

    private updatedHandlers: {
        id: string;
        handler: UpdateHandler<T>;
    }[] = [];

    constructor(
        protected state: T,
        public amountAndCurrency: AmountAndCurrency,
        public onTokenIsChargeable?: (data: ChargeablePaymentParameters) => Promise<unknown>
    ) {}

    /**
     * Fetches the payment token. That's a token generated by the Proton's backend. The return type acknowledges that
     * result might be different depending on the payment processor and input parameters. For example, if a credit card
     * doesn't require 3DS, then the token becomes chargeable right away. If it does, then the token is non-chargeable,
     * and requires user confirmation. Sometimes the token is even `null`, for example, if the Amount is 0.
     * That might be the case, for example, when user has enough credits to make subscription.
     */
    abstract fetchPaymentToken(): Promise<ChargeablePaymentToken | NonChargeablePaymentToken | null>;

    /**
     * An important detail is that this method returns {@link ChargeablePaymentParameters} instead of
     * {@link ChargeablePaymentToken}. The crucial difference is that sometimes the payment parameters can be used for
     * a certain operation even without a payment token. Like in the case of a subscription when user already has
     * enough credits.
     * This method is supposed to be called after `fetchPaymentToken`.
     */
    abstract verifyPaymentToken(): Promise<ChargeablePaymentParameters>;

    /**
     * Merges `state` into the current state and notifies every subscribed
     * handler with the diff that was applied.
     */
    updateState(state: Partial<T>) {
        this.state = { ...this.state, ...state };

        // Iterate over a snapshot so a handler that subscribes/unsubscribes
        // during notification cannot perturb this dispatch round.
        for (const { handler } of [...this.updatedHandlers]) {
            handler(state);
        }
    }

    /**
     * Subscribes `handler` to state updates.
     *
     * @param handler - invoked with the state diff on every {@link updateState} call
     * @param options - when `initial` is true, the handler is also invoked
     * immediately with the full current state
     * @returns a unique id that can later be passed to {@link removeHandler}
     */
    onStateUpdated(handler: UpdateHandler<T>, { initial = false } = {}) {
        const id = `handler-${PaymentProcessor.handlerIdCounter++}`;

        this.updatedHandlers.push({
            id,
            handler,
        });

        if (initial) {
            handler(this.state);
        }

        return id;
    }

    /**
     * Releases all subscriptions. Call when the processor is no longer needed.
     */
    destroy() {
        this.clearHandlers();
    }

    /**
     * Unsubscribes every handler at once.
     */
    clearHandlers() {
        this.updatedHandlers = [];
    }

    /**
     * Unsubscribes a single handler, either by the id returned from
     * {@link onStateUpdated} or by the handler function itself.
     */
    removeHandler(idOrHandler: string | UpdateHandler<T>) {
        if (typeof idOrHandler === 'string') {
            this.updatedHandlers = this.updatedHandlers.filter(({ id }) => id !== idOrHandler);
        } else {
            this.updatedHandlers = this.updatedHandlers.filter(({ handler }) => handler !== idOrHandler);
        }
    }
}
7,206
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/paypalPayment.test.ts
import { MAX_CREDIT_AMOUNT, MIN_PAYPAL_AMOUNT } from '@proton/shared/lib/constants'; import { MOCK_TOKEN_RESPONSE, addTokensResolver, addTokensResponse, apiMock } from '@proton/testing'; import { PAYMENT_METHOD_TYPES, PAYMENT_TOKEN_STATUS } from '../constants'; import { AmountAndCurrency, ChargeablePaymentParameters, TokenPaymentMethod } from '../interface'; import { PaypalPaymentProcessor, PaypalWrongAmountError } from './paypalPayment'; describe('PaypalPaymentProcessor', () => { let paymentProcessor: PaypalPaymentProcessor; const mockVerifyPayment = jest.fn(); const mockHandler = jest.fn(); const amountAndCurrency: AmountAndCurrency = { Amount: 1000, Currency: 'USD', }; const onTokenIsChargeable = jest.fn().mockResolvedValue(null); const isCredit = false; function resetPaymentProcessor() { paymentProcessor = new PaypalPaymentProcessor( mockVerifyPayment, apiMock, amountAndCurrency, isCredit, onTokenIsChargeable ); } beforeEach(() => { addTokensResponse(); resetPaymentProcessor(); }); afterEach(() => { jest.clearAllMocks(); }); it('should call handler when token is fetched', async () => { paymentProcessor.onStateUpdated(mockHandler); const result = await paymentProcessor.fetchPaymentToken(); expect(mockHandler).toHaveBeenCalledWith({ fetchedPaymentToken: expect.objectContaining({ Amount: 1000, Currency: 'USD', chargeable: true, type: PAYMENT_METHOD_TYPES.PAYPAL, Payment: expect.objectContaining({ Details: { Token: MOCK_TOKEN_RESPONSE.Token, }, }), }), }); expect(result).toEqual( expect.objectContaining({ Amount: 1000, Currency: 'USD', chargeable: true, type: PAYMENT_METHOD_TYPES.PAYPAL, Payment: expect.objectContaining({ Details: { Token: MOCK_TOKEN_RESPONSE.Token, }, }), }) ); }); it('should return ChargeablePaymentToken right away when amount is 0', async () => { paymentProcessor.amountAndCurrency = { Amount: 0, Currency: 'USD', }; await paymentProcessor.fetchPaymentToken(); const result = await paymentProcessor.verifyPaymentToken(); expect(result).toEqual( 
expect.objectContaining({ type: PAYMENT_METHOD_TYPES.PAYPAL, chargeable: true, ...paymentProcessor.amountAndCurrency, }) ); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should call onTokenIsChargeable when amount is 0 upon returing the token', async () => { paymentProcessor.amountAndCurrency = { Amount: 0, Currency: 'USD', }; await paymentProcessor.fetchPaymentToken(); await paymentProcessor.verifyPaymentToken(); expect(onTokenIsChargeable).toHaveBeenCalled(); }); it('should call onTokenIsChargeable when amount is not 0 upon returing the token', async () => { const expectedResult: ChargeablePaymentParameters = { type: PAYMENT_METHOD_TYPES.PAYPAL, ...paymentProcessor.amountAndCurrency, chargeable: true, Payment: { Type: PAYMENT_METHOD_TYPES.TOKEN, Details: { Token: MOCK_TOKEN_RESPONSE.Token, }, }, }; await paymentProcessor.fetchPaymentToken(); const result = await paymentProcessor.verifyPaymentToken(); expect(result).toEqual(expectedResult); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should set processingPayment to false when preparePaymentToken throws error', async () => { const { reject } = addTokensResolver(); const promise = paymentProcessor.fetchPaymentToken(); const error = new Error('error'); reject(error); await expect(promise).rejects.toThrowError(error); }); it('should call mockVerifyPayment', async () => { addTokensResponse({ ...MOCK_TOKEN_RESPONSE, Status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', } as any); const returnToken: TokenPaymentMethod = { Payment: { Type: PAYMENT_METHOD_TYPES.TOKEN, Details: { Token: 'token123', }, }, }; mockVerifyPayment.mockResolvedValue(returnToken); await paymentProcessor.fetchPaymentToken(); const result = await paymentProcessor.verifyPaymentToken(); expect(mockVerifyPayment).toHaveBeenCalledWith({ ApprovalURL: 'https://verify.proton.me', Payment: { Type: PAYMENT_METHOD_TYPES.PAYPAL, }, ReturnHost: 
'https://proton.me', Token: 'token123', }); expect(result).toEqual( expect.objectContaining({ type: PAYMENT_METHOD_TYPES.PAYPAL, chargeable: true, ...paymentProcessor.amountAndCurrency, Payment: { Details: { Token: 'token123', }, Type: 'token', }, }) ); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should throw error when verifyPaymentToken is called without token', async () => { await expect(paymentProcessor.verifyPaymentToken()).rejects.toThrowError('Payment token is not fetched'); }); it('should save the error and re-trhrow it', async () => { addTokensResponse({ ...MOCK_TOKEN_RESPONSE, Status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', } as any); mockVerifyPayment.mockRejectedValue(new Error('some error')); await paymentProcessor.fetchPaymentToken(); await expect(paymentProcessor.verifyPaymentToken()).rejects.toThrowError('some error'); expect(paymentProcessor.verificationError).toEqual(new Error('Paypal payment verification failed')); }); it('should update disabled state depending on the amount', () => { paymentProcessor.setAmountAndCurrency({ Amount: 0, Currency: 'USD', }); expect(paymentProcessor.disabled).toBe(false); paymentProcessor.setAmountAndCurrency({ Amount: MIN_PAYPAL_AMOUNT - 1, Currency: 'USD', }); expect(paymentProcessor.disabled).toBe(true); paymentProcessor.setAmountAndCurrency({ Amount: MIN_PAYPAL_AMOUNT, Currency: 'USD', }); expect(paymentProcessor.disabled).toBe(false); paymentProcessor.setAmountAndCurrency({ Amount: (MAX_CREDIT_AMOUNT + MIN_PAYPAL_AMOUNT) / 2, Currency: 'EUR', }); expect(paymentProcessor.disabled).toBe(false); paymentProcessor.setAmountAndCurrency({ Amount: MAX_CREDIT_AMOUNT, Currency: 'USD', }); expect(paymentProcessor.disabled).toBe(false); paymentProcessor.setAmountAndCurrency({ Amount: MAX_CREDIT_AMOUNT + 1, Currency: 'USD', }); expect(paymentProcessor.disabled).toBe(true); }); it('should throw when amount is not in range', async () 
=> { paymentProcessor.setAmountAndCurrency({ Amount: MIN_PAYPAL_AMOUNT - 1, Currency: 'USD', }); await expect(() => paymentProcessor.fetchPaymentToken()).rejects.toThrowError(PaypalWrongAmountError); resetPaymentProcessor(); paymentProcessor.setAmountAndCurrency({ Amount: MAX_CREDIT_AMOUNT + 1, Currency: 'USD', }); await expect(() => paymentProcessor.fetchPaymentToken()).rejects.toThrowError(PaypalWrongAmountError); resetPaymentProcessor(); paymentProcessor.setAmountAndCurrency({ Amount: MIN_PAYPAL_AMOUNT, Currency: 'USD', }); const token = await paymentProcessor.fetchPaymentToken(); expect(token).toBeDefined(); }); it('should throw when amount is not in range (no resets)', async () => { paymentProcessor.setAmountAndCurrency({ Amount: MIN_PAYPAL_AMOUNT - 1, Currency: 'USD', }); await expect(() => paymentProcessor.fetchPaymentToken()).rejects.toThrowError(PaypalWrongAmountError); paymentProcessor.setAmountAndCurrency({ Amount: MAX_CREDIT_AMOUNT + 1, Currency: 'USD', }); await expect(() => paymentProcessor.fetchPaymentToken()).rejects.toThrowError(PaypalWrongAmountError); paymentProcessor.setAmountAndCurrency({ Amount: MIN_PAYPAL_AMOUNT, Currency: 'USD', }); const token = await paymentProcessor.fetchPaymentToken(); expect(token).toBeDefined(); }); // It's necessary to keep the error to support the retry mechanism properly. // Properly means: when there is an error, the user sees Retry button. When the user clicks on it, // the error is removed and the token is fetched again. Very important: the token is NOT verified until user // clicks on the button the second time. This is because Safari doesn't allow to open a new window without // a synchronous user action handling. So, we need to wait for the user to click on the button and then // open the window. 
it('should remove token and KEEP error on reset()', () => { const error = new Error('error'); paymentProcessor.updateState({ fetchedPaymentToken: 'token' as any, verificationError: error, }); paymentProcessor.reset(); expect(paymentProcessor.fetchedPaymentToken).toBeNull(); expect(paymentProcessor.verificationError).toBe(error); }); });
7,207
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/paypalPayment.ts
import { createToken } from '@proton/shared/lib/api/payments'; import { MAX_CREDIT_AMOUNT, MIN_CREDIT_AMOUNT, MIN_PAYPAL_AMOUNT } from '@proton/shared/lib/constants'; import { Api } from '@proton/shared/lib/interfaces'; import { PAYMENT_METHOD_TYPES } from '../constants'; import { PaymentVerificator, formatToken } from '../createPaymentToken'; import { AmountAndCurrency, ChargeablePaymentParameters, ChargeablePaymentToken, NonChargeablePaymentToken, TokenPaymentMethod, WrappedPaypalPayment, } from '../interface'; import { PaymentProcessor } from './paymentProcessor'; export type PaypalPaymentState = { fetchedPaymentToken: ChargeablePaymentToken | NonChargeablePaymentToken | null; verificationError: any; disabled: boolean; }; export class PaypalWrongAmountError extends Error {} export class PaypalPaymentProcessor extends PaymentProcessor<PaypalPaymentState> { get fetchedPaymentToken(): ChargeablePaymentToken | NonChargeablePaymentToken | null { return this.state.fetchedPaymentToken; } get verificationError(): any { return this.state.verificationError; } get disabled(): boolean { return this.state.disabled; } constructor( public verifyPayment: PaymentVerificator, public api: Api, amountAndCurrency: AmountAndCurrency, private isCredit: boolean, onTokenIsChargeable?: (data: ChargeablePaymentParameters) => Promise<unknown>, private ignoreAmountCheck?: boolean ) { super( { fetchedPaymentToken: null, verificationError: null, disabled: false, }, amountAndCurrency, onTokenIsChargeable ); } async fetchPaymentToken(): Promise<ChargeablePaymentToken | NonChargeablePaymentToken | null> { if (this.amountAndCurrency.Amount === 0) { return null; } this.reset(); this.updateState({ verificationError: null, }); const checkAmountResult = this.checkAmount(); if (!checkAmountResult.isInRange) { throw new PaypalWrongAmountError( `Amount should be between ${checkAmountResult.minAmount} and ${checkAmountResult.maxAmount}. 
The current amount is ${checkAmountResult.currentAmount}.` ); } let paypalToken; try { paypalToken = await this.api( createToken({ ...this.amountAndCurrency, Payment: { Type: this.getType(), }, }) ); } catch (error: any) { this.updateState({ verificationError: error, }); throw error; } const fetchedPaymentToken = formatToken(paypalToken, this.getType(), this.amountAndCurrency); this.updateState({ fetchedPaymentToken, }); return fetchedPaymentToken; } async verifyPaymentToken(): Promise<ChargeablePaymentParameters> { if (this.amountAndCurrency.Amount === 0) { return this.tokenCreated(); } if (this.fetchedPaymentToken === null) { throw new Error('Payment token is not fetched'); } if (this.fetchedPaymentToken.chargeable) { return this.tokenCreated(this.fetchedPaymentToken); } let token: TokenPaymentMethod; try { token = await this.verifyPayment({ Payment: this.getPaymentParameters().Payment, Token: this.fetchedPaymentToken.Payment.Details.Token, ApprovalURL: this.fetchedPaymentToken.approvalURL, ReturnHost: this.fetchedPaymentToken.returnHost, }); } catch (error: any) { this.updateState({ verificationError: new Error('Paypal payment verification failed'), }); throw error; } return this.tokenCreated(token); } reset() { this.updateState({ fetchedPaymentToken: null, }); } setAmountAndCurrency(amountAndCurrency: AmountAndCurrency) { this.amountAndCurrency = amountAndCurrency; const disabled = !this.checkAmount().isInRange; this.updateState({ disabled }); } private tokenCreated(token?: TokenPaymentMethod): ChargeablePaymentParameters { const result: ChargeablePaymentParameters = { type: this.getType(), chargeable: true, ...this.amountAndCurrency, ...token, }; this.onTokenIsChargeable?.(result); return result; } private getPaymentParameters(): WrappedPaypalPayment { return { Payment: { Type: this.getType(), }, } as WrappedPaypalPayment; } private getType(): PAYMENT_METHOD_TYPES.PAYPAL | PAYMENT_METHOD_TYPES.PAYPAL_CREDIT { return this.isCredit ? 
PAYMENT_METHOD_TYPES.PAYPAL_CREDIT : PAYMENT_METHOD_TYPES.PAYPAL; } private checkAmount() { const isInRange = (this.amountAndCurrency.Amount >= MIN_PAYPAL_AMOUNT && this.amountAndCurrency.Amount <= MAX_CREDIT_AMOUNT) || // 0 is allowed because in this case we don't need to fetch token this.amountAndCurrency.Amount === 0 || this.ignoreAmountCheck; return { isInRange, currentAmount: this.amountAndCurrency.Amount, minAmount: MIN_CREDIT_AMOUNT, maxAmount: MAX_CREDIT_AMOUNT, }; } }
7,208
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/savedPayment.test.ts
import { MOCK_TOKEN_RESPONSE, addTokensResponse, apiMock } from '@proton/testing'; import { PAYMENT_METHOD_TYPES, PAYMENT_TOKEN_STATUS } from '../constants'; import { AmountAndCurrency, Autopay, SavedPaymentMethod, TokenPaymentMethod } from '../interface'; import { SavedPaymentProcessor } from './savedPayment'; describe('SavedPaymentProcessor', () => { let savedPaymentProcessor: SavedPaymentProcessor; const mockVerifyPayment = jest.fn(); const amountAndCurrency: AmountAndCurrency = { Amount: 1000, Currency: 'USD', }; const onTokenIsChargeable = jest.fn().mockResolvedValue(null); const savedMethod: SavedPaymentMethod = { ID: '123', Type: PAYMENT_METHOD_TYPES.CARD, Order: 500, Autopay: Autopay.ENABLE, Details: { Name: 'Arthur Morgan', ExpMonth: '12', ExpYear: '2032', ZIP: '12345', Country: 'US', Last4: '4242', Brand: 'Visa', }, }; beforeEach(() => { addTokensResponse(); savedPaymentProcessor = new SavedPaymentProcessor( mockVerifyPayment, apiMock, amountAndCurrency, savedMethod, onTokenIsChargeable ); }); afterEach(() => { jest.clearAllMocks(); }); it('should fetch payment token', async () => { const result = await savedPaymentProcessor.fetchPaymentToken(); expect(result).toEqual({ Amount: 1000, Currency: 'USD', Payment: { Details: { Token: 'token123' }, Type: 'token' }, chargeable: true, type: 'card', }); }); it('should throw error when verifyPaymentToken is called before fetchPaymentToken', async () => { await expect(savedPaymentProcessor.verifyPaymentToken()).rejects.toThrowError( 'Payment token was not fetched. Please call fetchPaymentToken() first.' 
); }); it('should return ChargeablePaymentToken when amount is 0', async () => { savedPaymentProcessor.amountAndCurrency = { Amount: 0, Currency: 'USD', }; await savedPaymentProcessor.fetchPaymentToken(); const result = await savedPaymentProcessor.verifyPaymentToken(); expect(result).toEqual( expect.objectContaining({ type: PAYMENT_METHOD_TYPES.CARD, chargeable: true, ...savedPaymentProcessor.amountAndCurrency, }) ); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should return ChargeablePaymentToken when fetchedPaymentToken is chargeable', async () => { await savedPaymentProcessor.fetchPaymentToken(); const result = await savedPaymentProcessor.verifyPaymentToken(); expect(result).toEqual( expect.objectContaining({ type: PAYMENT_METHOD_TYPES.CARD, chargeable: true, ...savedPaymentProcessor.amountAndCurrency, Payment: { Details: { Token: 'token123', }, Type: 'token', }, }) ); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should call verifyPayment when fetchedPaymentToken is not chargeable', async () => { addTokensResponse({ ...MOCK_TOKEN_RESPONSE, Status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', } as any); const returnToken: TokenPaymentMethod = { Payment: { Type: PAYMENT_METHOD_TYPES.TOKEN, Details: { Token: 'token123', }, }, }; mockVerifyPayment.mockResolvedValue(returnToken); await savedPaymentProcessor.fetchPaymentToken(); const result = await savedPaymentProcessor.verifyPaymentToken(); expect(mockVerifyPayment).toHaveBeenCalledWith({ ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', Token: 'token123', }); expect(result).toEqual( expect.objectContaining({ type: PAYMENT_METHOD_TYPES.CARD, chargeable: true, ...savedPaymentProcessor.amountAndCurrency, Payment: { Details: { Token: 'token123', }, Type: 'token', }, }) ); expect(onTokenIsChargeable).toHaveBeenCalledWith(result); }); it('should throw when verifyPaymentToken throws', async 
() => { addTokensResponse({ ...MOCK_TOKEN_RESPONSE, Status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, ApprovalURL: 'https://verify.proton.me', ReturnHost: 'https://proton.me', } as any); mockVerifyPayment.mockRejectedValue(new Error('error')); await savedPaymentProcessor.fetchPaymentToken(); await expect(savedPaymentProcessor.verifyPaymentToken()).rejects.toThrowError('error'); }); it('should reset payment token', async () => { await savedPaymentProcessor.fetchPaymentToken(); expect(savedPaymentProcessor.fetchedPaymentToken).toBeTruthy(); savedPaymentProcessor.reset(); expect(savedPaymentProcessor.fetchedPaymentToken).toEqual(null); }); });
7,209
0
petrpan-code/ProtonMail/WebClients/packages/components/payments/core
petrpan-code/ProtonMail/WebClients/packages/components/payments/core/payment-processors/savedPayment.ts
import { Api } from '@proton/shared/lib/interfaces';

import { PAYMENT_METHOD_TYPES } from '../constants';
import { PaymentVerificator, createPaymentTokenForExistingPayment } from '../createPaymentToken';
import {
    AmountAndCurrency,
    ChargeablePaymentParameters,
    ChargeablePaymentToken,
    NonChargeablePaymentToken,
    SavedPaymentMethod,
    TokenPaymentMethod,
} from '../interface';
import { PaymentProcessor } from './paymentProcessor';

// State is just a reference to the saved method being charged: its backend id
// and its type (card or paypal).
interface SavedPaymentState {
    method: {
        paymentMethodId: string;
        type: PAYMENT_METHOD_TYPES.CARD | PAYMENT_METHOD_TYPES.PAYPAL;
    };
}

/**
 * Payment processor for a previously saved payment method (card or PayPal).
 * Fetches a token for the existing method and verifies it when it is not
 * immediately chargeable.
 */
export class SavedPaymentProcessor extends PaymentProcessor<SavedPaymentState> {
    // Token fetched by fetchPaymentToken(); null until fetched or after reset().
    public fetchedPaymentToken: ChargeablePaymentToken | NonChargeablePaymentToken | null = null;

    constructor(
        public verifyPayment: PaymentVerificator,
        public api: Api,
        amountAndCurrency: AmountAndCurrency,
        savedMethod: SavedPaymentMethod,
        onTokenIsChargeable?: (data: ChargeablePaymentParameters) => Promise<unknown>
    ) {
        super(
            {
                method: {
                    paymentMethodId: savedMethod.ID,
                    type: savedMethod.Type,
                },
            },
            amountAndCurrency,
            onTokenIsChargeable
        );
    }

    /**
     * Fetches a backend payment token for the saved method.
     * Returns null when Amount is 0 — no token is needed in that case.
     */
    async fetchPaymentToken(): Promise<ChargeablePaymentToken | NonChargeablePaymentToken | null> {
        if (this.amountAndCurrency.Amount === 0) {
            return null;
        }

        this.fetchedPaymentToken = await createPaymentTokenForExistingPayment(
            this.state.method.paymentMethodId,
            this.state.method.type,
            this.api,
            this.amountAndCurrency
        );

        return this.fetchedPaymentToken;
    }

    /**
     * Converts the fetched token into chargeable payment parameters, running
     * the user verification flow when the token is not chargeable yet.
     * Must be called after fetchPaymentToken() (unless Amount is 0).
     */
    async verifyPaymentToken(): Promise<ChargeablePaymentParameters> {
        if (this.amountAndCurrency.Amount === 0) {
            return this.tokenCreated();
        }

        if (this.fetchedPaymentToken === null) {
            throw new Error('Payment token was not fetched. Please call fetchPaymentToken() first.');
        }

        if (this.fetchedPaymentToken.chargeable) {
            return this.tokenCreated(this.fetchedPaymentToken);
        }

        const token: TokenPaymentMethod = await this.verifyPayment({
            Token: this.fetchedPaymentToken.Payment.Details.Token,
            ApprovalURL: this.fetchedPaymentToken.approvalURL,
            ReturnHost: this.fetchedPaymentToken.returnHost,
        });

        return this.tokenCreated(token);
    }

    // Points the processor at a different saved method without recreating it.
    updateSavedMethod(savedMethod: SavedPaymentMethod) {
        this.state.method = {
            paymentMethodId: savedMethod.ID,
            type: savedMethod.Type,
        };
    }

    // Drops the fetched token so the next verify requires a fresh fetch.
    reset() {
        this.fetchedPaymentToken = null;
    }

    private tokenCreated(token?: TokenPaymentMethod): ChargeablePaymentParameters {
        // NOTE(review): `type` is hardcoded to CARD even though the saved
        // method may be PAYPAL (see SavedPaymentState). Looks suspicious —
        // confirm whether callers rely on 'card' here before changing it.
        const result: ChargeablePaymentParameters = {
            type: PAYMENT_METHOD_TYPES.CARD,
            chargeable: true,
            ...this.amountAndCurrency,
            ...token,
        };

        this.onTokenIsChargeable?.(result);

        return result;
    }
}
7,210
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/index.ts
// Barrel file: public entry point of the payments react-extensions package.
export * from './useMethods';
export * from './usePaymentFacade';
export { useCard } from './useCard';
7,211
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/interface.ts
import { ChargeablePaymentParameters, PaymentProcessor } from '../core';

/**
 * Common surface shared by the payment-processor React hooks (card, paypal,
 * saved method): token fetching, token verification, the combined flow, and a
 * loading flag for each.
 */
export interface PaymentProcessorHook {
    /** Requests a payment token from the backend. */
    fetchPaymentToken: () => Promise<unknown>;
    /** True while fetchPaymentToken() is in flight. */
    fetchingToken: boolean;
    /** Verifies the previously fetched token (e.g. user confirmation for non-chargeable tokens). */
    verifyPaymentToken: () => Promise<ChargeablePaymentParameters>;
    /** True while verifyPaymentToken() is in flight. */
    verifyingToken: boolean;
    /** Underlying processor instance, when the hook exposes one. */
    paymentProcessor?: PaymentProcessor;
    /** Combined flow: fetches the token if not yet fetched, then verifies it. */
    processPaymentToken: () => Promise<ChargeablePaymentParameters>;
    /** True while processPaymentToken() is in flight. */
    processingToken: boolean;
    meta: {
        /** Which concrete payment method this hook instance represents. */
        type: 'paypal' | 'paypal-credit' | 'card' | 'saved';
        // Method-specific extra data; shape depends on `type`.
        data?: any;
    };
}
7,212
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/useCard.test.ts
import { renderHook } from '@testing-library/react-hooks'; import { addTokensResponse, apiMock } from '@proton/testing'; import { InvalidCardDataError, PAYMENT_TOKEN_STATUS } from '../core'; import { Props, useCard } from './useCard'; const mockVerifyPayment = jest.fn(); const onChargeableMock = jest.fn(); beforeEach(() => { jest.clearAllMocks(); addTokensResponse(); }); it('should render', () => { const { result } = renderHook(() => useCard( { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock }, { api: apiMock, verifyPayment: mockVerifyPayment } ) ); expect(result.current).toBeDefined(); expect(result.current.meta.type).toEqual('card'); }); it('should return empty card by default', () => { const { result } = renderHook(() => useCard( { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock }, { api: apiMock, verifyPayment: mockVerifyPayment } ) ); expect(result.current.card).toEqual({ number: '', month: '', year: '', cvc: '', zip: '', country: 'US', }); expect(result.current.submitted).toEqual(false); expect(result.current.errors).toEqual({}); }); it('should have false loading statuses by default', () => { const { result } = renderHook(() => useCard( { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock }, { api: apiMock, verifyPayment: mockVerifyPayment } ) ); expect(result.current.fetchingToken).toEqual(false); expect(result.current.verifyingToken).toEqual(false); expect(result.current.processingToken).toEqual(false); }); it('should update amount and currency', () => { const { result, rerender } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock }, } ); expect(result.current.paymentProcessor.amountAndCurrency).toEqual({ Amount: 100, Currency: 'USD' }); rerender({ amountAndCurrency: { Amount: 200, Currency: 'EUR' }, 
onChargeable: onChargeableMock }); expect(result.current.paymentProcessor.amountAndCurrency).toEqual({ Amount: 200, Currency: 'EUR' }); }); it('should not update the processor between renders', () => { const { result, rerender } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock }, } ); const processor = result.current.paymentProcessor; rerender({ amountAndCurrency: { Amount: 200, Currency: 'EUR' }, onChargeable: onChargeableMock }); expect(result.current.paymentProcessor).toEqual(processor); }); it('should destroy the processor on unmount', () => { const { result, unmount } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock }, } ); const processor = result.current.paymentProcessor; processor.destroy = jest.fn(); unmount(); expect(processor.destroy).toHaveBeenCalled(); }); it('should throw an error if there are no card details and user wants to fetch token', async () => { const { result } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock }, } ); await expect(result.current.fetchPaymentToken()).rejects.toThrowError(InvalidCardDataError); }); it('should fetch payment token when the card is provided initially', async () => { const { result } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock, initialCard: { number: '4242424242424242', month: '12', year: '2032', cvc: '123', zip: '12345', country: 'US', }, }, } ); expect(result.current.fetchingToken).toEqual(false); 
expect(result.current.verifyingToken).toEqual(false); expect(result.current.processingToken).toEqual(false); const tokenPromise = result.current.fetchPaymentToken(); expect(result.current.fetchingToken).toEqual(true); expect(result.current.verifyingToken).toEqual(false); expect(result.current.processingToken).toEqual(true); const token = await tokenPromise; expect(result.current.fetchingToken).toEqual(false); expect(result.current.verifyingToken).toEqual(false); expect(result.current.processingToken).toEqual(false); expect(token).toEqual({ Amount: 100, Currency: 'USD', Payment: { Type: 'token', Details: { Token: 'token123', }, }, chargeable: true, type: 'card', }); }); it('should fetch payment token when the card is provided later', async () => { const { result } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock, }, } ); expect(result.current.fetchingToken).toEqual(false); result.current.setCardProperty('number', '4242424242424242'); result.current.setCardProperty('month', '12'); result.current.setCardProperty('year', '2032'); result.current.setCardProperty('cvc', '123'); result.current.setCardProperty('zip', '12345'); result.current.setCardProperty('country', 'US'); expect(result.current.fetchingToken).toEqual(false); const tokenPromise = result.current.fetchPaymentToken(); expect(result.current.fetchingToken).toEqual(true); const token = await tokenPromise; expect(result.current.fetchingToken).toEqual(false); expect(token).toEqual({ Amount: 100, Currency: 'USD', Payment: { Type: 'token', Details: { Token: 'token123', }, }, chargeable: true, type: 'card', }); }); it('should reset payment token when currency or amount is changed', async () => { const { result, rerender } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 
100, Currency: 'USD' }, onChargeable: onChargeableMock, }, } ); result.current.setCardProperty('number', '4242424242424242'); result.current.setCardProperty('month', '12'); result.current.setCardProperty('year', '2032'); result.current.setCardProperty('cvc', '123'); result.current.setCardProperty('zip', '12345'); result.current.setCardProperty('country', 'US'); await result.current.fetchPaymentToken(); expect(result.current.paymentProcessor.fetchedPaymentToken).toEqual({ Amount: 100, Currency: 'USD', Payment: { Type: 'token', Details: { Token: 'token123', }, }, chargeable: true, type: 'card', }); rerender({ amountAndCurrency: { Amount: 200, Currency: 'EUR' }, onChargeable: onChargeableMock, }); expect(result.current.paymentProcessor.fetchedPaymentToken).toEqual(null); }); it('should verify the payment token', async () => { addTokensResponse().pending(); mockVerifyPayment.mockResolvedValue({ Payment: { Type: 'token', Details: { Token: 'token123', }, }, }); const { result } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock, }, } ); result.current.setCardProperty('number', '4242424242424242'); result.current.setCardProperty('month', '12'); result.current.setCardProperty('year', '2032'); result.current.setCardProperty('cvc', '123'); result.current.setCardProperty('zip', '12345'); result.current.setCardProperty('country', 'US'); await result.current.fetchPaymentToken(); expect(result.current.paymentProcessor.fetchedPaymentToken).toEqual({ Amount: 100, Currency: 'USD', Payment: { Type: 'token', Details: { Token: 'token123', }, }, chargeable: false, type: 'card', status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, approvalURL: 'https://verify.proton.me', returnHost: 'https://account.proton.me', }); const verifyPromise = result.current.verifyPaymentToken(); expect(result.current.fetchingToken).toEqual(false); 
expect(result.current.verifyingToken).toEqual(true); expect(result.current.processingToken).toEqual(true); const verifiedToken = await verifyPromise; expect(verifiedToken).toEqual({ Payment: { Type: 'token', Details: { Token: 'token123', }, }, Amount: 100, Currency: 'USD', chargeable: true, type: 'card', }); }); it('should throw an error during token processing if verification failed', async () => { addTokensResponse().pending(); mockVerifyPayment.mockRejectedValue(new Error('Verification failed')); const { result } = renderHook( (props: Props) => useCard(props, { api: apiMock, verifyPayment: mockVerifyPayment }), { initialProps: { amountAndCurrency: { Amount: 100, Currency: 'USD' }, onChargeable: onChargeableMock, }, } ); result.current.setCardProperty('number', '4242424242424242'); result.current.setCardProperty('month', '12'); result.current.setCardProperty('year', '2032'); result.current.setCardProperty('cvc', '123'); result.current.setCardProperty('zip', '12345'); result.current.setCardProperty('country', 'US'); await result.current.fetchPaymentToken(); expect(result.current.paymentProcessor.fetchedPaymentToken).toEqual({ Amount: 100, Currency: 'USD', Payment: { Type: 'token', Details: { Token: 'token123', }, }, chargeable: false, type: 'card', status: PAYMENT_TOKEN_STATUS.STATUS_PENDING, approvalURL: 'https://verify.proton.me', returnHost: 'https://account.proton.me', }); const processPromise = result.current.processPaymentToken(); expect(result.current.fetchingToken).toEqual(false); expect(result.current.verifyingToken).toEqual(true); expect(result.current.processingToken).toEqual(true); await expect(processPromise).rejects.toThrow('Verification failed'); expect(result.current.fetchingToken).toEqual(false); expect(result.current.verifyingToken).toEqual(false); expect(result.current.processingToken).toEqual(false); expect(result.current.paymentProcessor.fetchedPaymentToken).toEqual(null); });
7,213
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/useCard.ts
import { useEffect, useState } from 'react';

import { useLoading } from '@proton/hooks';
import { Api } from '@proton/shared/lib/interfaces';
import noop from '@proton/utils/noop';

import {
    AmountAndCurrency,
    CardModel,
    CardPaymentProcessor,
    CardPaymentProcessorState,
    ChargeablePaymentParameters,
    PaymentVerificator,
    getErrors,
} from '../core';
import { PaymentProcessorHook } from './interface';
import { usePaymentProcessor } from './usePaymentProcessor';

/**
 * Per-field validity flags for the credit-card form. `true` means the field
 * currently has no validation error.
 */
export type CardFieldStatus = {
    number: boolean;
    month: boolean;
    year: boolean;
    cvc: boolean;
    zip: boolean;
    country: boolean;
};

/**
 * Every field starts out valid; errors are surfaced only once the processor
 * reports them (and, to the caller, only after submission).
 */
export const getInitialFieldStatus = (): CardFieldStatus => {
    return {
        number: true,
        month: true,
        year: true,
        cvc: true,
        zip: true,
        country: true,
    };
};

export interface Dependencies {
    api: Api;
    verifyPayment: PaymentVerificator;
}

export interface Props {
    amountAndCurrency: AmountAndCurrency;
    initialCard?: CardModel;
    onChargeable?: (data: ChargeablePaymentParameters) => Promise<unknown>;
    verifyOnly?: boolean;
}

export type CardProcessorHook = PaymentProcessorHook & {
    card: CardModel;
    fieldsStatus: CardFieldStatus;
    setCardProperty: (key: keyof CardModel, value: any) => void;
    paymentProcessor: CardPaymentProcessor;
    errors: Record<string, string>;
    submitted: boolean;
};

/**
 * React wrapper for {@link CardPaymentProcessor}. Exposes the processor's state
 * (card model, validation errors, loading flags) as React state and provides
 * `processPaymentToken` as the main action: fetch a payment token if needed,
 * then verify it. Also derives data useful for rendering the credit-card form.
 */
export const useCard = (
    { amountAndCurrency, initialCard, verifyOnly, onChargeable }: Props,
    { api, verifyPayment }: Dependencies
): CardProcessorHook => {
    const [errors, setErrors] = useState<Record<string, string>>({});
    const [submitted, setSubmitted] = useState(false);

    // Processor instance survives re-renders; created once.
    const paymentProcessor = usePaymentProcessor(
        () => new CardPaymentProcessor(verifyPayment, api, amountAndCurrency, !!verifyOnly, onChargeable)
    );
    const [card, setCard] = useState(paymentProcessor.card);

    const [fetchingToken, withFetchingToken] = useLoading();
    const [verifyingToken, withVerifyingToken] = useLoading();
    const processingToken = fetchingToken || verifyingToken;

    // Keep the processor's callback in sync with the latest prop.
    useEffect(() => {
        paymentProcessor.onTokenIsChargeable = onChargeable;
    }, [onChargeable]);

    // A new amount/currency invalidates any previously fetched token.
    useEffect(() => {
        paymentProcessor.amountAndCurrency = amountAndCurrency;
        paymentProcessor.reset();
    }, [amountAndCurrency]);

    // One-time wiring: seed the initial card and mirror processor state into React state.
    useEffect(() => {
        if (initialCard) {
            paymentProcessor.updateState({ card: initialCard });
        }

        const stateMirrors: Record<keyof CardPaymentProcessorState, (...args: any[]) => any> = {
            card: setCard,
            cardSubmitted: setSubmitted,
        };

        paymentProcessor.onStateUpdated(
            (updatedProperties) => {
                for (const [key, value] of Object.entries(updatedProperties)) {
                    stateMirrors[key as keyof CardPaymentProcessorState]?.(value);
                }
                // Re-validate on every state change so `errors` always reflects the current card.
                setErrors(getErrors(paymentProcessor.card));
            },
            {
                initial: true,
            }
        );

        return () => paymentProcessor.destroy();
    }, []);

    const reset = () => paymentProcessor.reset();

    const fetchPaymentToken = async () => withFetchingToken(paymentProcessor.fetchPaymentToken());

    const verifyPaymentToken = () => {
        const verification = paymentProcessor.verifyPaymentToken();
        // Track the loading state separately; the raw promise is returned so the
        // caller observes the real rejection, not the swallowed one.
        withVerifyingToken(verification).catch(noop);
        return verification;
    };

    /**
     * Main action: ensures a token exists, then verifies it. On verification
     * failure the processor is reset (dropping the fetched token) and the error
     * is re-thrown for the caller.
     */
    const processPaymentToken = async () => {
        if (!paymentProcessor.fetchedPaymentToken) {
            await fetchPaymentToken();
        }

        try {
            return await verifyPaymentToken();
        } catch (error) {
            reset();
            throw error;
        }
    };

    // A field is "ok" unless it currently has a validation error.
    const fieldsStatus: CardFieldStatus = getInitialFieldStatus();
    for (const field of Object.keys(errors) as (keyof CardModel)[]) {
        fieldsStatus[field] = !errors[field];
    }

    return {
        fetchPaymentToken,
        fetchingToken,
        verifyPaymentToken,
        verifyingToken,
        card,
        setCardProperty: (key: keyof CardModel, value: any) => paymentProcessor.updateCardProperty(key, value),
        // Errors are only shown to the user after a submit attempt.
        errors: submitted ? errors : {},
        fieldsStatus,
        submitted,
        paymentProcessor,
        processPaymentToken,
        processingToken,
        meta: {
            type: 'card',
        },
    };
};
7,214
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/useMethods.test.ts
import { renderHook } from '@testing-library/react-hooks';

import { queryPaymentMethodStatus, queryPaymentMethods } from '@proton/shared/lib/api/payments';
import { wait } from '@proton/shared/lib/helpers/promise';
import { addApiMock, apiMock } from '@proton/testing';

import { Autopay, PAYMENT_METHOD_TYPES, PaymentMethodStatus, SavedPaymentMethod } from '../core';
import { useMethods } from './useMethods';

// Tests for the useMethods hook: initialization, reaction to prop changes,
// saved-method lookup, and method selection.

let paymentMethodStatus: PaymentMethodStatus;

// Never reassigned — declare as const. The fixture represents a single saved card.
const paymentMethods: SavedPaymentMethod[] = [
    {
        ID: '1',
        Type: PAYMENT_METHOD_TYPES.CARD,
        Order: 500,
        Autopay: Autopay.ENABLE,
        Details: {
            Name: 'Arthur Morgan',
            ExpMonth: '12',
            ExpYear: '2030',
            ZIP: '12345',
            Country: 'US',
            Last4: '1234',
            Brand: 'Visa',
        },
    },
];

beforeEach(() => {
    jest.clearAllMocks();

    // Fresh status each test; individual tests may flip flags before rendering.
    paymentMethodStatus = {
        Card: true,
        Paypal: true,
        Apple: true,
        Cash: true,
        Bitcoin: true,
    };

    addApiMock(queryPaymentMethods().url, () => ({
        PaymentMethods: paymentMethods,
    }));

    addApiMock(queryPaymentMethodStatus().url, () => paymentMethodStatus);
});

it('should render', () => {
    const { result } = renderHook(() =>
        useMethods(
            {
                paymentMethodStatus,
                amount: 100,
                flow: 'credit',
            },
            {
                api: apiMock,
                isAuthenticated: true,
            }
        )
    );

    expect(result.current).toBeDefined();
});

it('should initialize payment methods', async () => {
    const { result, waitForNextUpdate } = renderHook(() =>
        useMethods(
            {
                paymentMethodStatus,
                amount: 1000,
                flow: 'credit',
            },
            {
                api: apiMock,
                isAuthenticated: true,
            }
        )
    );

    // Loading until the async initialization (saved methods + status fetch) completes.
    expect(result.current.loading).toBe(true);
    await waitForNextUpdate();
    expect(result.current.loading).toBe(false);

    expect(result.current.savedMethods).toEqual(paymentMethods);
    // The first available method (the saved card) is auto-selected.
    expect(result.current.selectedMethod).toEqual({
        isExpired: false,
        isSaved: true,
        paymentMethodId: '1',
        type: PAYMENT_METHOD_TYPES.CARD,
        value: '1',
    });
    expect(result.current.savedSelectedMethod).toEqual({
        ID: '1',
        Type: PAYMENT_METHOD_TYPES.CARD,
        Order: 500,
        Autopay: Autopay.ENABLE,
        Details: {
            Name: 'Arthur Morgan',
            ExpMonth: '12',
            ExpYear: '2030',
            ZIP: '12345',
            Country: 'US',
            Last4: '1234',
            Brand: 'Visa',
        },
    });
    expect(result.current.status).toEqual(paymentMethodStatus);
    expect(result.current.isNewPaypal).toBe(false);
    expect(result.current.usedMethods).toEqual([
        {
            isExpired: false,
            isSaved: true,
            paymentMethodId: '1',
            type: PAYMENT_METHOD_TYPES.CARD,
            value: '1',
        },
    ]);
    expect(result.current.newMethods).toEqual([
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.CARD,
            value: PAYMENT_METHOD_TYPES.CARD,
        },
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.PAYPAL,
            value: PAYMENT_METHOD_TYPES.PAYPAL,
        },
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.BITCOIN,
            value: PAYMENT_METHOD_TYPES.BITCOIN,
        },
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.CASH,
            value: PAYMENT_METHOD_TYPES.CASH,
        },
    ]);
    expect(result.current.lastUsedMethod).toEqual({
        isExpired: false,
        isSaved: true,
        paymentMethodId: '1',
        type: PAYMENT_METHOD_TYPES.CARD,
        value: '1',
    });
});

it('should update methods when amount changes', async () => {
    const { result, waitForNextUpdate, rerender } = renderHook(
        ({ amount }) =>
            useMethods(
                {
                    paymentMethodStatus,
                    amount,
                    flow: 'credit',
                },
                {
                    api: apiMock,
                    isAuthenticated: true,
                }
            ),
        {
            initialProps: {
                amount: 1000,
            },
        }
    );

    await waitForNextUpdate();
    expect(result.current.loading).toBe(false);
    expect(result.current.savedMethods).toEqual(paymentMethods);

    // Dropping below the PayPal/Bitcoin minimum removes those options.
    rerender({
        amount: 100,
    });

    expect(result.current.newMethods).toEqual([
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.CARD,
            value: PAYMENT_METHOD_TYPES.CARD,
        },
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.CASH,
            value: PAYMENT_METHOD_TYPES.CASH,
        },
    ]);
});

it('should get saved method by its ID', async () => {
    const { result, waitForNextUpdate } = renderHook(() =>
        useMethods(
            {
                paymentMethodStatus,
                amount: 1000,
                flow: 'credit',
            },
            {
                api: apiMock,
                isAuthenticated: true,
            }
        )
    );

    await waitForNextUpdate();

    expect(result.current.getSavedMethodByID('1')).toEqual({
        ID: '1',
        Type: PAYMENT_METHOD_TYPES.CARD,
        Order: 500,
        Autopay: Autopay.ENABLE,
        Details: {
            Name: 'Arthur Morgan',
            ExpMonth: '12',
            ExpYear: '2030',
            ZIP: '12345',
            Country: 'US',
            Last4: '1234',
            Brand: 'Visa',
        },
    });
});

it('should set selected method', async () => {
    const { result, waitForNextUpdate } = renderHook(() =>
        useMethods(
            {
                paymentMethodStatus,
                amount: 1000,
                flow: 'credit',
            },
            {
                api: apiMock,
                isAuthenticated: true,
            }
        )
    );

    await waitForNextUpdate();

    expect(result.current.selectedMethod).toEqual({
        isExpired: false,
        isSaved: true,
        paymentMethodId: '1',
        type: PAYMENT_METHOD_TYPES.CARD,
        value: '1',
    });

    // Selecting a new (non-saved) card clears savedSelectedMethod.
    result.current.selectMethod('card');
    expect(result.current.selectedMethod).toEqual({
        isSaved: false,
        type: PAYMENT_METHOD_TYPES.CARD,
        value: PAYMENT_METHOD_TYPES.CARD,
    });
    expect(result.current.savedSelectedMethod).toEqual(undefined);
    expect(result.current.isNewPaypal).toBe(false);

    // A newly selected PayPal method is flagged as "new paypal".
    result.current.selectMethod('paypal');
    expect(result.current.selectedMethod).toEqual({
        isSaved: false,
        type: PAYMENT_METHOD_TYPES.PAYPAL,
        value: PAYMENT_METHOD_TYPES.PAYPAL,
    });
    expect(result.current.savedSelectedMethod).toEqual(undefined);
    expect(result.current.isNewPaypal).toBe(true);
});

it('should update amount correctly even if the initialization is slow', async () => {
    // Simulate a slow status endpoint so the amount changes mid-initialization.
    addApiMock(queryPaymentMethodStatus().url, async () => {
        await wait(100);
        return paymentMethodStatus;
    });

    const { result, waitForNextUpdate, rerender } = renderHook(
        ({ amount }) =>
            useMethods(
                {
                    paymentMethodStatus,
                    amount,
                    flow: 'credit',
                },
                {
                    api: apiMock,
                    isAuthenticated: true,
                }
            ),
        {
            initialProps: {
                amount: 0,
            },
        }
    );

    expect(result.current.loading).toBe(true);
    rerender({
        amount: 10000,
    });

    await waitForNextUpdate();
    expect(result.current.loading).toBe(false);

    // The pending amount must be applied once initialization finishes.
    expect(result.current.newMethods.length).toBe(4);
    expect(result.current.newMethods).toEqual([
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.CARD,
            value: PAYMENT_METHOD_TYPES.CARD,
        },
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.PAYPAL,
            value: PAYMENT_METHOD_TYPES.PAYPAL,
        },
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.BITCOIN,
            value: PAYMENT_METHOD_TYPES.BITCOIN,
        },
        {
            isSaved: false,
            type: PAYMENT_METHOD_TYPES.CASH,
            value: PAYMENT_METHOD_TYPES.CASH,
        },
    ]);
});
7,215
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/useMethods.ts
import { useEffect, useRef, useState } from 'react'; import { Api } from '@proton/shared/lib/interfaces'; import { AvailablePaymentMethod, PAYMENT_METHOD_TYPES, PaymentMethodFlows, PaymentMethodStatus, PaymentMethodType, PaymentMethods, SavedPaymentMethod, initializePaymentMethods, isExistingPaymentMethod, } from '../core'; export type OnMethodChangedHandler = (method: AvailablePaymentMethod) => void; export interface Props { amount: number; coupon?: string | null; flow: PaymentMethodFlows; paymentMethodStatus?: PaymentMethodStatus; paymentMethods?: SavedPaymentMethod[]; onMethodChanged?: OnMethodChangedHandler; } interface Dependencies { api: Api; isAuthenticated: boolean; } export type MethodsHook = { loading: boolean; usedMethods: AvailablePaymentMethod[]; newMethods: AvailablePaymentMethod[]; allMethods: AvailablePaymentMethod[]; lastUsedMethod: AvailablePaymentMethod | undefined; selectedMethod: AvailablePaymentMethod | undefined; savedSelectedMethod: SavedPaymentMethod | undefined; selectMethod: (id?: string) => AvailablePaymentMethod | undefined; getSavedMethodByID: (id: string | undefined) => SavedPaymentMethod | undefined; status: PaymentMethodStatus | undefined; savedMethods: SavedPaymentMethod[] | undefined; isNewPaypal: boolean; }; type UsedAndNewMethods = { usedMethods: AvailablePaymentMethod[]; newMethods: AvailablePaymentMethod[]; }; export const useMethods = ( { paymentMethodStatus, paymentMethods, amount, coupon, flow, onMethodChanged }: Props, { api, isAuthenticated }: Dependencies ): MethodsHook => { const paymentMethodsRef = useRef<PaymentMethods>(); const pendingDataRef = useRef<{ pendingAmount?: number; pendingCoupon?: string | null; pendingFlow?: PaymentMethodFlows; }>(); const [loading, setLoading] = useState(true); const [availableMethods, setAvailableMethods] = useState<UsedAndNewMethods>({ usedMethods: [], newMethods: [], }); const [selectedMethod, setSelectedMethod] = useState<AvailablePaymentMethod | undefined>(); const [status, 
setStatus] = useState<PaymentMethodStatus | undefined>(); const [savedMethods, setSavedMethods] = useState<SavedPaymentMethod[] | undefined>(); const getComputedMethods = (availableMethodsParam?: UsedAndNewMethods) => { const { usedMethods, newMethods } = availableMethodsParam ?? availableMethods; const allMethods = [...usedMethods, ...newMethods]; const lastUsedMethod = usedMethods[usedMethods.length - 1]; return { allMethods, lastUsedMethod, usedMethods, newMethods, }; }; const updateMethods = () => { const { usedMethods, methods: newMethods } = paymentMethodsRef.current!.getAvailablePaymentMethods(); const result = { usedMethods, newMethods, }; setAvailableMethods(result); return result; }; useEffect(() => { async function run() { paymentMethodsRef.current = await initializePaymentMethods( api, paymentMethodStatus, paymentMethods, isAuthenticated, amount, coupon ?? '', flow ); // Initialization might take some time, so we need to check if there is any pending data // If for example the amount changes before initialization is done, then it won't be updated by the usual // useEffect handler below. In this case we need to update the amount manually. // Same goes for coupon and flow. if (pendingDataRef.current) { // Getting the saved values and clearing the pending right away, because this is a one-time thing const { pendingAmount, pendingCoupon, pendingFlow } = pendingDataRef.current; pendingDataRef.current = undefined; // Updating the coupon paymentMethodsRef.current.coupon = pendingCoupon ?? 
''; // Updating the amount if (typeof pendingAmount === 'number') { paymentMethodsRef.current.amount = pendingAmount; } // Updating the flow if (pendingFlow) { paymentMethodsRef.current.flow = pendingFlow; } } setStatus(paymentMethodsRef.current.paymentMethodStatus); setSavedMethods(paymentMethodsRef.current.paymentMethods); const methods = updateMethods(); const { allMethods } = getComputedMethods(methods); setSelectedMethod(allMethods[0]); setLoading(false); } run(); }, []); useEffect(() => { if (!paymentMethodsRef.current) { pendingDataRef.current = { pendingAmount: amount, pendingCoupon: coupon, pendingFlow: flow, }; return; } paymentMethodsRef.current.amount = amount; paymentMethodsRef.current.coupon = coupon ?? ''; paymentMethodsRef.current.flow = flow; updateMethods(); }, [amount, coupon, flow]); const { usedMethods, newMethods, allMethods, lastUsedMethod } = getComputedMethods(); const getSavedMethodByID = (paymentMethodID: string | undefined): SavedPaymentMethod | undefined => { if (!paymentMethodsRef.current || !paymentMethodID) { return; } return paymentMethodsRef.current.getSavedMethodById(paymentMethodID); }; const selectMethod = (id?: PaymentMethodType) => { if (!id) { setSelectedMethod(undefined); return; } const method = allMethods.find((method) => method.value === id); if (method) { if (selectedMethod?.value !== method.value) { onMethodChanged?.(method); } setSelectedMethod(method); return method; } }; const savedSelectedMethod = getSavedMethodByID(selectedMethod?.value); const isNewPaypal = selectedMethod?.type === PAYMENT_METHOD_TYPES.PAYPAL && !isExistingPaymentMethod(selectedMethod?.value); return { selectedMethod, savedSelectedMethod, selectMethod, loading, usedMethods, newMethods, allMethods, lastUsedMethod, getSavedMethodByID, status, savedMethods, isNewPaypal, }; };
7,216
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/usePaymentFacade.ts
import { useMemo, useRef } from 'react';

import { buyCredit, payInvoice, subscribe } from '@proton/shared/lib/api/payments';
import { ProductParam } from '@proton/shared/lib/apps/product';
import { Api, Currency, Cycle, PlanIDs } from '@proton/shared/lib/interfaces';

import {
    AmountAndCurrency,
    ChargeablePaymentParameters,
    PAYMENT_METHOD_TYPES,
    PaymentMethodFlows,
    PaymentMethodStatus,
    PaymentMethodType,
    PaymentVerificator,
    SavedPaymentMethod,
    isExistingPaymentMethod,
} from '../core';
import { useCard } from './useCard';
import { OnMethodChangedHandler, useMethods } from './useMethods';
import { usePaypal } from './usePaypal';
import { useSavedMethod } from './useSavedMethod';

export interface OperationsSubscriptionData {
    Plans: PlanIDs;
    Cycle: Cycle;
    Codes?: string[];
    product: ProductParam;
}

export interface OperationsInvoiceData {
    invoiceId: string;
}

export interface OperationsData {
    subscription?: OperationsSubscriptionData;
    invoice?: OperationsInvoiceData;
}

/**
 * Common operations that can be performed with a chargeable payment token.
 * They are meant to be available inside the onChargeable callback.
 */
export interface Operations {
    buyCredit: () => Promise<unknown>;
    payInvoice: () => Promise<unknown>;
    subscribe: () => Promise<unknown>;
}

/**
 * Binds the chargeable token parameters and the facade's operations data to the
 * three supported payment operations. The invoice/subscription variants throw
 * if the corresponding data was never provided to the facade.
 */
function getOperations(api: Api, params: ChargeablePaymentParameters, operationsData: OperationsData): Operations {
    const buyCreditOperation = async () => api(buyCredit(params));

    const payInvoiceOperation = async () => {
        if (!operationsData?.invoice) {
            throw new Error('The operations data for invoice must be provided in the facade');
        }
        return api(payInvoice(operationsData.invoice.invoiceId, params));
    };

    const subscribeOperation = async () => {
        if (!operationsData?.subscription) {
            throw new Error('The operations data for subscription must be provided in the facade');
        }

        const { product, ...data } = operationsData.subscription;

        return api({
            ...subscribe(
                {
                    ...params,
                    ...data,
                },
                product
            ),
            // Subscribing can be slow on the backend; allow extra time.
            timeout: 60000 * 2,
        });
    };

    return {
        buyCredit: buyCreditOperation,
        payInvoice: payInvoiceOperation,
        subscribe: subscribeOperation,
    };
}

/**
 * Stores the data for operations. That's meant to bypass React's rendering cycle.
 * Perhaps will be changed in the future.
 */
const usePaymentContext = () => {
    const subscriptionData = useRef<OperationsSubscriptionData>();
    const invoiceData = useRef<OperationsInvoiceData>();

    return {
        setSubscriptionData: (data: OperationsSubscriptionData | undefined) => {
            subscriptionData.current = data;
        },
        getSubscriptionData: () => subscriptionData.current,
        setInvoiceData: (data: OperationsInvoiceData | undefined) => {
            invoiceData.current = data;
        },
        getInvoiceData: () => invoiceData.current,
        getOperationsData: (): OperationsData => ({
            subscription: subscriptionData.current,
            invoice: invoiceData.current,
        }),
    };
};

/**
 * Gathers together all the payment methods and their implementations and
 * provides meaningful default configurations. This facade might be reused in
 * other apps (like static) in the future; the implementation attempts to avoid
 * dependencies on the monorepo's client-specific code.
 */
export const usePaymentFacade = (
    {
        amount,
        currency,
        onChargeable,
        coupon,
        flow,
        onMethodChanged,
        paymentMethods,
        paymentMethodStatus,
    }: {
        amount: number;
        currency: Currency;
        onChargeable: (
            operations: Operations,
            data: {
                chargeablePaymentParameters: ChargeablePaymentParameters;
                source: PaymentMethodType;
                context: OperationsData;
            }
        ) => Promise<unknown>;
        coupon?: string;
        flow: PaymentMethodFlows;
        onMethodChanged?: OnMethodChangedHandler;
        paymentMethods?: SavedPaymentMethod[];
        paymentMethodStatus?: PaymentMethodStatus;
    },
    {
        api,
        isAuthenticated,
        verifyPayment,
        verifyPaymentPaypal,
    }: {
        api: Api;
        isAuthenticated: boolean;
        verifyPayment: PaymentVerificator;
        verifyPaymentPaypal: PaymentVerificator;
    }
) => {
    // Memoized so downstream hooks don't reset on unrelated re-renders.
    const amountAndCurrency: AmountAndCurrency = useMemo(
        () => ({
            Amount: amount,
            Currency: currency,
        }),
        [amount, currency]
    );

    const paymentContext = usePaymentContext();

    const methods = useMethods(
        {
            amount,
            coupon: coupon ?? '',
            flow,
            onMethodChanged,
            paymentMethods,
            paymentMethodStatus,
        },
        {
            api,
            isAuthenticated,
        }
    );

    const savedMethod = useSavedMethod(
        {
            amountAndCurrency,
            savedMethod: methods.savedSelectedMethod,
            onChargeable: (params, paymentMethodId) =>
                onChargeable(getOperations(api, params, paymentContext.getOperationsData()), {
                    chargeablePaymentParameters: params,
                    source: paymentMethodId,
                    context: paymentContext.getOperationsData(),
                }),
        },
        {
            api,
            verifyPayment,
        }
    );

    const card = useCard(
        {
            amountAndCurrency,
            onChargeable: (params) =>
                onChargeable(getOperations(api, params, paymentContext.getOperationsData()), {
                    chargeablePaymentParameters: params,
                    source: PAYMENT_METHOD_TYPES.CARD,
                    context: paymentContext.getOperationsData(),
                }),
        },
        {
            api,
            verifyPayment,
        }
    );

    // Invoices may be below the usual PayPal minimum, so skip the amount check there.
    const paypalIgnoreAmountCheck = flow === 'invoice';

    const paypal = usePaypal(
        {
            amountAndCurrency,
            isCredit: false,
            onChargeable: (params) =>
                onChargeable(getOperations(api, params, paymentContext.getOperationsData()), {
                    chargeablePaymentParameters: params,
                    source: PAYMENT_METHOD_TYPES.PAYPAL,
                    context: paymentContext.getOperationsData(),
                }),
            ignoreAmountCheck: paypalIgnoreAmountCheck,
        },
        {
            api,
            verifyPayment: verifyPaymentPaypal,
        }
    );

    const paypalCredit = usePaypal(
        {
            amountAndCurrency,
            isCredit: true,
            onChargeable: (params) =>
                onChargeable(getOperations(api, params, paymentContext.getOperationsData()), {
                    chargeablePaymentParameters: params,
                    source: PAYMENT_METHOD_TYPES.PAYPAL_CREDIT,
                    context: paymentContext.getOperationsData(),
                }),
            ignoreAmountCheck: paypalIgnoreAmountCheck,
        },
        {
            api,
            verifyPayment: verifyPaymentPaypal,
        }
    );

    const paymentMethodType: PaymentMethodType | undefined = methods.selectedMethod?.value;

    // Maps the currently selected method to its payment processor hook.
    const selectedProcessor = useMemo(() => {
        // Saved methods are identified by ID rather than by type, so check that first.
        if (isExistingPaymentMethod(paymentMethodType)) {
            return savedMethod;
        }

        switch (paymentMethodType) {
            case PAYMENT_METHOD_TYPES.CARD:
                return card;
            case PAYMENT_METHOD_TYPES.PAYPAL:
                return paypal;
            case PAYMENT_METHOD_TYPES.PAYPAL_CREDIT:
                return paypalCredit;
            default:
                return undefined;
        }
    }, [paymentMethodType, card, savedMethod, paypal, paypalCredit]);

    return {
        methods,
        savedMethod,
        card,
        paypal,
        paypalCredit,
        selectedProcessor,
        flow,
        amount,
        currency,
        paymentContext,
    };
};
7,217
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/usePaymentProcessor.ts
import { useRef } from 'react'; import { PaymentProcessor } from '../core'; /** * An internal helper to avoid re-initialization of the payment processor on every render. * The init function is supposed to return a new instance of the payment processor. */ export const usePaymentProcessor = <T extends PaymentProcessor>(init: () => T) => { const paymentProcessorRef = useRef<T | null>(null); if (!paymentProcessorRef.current) { paymentProcessorRef.current = init(); } const paymentProcessor = paymentProcessorRef.current; return paymentProcessor; };
7,218
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/usePaypal.test.ts
import { renderHook } from '@testing-library/react-hooks';

import { MAX_CREDIT_AMOUNT, MIN_PAYPAL_AMOUNT } from '@proton/shared/lib/constants';
import { addTokensResponse, apiMock } from '@proton/testing';

import { PAYMENT_METHOD_TYPES } from '../core';
import { usePaypal } from './usePaypal';

// Tests for the usePaypal hook: initial state, processor lifecycle, token
// fetching/verification, and the amount-based disabled state.

const onChargeableMock = jest.fn();
const verifyPaymentMock = jest.fn();

beforeEach(() => {
    jest.clearAllMocks();
});

it('should render', () => {
    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    expect(result.current).toBeDefined();
    expect(result.current.meta.type).toBe('paypal');
    expect(result.current.fetchingToken).toBe(false);
    expect(result.current.verifyingToken).toBe(false);
    expect(result.current.verificationError).toBe(null);
    expect(result.current.tokenFetched).toBe(false);
    expect(result.current.processingToken).toBe(false);
    expect(result.current.paymentProcessor).toBeDefined();
});

it('should render with credit', () => {
    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: true,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    expect(result.current).toBeDefined();
    // isCredit flips the reported meta type.
    expect(result.current.meta.type).toBe('paypal-credit');
    expect(result.current.fetchingToken).toBe(false);
    expect(result.current.verifyingToken).toBe(false);
    expect(result.current.verificationError).toBe(null);
    expect(result.current.tokenFetched).toBe(false);
    expect(result.current.processingToken).toBe(false);
    expect(result.current.paymentProcessor).toBeDefined();
});

it('should destroy payment processor on unmount', () => {
    const { result, unmount } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    result.current.paymentProcessor!.destroy = jest.fn();
    unmount();
    expect(result.current.paymentProcessor!.destroy).toHaveBeenCalledTimes(1);
});

it('should update fetchedPaymentToken', () => {
    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    expect(result.current.tokenFetched).toBe(false);
    // tokenFetched mirrors the processor's fetchedPaymentToken state.
    result.current.paymentProcessor!.updateState({ fetchedPaymentToken: 'token' });
    expect(result.current.tokenFetched).toBe(true);
});

it('should update verificationError', () => {
    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    expect(result.current.verificationError).toBe(null);
    result.current.paymentProcessor!.updateState({ verificationError: 'error' });
    expect(result.current.verificationError).toBe('error');
});

it('should update verificationError when token fetching fails', async () => {
    addTokensResponse().throw();

    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    expect(result.current.verificationError).toBe(null);
    const tokenPromise = result.current.fetchPaymentToken();
    await expect(tokenPromise).rejects.toThrowError(new Error());
    expect(result.current.verificationError).toEqual(new Error());
});

it('should update verificationError when token verification fails', async () => {
    addTokensResponse().pending();
    verifyPaymentMock.mockRejectedValueOnce(new Error('From the endpoint'));

    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    expect(result.current.verificationError).toBe(null);
    await result.current.fetchPaymentToken();
    const tokenPromise = result.current.verifyPaymentToken();
    // The caller sees the raw endpoint error, while the hook exposes a generic one.
    await expect(tokenPromise).rejects.toThrowError(new Error('From the endpoint'));
    expect(result.current.verificationError).toEqual(new Error('Paypal payment verification failed'));
});

it('should remove pre-fetched token if verification fails', async () => {
    addTokensResponse().pending();
    verifyPaymentMock.mockRejectedValueOnce(new Error('From the endpoint'));

    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    await result.current.fetchPaymentToken();
    expect(result.current.tokenFetched).toBe(true);
    const tokenPromise = result.current.processPaymentToken();
    await expect(tokenPromise).rejects.toThrowError(new Error('From the endpoint'));
    // processPaymentToken resets the processor on failure, dropping the token.
    expect(result.current.tokenFetched).toBe(false);
});

it('should process payment token', async () => {
    addTokensResponse().pending();

    const { result } = renderHook(() =>
        usePaypal(
            {
                amountAndCurrency: {
                    Amount: 1000,
                    Currency: 'USD',
                },
                isCredit: false,
                onChargeable: onChargeableMock,
            },
            {
                api: apiMock,
                verifyPayment: verifyPaymentMock,
            }
        )
    );

    const tokenPromise = result.current.processPaymentToken();
    expect(result.current.processingToken).toBe(true);
    const token = await tokenPromise;

    expect(token).toEqual({
        Amount: 1000,
        Currency: 'USD',
        chargeable: true,
        type: PAYMENT_METHOD_TYPES.PAYPAL,
    });

    expect(result.current.tokenFetched).toBe(true);
    expect(result.current.verifyingToken).toBe(false);
    expect(result.current.verificationError).toBe(null);
    expect(result.current.processingToken).toBe(false);
});

// Fixed typo in the description: "desabled" -> "disabled".
it('should update disabled state when the amount changes', () => {
    const { result, rerender } = renderHook(
        ({ Amount }) =>
            usePaypal(
                {
                    amountAndCurrency: {
                        Amount,
                        Currency: 'USD',
                    },
                    isCredit: false,
                    onChargeable: onChargeableMock,
                },
                {
                    api: apiMock,
                    verifyPayment: verifyPaymentMock,
                }
            ),
        {
            initialProps: {
                Amount: 0,
            },
        }
    );

    expect(result.current.disabled).toBe(false);

    rerender({
        Amount: MIN_PAYPAL_AMOUNT - 1,
    });
    expect(result.current.disabled).toBe(true);

    rerender({
        Amount: MIN_PAYPAL_AMOUNT,
    });
    expect(result.current.disabled).toBe(false);

    rerender({
        Amount: (MIN_PAYPAL_AMOUNT + MAX_CREDIT_AMOUNT) / 2,
    });
    expect(result.current.disabled).toBe(false);

    rerender({
        Amount: MAX_CREDIT_AMOUNT,
    });
    expect(result.current.disabled).toBe(false);

    rerender({
        Amount: MAX_CREDIT_AMOUNT + 1,
    });
    expect(result.current.disabled).toBe(true);

    rerender({
        Amount: 0,
    });
    expect(result.current.disabled).toBe(false);
});

it('should have isInitialState === true when tokenFetched === false and verificationError === null', () => {
    const { result } = renderHook(
        ({ Amount }) =>
            usePaypal(
                {
                    amountAndCurrency: {
                        Amount,
                        Currency: 'USD',
                    },
                    isCredit: false,
                    onChargeable: onChargeableMock,
                },
                {
                    api: apiMock,
                    verifyPayment: verifyPaymentMock,
                }
            ),
        {
            initialProps: {
                Amount: 0,
            },
        }
    );

    expect(result.current.isInitialState).toBe(true);

    result.current.paymentProcessor!.updateState({ fetchedPaymentToken: 'token' });
    expect(result.current.isInitialState).toBe(false);

    result.current.paymentProcessor!.updateState({ fetchedPaymentToken: null });
    expect(result.current.isInitialState).toBe(true);

    result.current.paymentProcessor!.updateState({ verificationError: 'error' });
    expect(result.current.isInitialState).toBe(false);

    result.current.paymentProcessor!.updateState({ verificationError: null });
    expect(result.current.isInitialState).toBe(true);

    result.current.paymentProcessor!.updateState({ fetchedPaymentToken: 'token', verificationError: 'error' });
    expect(result.current.isInitialState).toBe(false);

    result.current.reset();
    expect(result.current.isInitialState).toBe(false); // because reset() must NOT clear verificationError.

    result.current.paymentProcessor!.updateState({ verificationError: null });
    expect(result.current.isInitialState).toBe(true);
});
7,219
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/usePaypal.ts
import { useEffect, useState } from 'react'; import { useLoading } from '@proton/hooks'; import { Api } from '@proton/shared/lib/interfaces'; import noop from '@proton/utils/noop'; import { AmountAndCurrency, ChargeablePaymentParameters, PaymentVerificator } from '../core'; import { PaypalPaymentProcessor } from '../core/payment-processors/paypalPayment'; import { PaymentProcessorHook } from './interface'; import { usePaymentProcessor } from './usePaymentProcessor'; interface Props { amountAndCurrency: AmountAndCurrency; isCredit: boolean; onChargeable?: (data: ChargeablePaymentParameters) => Promise<unknown>; ignoreAmountCheck?: boolean; } interface Dependencies { api: Api; verifyPayment: PaymentVerificator; } export type PaypalProcessorHook = PaymentProcessorHook & { reset: () => void; tokenFetched: boolean; verificationError: any; disabled: boolean; isInitialState: boolean; meta: { type: 'paypal' | 'paypal-credit'; }; }; /** * React wrapper for {@link PaypalPaymentProcessor}. It provides a set of proxies and also some additional functionality * like `processPaymentToken` method that supposed to be the main action. 
*/ export const usePaypal = ( { amountAndCurrency, isCredit, onChargeable, ignoreAmountCheck }: Props, { api, verifyPayment }: Dependencies ): PaypalProcessorHook => { const paymentProcessor = usePaymentProcessor( () => new PaypalPaymentProcessor(verifyPayment, api, amountAndCurrency, isCredit, onChargeable, ignoreAmountCheck) ); const [fetchingToken, withFetchingToken] = useLoading(); const [verifyingToken, withVerifyingToken] = useLoading(); const [verificationError, setVerificationError] = useState<any>(null); const [disabled, setDisabled] = useState(false); const processingToken = fetchingToken || verifyingToken; const [tokenFetched, setTokenFetched] = useState(false); const isInitialState = !tokenFetched && !verificationError; useEffect(() => { paymentProcessor.setAmountAndCurrency(amountAndCurrency); }, [amountAndCurrency]); useEffect(() => { paymentProcessor.onTokenIsChargeable = onChargeable; }, [onChargeable]); useEffect(() => { paymentProcessor.onStateUpdated((state) => { if (Object.hasOwn(state, 'fetchedPaymentToken')) { setTokenFetched(state.fetchedPaymentToken !== null); } if (Object.hasOwn(state, 'verificationError')) { setVerificationError(state.verificationError); } if (Object.hasOwn(state, 'disabled')) { setDisabled(!!state.disabled); } }); return () => paymentProcessor.destroy(); }, []); const reset = () => paymentProcessor.reset(); const fetchPaymentToken = async () => withFetchingToken(paymentProcessor.fetchPaymentToken()); const verifyPaymentToken = async () => { const tokenPromise = paymentProcessor.verifyPaymentToken(); withVerifyingToken(tokenPromise).catch(noop); return tokenPromise; }; const processPaymentToken = async () => { if (!paymentProcessor.fetchedPaymentToken) { await fetchPaymentToken(); } try { return await verifyPaymentToken(); } catch (error) { reset(); throw error; } }; return { tokenFetched, fetchPaymentToken, fetchingToken, verifyPaymentToken, verifyingToken, paymentProcessor, reset, processPaymentToken, processingToken, 
verificationError, disabled, isInitialState, meta: { type: isCredit ? 'paypal-credit' : 'paypal', }, }; };
7,220
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/useSavedMethod.test.ts
import { renderHook } from '@testing-library/react-hooks'; import { addTokensResponse, apiMock } from '@proton/testing'; import { Autopay, PAYMENT_METHOD_TYPES, PaymentMethodPaypal, SavedPaymentMethod } from '../core'; import { useSavedMethod } from './useSavedMethod'; const onChargeableMock = jest.fn(); const verifyPaymentMock = jest.fn(); beforeEach(() => { jest.clearAllMocks(); }); const savedMethod: SavedPaymentMethod = { Order: 500, ID: '1', Type: PAYMENT_METHOD_TYPES.CARD, Details: { Name: 'John Doe', ExpMonth: '12', ExpYear: '2032', ZIP: '12345', Country: 'US', Last4: '1234', Brand: 'Visa', }, Autopay: Autopay.ENABLE, }; it('should render', async () => { const { result } = renderHook(() => useSavedMethod( { amountAndCurrency: { Amount: 1000, Currency: 'USD', }, savedMethod, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: verifyPaymentMock, } ) ); expect(result.current).toBeDefined(); expect(result.current.meta.type).toBe('saved'); expect(result.current.fetchingToken).toBe(false); expect(result.current.verifyingToken).toBe(false); expect(result.current.processingToken).toBe(false); expect(result.current.paymentProcessor).toBeDefined(); }); it('should render without savedMethod', async () => { const { result } = renderHook(() => useSavedMethod( { amountAndCurrency: { Amount: 1000, Currency: 'USD', }, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: verifyPaymentMock, } ) ); expect(result.current).toBeDefined(); expect(result.current.meta.type).toBe('saved'); expect(result.current.fetchingToken).toBe(false); expect(result.current.verifyingToken).toBe(false); expect(result.current.processingToken).toBe(false); expect(result.current.paymentProcessor).toBeUndefined(); }); it('should destroy paymentProcessor', async () => { const { result, unmount } = renderHook(() => useSavedMethod( { amountAndCurrency: { Amount: 1000, Currency: 'USD', }, savedMethod, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: 
verifyPaymentMock, } ) ); expect(result.current).toBeDefined(); expect(result.current.meta.type).toBe('saved'); expect(result.current.fetchingToken).toBe(false); expect(result.current.verifyingToken).toBe(false); expect(result.current.processingToken).toBe(false); expect(result.current.paymentProcessor).toBeDefined(); result.current.paymentProcessor!.destroy = jest.fn(); unmount(); expect(result.current.paymentProcessor!.destroy).toBeCalled(); }); it('should fetch token', async () => { addTokensResponse(); const { result } = renderHook(() => useSavedMethod( { amountAndCurrency: { Amount: 1000, Currency: 'USD', }, savedMethod, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: verifyPaymentMock, } ) ); const tokenPromise = result.current.fetchPaymentToken(); expect(result.current.fetchingToken).toBe(true); const token = await tokenPromise; expect(token).toEqual({ Amount: 1000, Currency: 'USD', Payment: { Type: 'token', Details: { Token: 'token123', }, }, chargeable: true, type: 'card', }); }); it('should process token', async () => { addTokensResponse().pending(); verifyPaymentMock.mockResolvedValue({ Payment: { Type: 'token', Details: { Token: 'token123', }, }, }); const { result } = renderHook(() => useSavedMethod( { amountAndCurrency: { Amount: 1000, Currency: 'USD', }, savedMethod, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: verifyPaymentMock, } ) ); await result.current.fetchPaymentToken(); expect(result.current.fetchingToken).toBe(false); const processPromise = result.current.processPaymentToken(); expect(result.current.processingToken).toBe(true); const token = await processPromise; expect(result.current.processingToken).toBe(false); expect(token).toEqual({ Amount: 1000, Currency: 'USD', Payment: { Type: 'token', Details: { Token: 'token123', }, }, chargeable: true, type: 'card', }); }); it('should throw during verification if there is no saved method', async () => { const { result } = renderHook(() => useSavedMethod( { 
amountAndCurrency: { Amount: 1000, Currency: 'USD', }, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: verifyPaymentMock, } ) ); await expect(() => result.current.verifyPaymentToken()).rejects.toThrowError('There is no saved method to verify'); }); it('should reset token if verification failed', async () => { addTokensResponse().pending(); verifyPaymentMock.mockRejectedValue(new Error('Verification failed')); const { result } = renderHook(() => useSavedMethod( { amountAndCurrency: { Amount: 1000, Currency: 'USD', }, savedMethod, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: verifyPaymentMock, } ) ); await expect(result.current.processPaymentToken()).rejects.toThrowError('Verification failed'); expect(result.current.verifyingToken).toBe(false); expect(result.current.paymentProcessor?.fetchedPaymentToken).toEqual(null); }); it('should update the saved method', async () => { const { result, rerender } = renderHook( ({ savedMethod }) => useSavedMethod( { amountAndCurrency: { Amount: 1000, Currency: 'USD', }, savedMethod, onChargeable: onChargeableMock, }, { api: apiMock, verifyPayment: verifyPaymentMock, } ), { initialProps: { savedMethod: savedMethod as SavedPaymentMethod, }, } ); expect((result.current.paymentProcessor as any).state.method.paymentMethodId).toEqual(savedMethod.ID); expect((result.current.paymentProcessor as any).state.method.type).toEqual(savedMethod.Type); const newSavedMethod: PaymentMethodPaypal = { Order: 400, ID: '2', Type: PAYMENT_METHOD_TYPES.PAYPAL, Details: { BillingAgreementID: 'BA-123', PayerID: 'pid123', Payer: 'payer123', }, }; rerender({ savedMethod: newSavedMethod, }); expect((result.current.paymentProcessor as any).state.method.paymentMethodId).toEqual(newSavedMethod.ID); expect((result.current.paymentProcessor as any).state.method.type).toEqual(newSavedMethod.Type); });
7,221
0
petrpan-code/ProtonMail/WebClients/packages/components/payments
petrpan-code/ProtonMail/WebClients/packages/components/payments/react-extensions/useSavedMethod.ts
import { useEffect, useRef } from 'react'; import { useLoading } from '@proton/hooks'; import { Api } from '@proton/shared/lib/interfaces'; import noop from '@proton/utils/noop'; import { AmountAndCurrency, ChargeablePaymentParameters, ExistingPaymentMethod, PaymentVerificator, SavedPaymentMethod, } from '../core'; import { SavedPaymentProcessor } from '../core/payment-processors/savedPayment'; import { PaymentProcessorHook } from './interface'; export interface Props { amountAndCurrency: AmountAndCurrency; savedMethod?: SavedPaymentMethod; onChargeable: (data: ChargeablePaymentParameters, paymentMethodId: ExistingPaymentMethod) => Promise<unknown>; } export interface Dependencies { verifyPayment: PaymentVerificator; api: Api; } export interface SavedMethodProcessorHook extends PaymentProcessorHook { paymentProcessor?: SavedPaymentProcessor; } /** * React wrapper for {@link SavedPaymentProcessor}. It provides a set of proxies and also some additional functionality * like `processPaymentToken` method that supposed to be the main action. The saved payment method can be either a card * or PayPal (not paypal-credit which can't be saved by design, as it supposed to provide one-time payment). 
*/ export const useSavedMethod = ( { amountAndCurrency, savedMethod, onChargeable }: Props, { verifyPayment, api }: Dependencies ): SavedMethodProcessorHook => { const paymentProcessorRef = useRef<SavedPaymentProcessor>(); if (!paymentProcessorRef.current && savedMethod) { paymentProcessorRef.current = new SavedPaymentProcessor( verifyPayment, api, amountAndCurrency, savedMethod, (chargeablePaymentParameters: ChargeablePaymentParameters) => onChargeable(chargeablePaymentParameters, savedMethod.ID) ); } const paymentProcessor = paymentProcessorRef.current; const [fetchingToken, withFetchingToken] = useLoading(); const [verifyingToken, withVerifyingToken] = useLoading(); const processingToken = fetchingToken || verifyingToken; useEffect(() => { return () => paymentProcessor?.destroy(); }, []); useEffect(() => { if (paymentProcessor) { paymentProcessor.amountAndCurrency = amountAndCurrency; } }, [amountAndCurrency]); useEffect(() => { if (paymentProcessor && savedMethod) { paymentProcessor.onTokenIsChargeable = (chargeablePaymentParameters: ChargeablePaymentParameters) => onChargeable(chargeablePaymentParameters, savedMethod.ID); paymentProcessor.updateSavedMethod(savedMethod); } }, [savedMethod, onChargeable]); const reset = () => paymentProcessor?.reset(); const fetchPaymentToken = async () => withFetchingToken(paymentProcessor?.fetchPaymentToken()); const verifyPaymentToken = async () => { const tokenPromise = paymentProcessor?.verifyPaymentToken(); if (!tokenPromise) { throw new Error('There is no saved method to verify'); } withVerifyingToken(tokenPromise).catch(noop); return tokenPromise; }; const processPaymentToken = async () => { if (!paymentProcessor?.fetchedPaymentToken) { await fetchPaymentToken(); } try { return await verifyPaymentToken(); } catch (error) { reset(); throw error; } }; return { fetchPaymentToken, fetchingToken, verifyPaymentToken, verifyingToken, processPaymentToken, processingToken, paymentProcessor, meta: { type: 'saved', data: 
savedMethod, }, }; };
7,222
0
petrpan-code/ProtonMail/WebClients/packages/components
petrpan-code/ProtonMail/WebClients/packages/components/typings/css.d.ts
declare module 'csstype' { // eslint-disable-next-line @typescript-eslint/no-unused-vars interface Properties<T> { // allow css variables [index: string]: unknown; } }
7,223
0
petrpan-code/ProtonMail/WebClients/packages/components
petrpan-code/ProtonMail/WebClients/packages/components/typings/index.d.ts
declare module '*.svg'; declare module '*.jpg'; declare module '*.png'; declare module '*.webm'; declare module '*.mp4'; declare module '*.md'; // TODO: Import from @proton/shared declare module 'ical.js'; declare module 'pm-srp'; declare module 'is-valid-domain'; // Used to replace Node bundled punycode library which is now deprecated declare module 'punycode.js'; declare module '*.raw.scss';
7,224
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/cross-storage/.eslintrc.js
module.exports = { extends: ['@proton/eslint-config-proton'], parser: '@typescript-eslint/parser', env: { jasmine: true, }, parserOptions: { tsconfigRootDir: __dirname, project: './tsconfig.json', }, ignorePatterns: ['.eslintrc.js'], };
7,225
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/cross-storage/package.json
{ "name": "@proton/cross-storage", "main": "lib/index.ts", "module": "lib/index.ts", "scripts": { "check-types": "tsc", "lint": "eslint ." }, "dependencies": { "@proton/shared": "workspace:packages/shared" }, "devDependencies": { "@proton/eslint-config-proton": "workspace:packages/eslint-config-proton", "eslint": "^8.54.0", "typescript": "^5.3.2" } }
7,226
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/cross-storage/tsconfig.json
{ "extends": "../../tsconfig.base.json", "compilerOptions": { "types": ["webpack-env", "jasmine"] } }
7,227
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/account-impl/guestInstance.ts
import { getAppHref } from '@proton/shared/lib/apps/helper'; import { APPS, isSSOMode } from '@proton/shared/lib/constants'; import create from '../lib/guest'; import { Action, GetLocalStorageKeysMessageResponse, GetLocalStorageMessageResponse, ProtonMessages, RemoveLocalStorageMessageResponse, SetLocalStorageMessageResponse, } from './interface'; const createProtonInstance = (url: string) => create<ProtonMessages>(url); type CrossStorageInstance = ReturnType<typeof createProtonInstance>; export let instance: ReturnType<typeof createHandlers>; const createHandlers = ({ postAndGetMessage }: CrossStorageInstance) => { const getLocalStorage = (key: string) => { return postAndGetMessage<GetLocalStorageMessageResponse>({ type: Action.getLocalStorage, payload: { key, }, }); }; const getLocalStorageKeys = () => { return postAndGetMessage<GetLocalStorageKeysMessageResponse>({ type: Action.getLocalStorageKeys, }); }; const setLocalStorage = (key: string, value: string): Promise<void | undefined> => { return postAndGetMessage<SetLocalStorageMessageResponse>({ type: Action.setLocalStorage, payload: { key, value, }, }); }; const removeLocalStorage = (key: string): Promise<void | undefined> => { return postAndGetMessage<RemoveLocalStorageMessageResponse>({ type: Action.removeLocalStorage, payload: { key, }, }); }; return { setLocalStorage, getLocalStorage, getLocalStorageKeys, removeLocalStorage, }; }; export const setupGuestCrossStorage = () => { if (!isSSOMode) { return; } instance = createHandlers(createProtonInstance(getAppHref('/storage.html', APPS.PROTONACCOUNT))); };
7,228
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/account-impl/hostInstance.ts
import { getItem, removeItem, setItem } from '@proton/shared/lib/helpers/storage'; import { createHost } from '../lib'; import { Action, ProtonMessageResponses, ProtonMessages } from './interface'; const handler = async (message: ProtonMessages): Promise<ProtonMessageResponses | undefined> => { if (message.type === Action.getLocalStorage) { return getItem(message.payload.key); } if (message.type === Action.getLocalStorageKeys) { return [...Object.keys(window.localStorage)]; } if (message.type === Action.setLocalStorage) { setItem(message.payload.key, message.payload.value); return; } if (message.type === Action.removeLocalStorage) { removeItem(message.payload.key); return; } throw new Error(`Unknown message type`); }; export const setupHostCrossStorage = () => { createHost<ProtonMessages, ProtonMessageResponses>(handler); };
7,229
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/account-impl/interface.ts
export enum Action { getLocalStorage = 'getLocalStorage', getLocalStorageKeys = 'getLocalStorageKeys', setLocalStorage = 'setLocalStorage', removeLocalStorage = 'removeLocalStorage', } export interface GetLocalStorageMessage { type: Action.getLocalStorage; payload: { key: string; }; } export type GetLocalStorageMessageResponse = string | null | undefined; export interface GetLocalStorageKeysMessage { type: Action.getLocalStorageKeys; } export type GetLocalStorageKeysMessageResponse = string[] | null | undefined; export interface SetLocalStorageMessage { type: Action.setLocalStorage; payload: { key: string; value: string; }; } export type SetLocalStorageMessageResponse = undefined; export interface RemoveLocalStorageMessage { type: Action.removeLocalStorage; payload: { key: string; }; } export type RemoveLocalStorageMessageResponse = undefined; export type ProtonMessages = | GetLocalStorageMessage | GetLocalStorageKeysMessage | SetLocalStorageMessage | RemoveLocalStorageMessage; export type ProtonMessageResponses = | GetLocalStorageMessageResponse | GetLocalStorageKeysMessageResponse | SetLocalStorageMessageResponse | RemoveLocalStorageMessageResponse;
7,230
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/lib/errors.ts
// eslint-disable-next-line max-classes-per-file export class CrossStorageTimeoutError extends Error { public trace = false; constructor() { super('Cross-storage timeout'); Object.setPrototypeOf(this, CrossStorageTimeoutError.prototype); } } export class CrossStorageUnsupportedError extends Error { public trace = false; constructor() { super('Cross-storage unsupported'); Object.setPrototypeOf(this, CrossStorageUnsupportedError.prototype); } }
7,231
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/lib/guest.ts
import { CrossStorageTimeoutError, CrossStorageUnsupportedError } from './errors'; import { CrossStorageMessage } from './interface'; import { getIsSupported } from './support'; enum States { INIT, SUCCESS, ERROR, } const createGuest = <MessagePayload>(urlTarget: string) => { let iframe: HTMLIFrameElement; let state = States.INIT; let promiseHandler: { promise: Promise<void>; resolve: () => void; reject: (error: Error) => void; }; const url = new URL(urlTarget); let id = 0; const messagePromiseCache: { [key: string]: { resolve: (value: any) => void; reject: (error: Error) => void }; } = {}; const initPromise = () => { promiseHandler = {} as any; promiseHandler.promise = new Promise<void>((resolve, reject) => { promiseHandler.resolve = resolve; promiseHandler.reject = reject; }); }; const initListener = (origin: string) => { const timeoutHandle = window.setTimeout(() => { state = States.ERROR; promiseHandler.reject(new CrossStorageTimeoutError()); }, 5000); window.addEventListener('message', (event: MessageEvent<CrossStorageMessage>) => { if (!iframe) { return; } const contentWindow = iframe?.contentWindow; if (!iframe || !contentWindow || event.origin !== origin || event.source !== contentWindow) { return; } const eventData = event.data; if (!eventData?.type) { return; } if (eventData.type === 'init') { window.clearTimeout(timeoutHandle); if (getIsSupported(eventData.payload.value)) { state = States.SUCCESS; promiseHandler.resolve(); } else { state = States.ERROR; promiseHandler.reject(new CrossStorageUnsupportedError()); } } if (state !== States.SUCCESS) { return; } if (eventData.type === 'response') { if (eventData.status === 'success') { messagePromiseCache[eventData.id]?.resolve(eventData.payload); } if (eventData.status === 'error') { messagePromiseCache[eventData.id]?.reject(eventData.payload); } } }); }; const loadIframe = (url: string) => { iframe = window.document.createElement('iframe'); iframe.src = url; iframe.width = '0'; iframe.height = '0'; 
iframe.style.display = 'none'; document.body.appendChild(iframe); }; const getState = () => { return state; }; const getMessagePromise = <T>(id: number) => { return new Promise<T>((resolve, reject) => { messagePromiseCache[id] = { resolve, reject, }; }); }; const postMessage = async (message: CrossStorageMessage) => { await promiseHandler.promise; iframe.contentWindow?.postMessage(message, url.origin); }; const postAndGetMessage = async <T>(messagePayload: MessagePayload) => { const messageId = id++; const promise = getMessagePromise<T>(messageId); await postMessage({ type: 'message', id: messageId, payload: messagePayload, }); return promise; }; const initChildCrossStorage = () => { initPromise(); initListener(url.origin); loadIframe(url.toString()); }; initChildCrossStorage(); return { getState, postAndGetMessage, }; }; export default createGuest;
7,232
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/lib/host.ts
import { getSecondLevelDomain } from '@proton/shared/lib/helpers/url'; import { CrossStorageMessage, ResponseMessage } from './interface'; import { getTestKeyValue, setTestKeyValue } from './support'; const createHost = <Message, MessageResponse>(handler: (message: Message) => Promise<MessageResponse>) => { const isEmbedded = window.location !== window.parent.location; const hostSecondLevelDomain = getSecondLevelDomain(window.location.hostname); if (!isEmbedded) { throw new Error('Window not embedded'); } const postMessage = (message: CrossStorageMessage, origin: string) => { return window.parent.postMessage(message, origin); }; postMessage({ type: 'init', payload: { value: getTestKeyValue(window) } }, '*'); const reply = (origin: string, payload: ResponseMessage<MessageResponse>) => { postMessage(payload, origin); }; window.addEventListener('message', (event: MessageEvent<CrossStorageMessage>) => { const { source, data, origin } = event; if ( source !== window.parent || getSecondLevelDomain(new URL(origin).hostname) !== hostSecondLevelDomain || data?.type !== 'message' ) { return; } handler(data.payload) .then((response) => { reply(origin, { id: data.id, type: 'response', status: 'success', payload: response }); }) .catch((e) => { reply(origin, { id: data.id, type: 'response', status: 'error', payload: e, }); }); }); }; export const initMainHost = () => { setTestKeyValue(window); }; export default createHost;
7,233
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/lib/index.ts
export { default as createHost, initMainHost } from './host'; export { default as createGuest } from './guest';
7,234
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/lib/interface.ts
export interface ResponseMessage<T> { type: 'response'; status: 'success' | 'error'; id: number; payload: T | Error; } export interface Message<T> { type: 'message'; id: number; payload: T; } export interface InitMessage { type: 'init'; payload: { value: string | undefined | null; }; } export type CrossStorageMessage = InitMessage | Message<any> | ResponseMessage<any>;
7,235
0
petrpan-code/ProtonMail/WebClients/packages/cross-storage
petrpan-code/ProtonMail/WebClients/packages/cross-storage/lib/support.ts
export const setTestKeyValue = (window: Window) => { try { window.localStorage.setItem('cs', '1'); } catch (e: any) { return undefined; } }; export const getIsSupported = (value: any) => { return value === '1'; }; export const getTestKeyValue = (window: Window) => { try { return window.localStorage.getItem('cs'); } catch (e: any) { return undefined; } };
7,236
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/crypto/.eslintrc.js
module.exports = { extends: ['@proton/eslint-config-proton'], parser: '@typescript-eslint/parser', env: { mocha: true, }, plugins: ['chai-friendly'], parserOptions: { tsconfigRootDir: __dirname, project: './tsconfig.json', }, ignorePatterns: ['.eslintrc.js'], rules: { 'no-unused-expressions': 'off', '@typescript-eslint/no-unused-expressions': 'off', 'chai-friendly/no-unused-expressions': ['error', { allowShortCircuit: true }], 'no-restricted-imports': 'off', // currently only used to guard against `pmcrypto` imports }, };
7,237
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/crypto/README.md
The crypto package interfaces the apps with the underlying OpenPGP crypto libs of pmcrypto and OpenPGP.js. > **`pmcrypto` no longer needs to be directly imported by the apps and other packages, you should always use `@proton/crypto` instead.** ## Usage The utils functions that `pmcrypto` exported (e.g. `arrayToBinaryString`) are now accessible under `@proton/crypto/lib/utils`. Crypto-related functions are instead handled by the `CryptoProxy`, which is initialized together with the apps (see [this section](web-worker-integration) for more info on the setup). ### Examples <details> <summary><b>Importing/exporting public and private keys</b></summary> #### Importing/exporting public and private keys `OpenPGPKey` objects have been replaced by `PrivateKeyReference` and `PublicKeyReference` ones, as key material stored away from main thread. To import keys: ```js const recipientPublicKey = await CryptoProxy.importPublicKey({ armoredKey: '...' }); // or `binaryKey` // To import a private key, the passphrase must be known // (otherwise, either wait for it to be available, or import as public key) const senderPrivateKey = await CryptoProxy.importPrivateKey({ armoredKey: '...', // or `binaryKey` passphrase: 'key decryption passphrase', // If the key is expected to be already decrypted (rare, but it can happen for keys uploaded by the user), you have to pass `passphrase: null`. 
}); ``` To export keys to be able to transfer them: ```js // on public key export, if a private key is given, only the public key material is extracted and serialized const armoredPublicKey = await CryptoProxy.exportPublicKey({ key: senderPrivateKey, format: 'armored', // or 'binary' }); // on private key export, the key will be encrypted before serialization, using the given `passhrapse` const armoredPrivateKey = await CryptoProxy.exportPrivateKey({ key: senderPrivateKey, passphrase: 'key encryption passphrase', format: 'armored', // or 'binary' }); ``` To delete the keys from memory once they are no longer needed: ```js // invalidate a specific key reference await CryptoProxy.clearKey({ key: senderPrivateKey }); // after this, passing `senderPrivateKey` to the `CryptoProxy` will result in an error // invalidate all keys previously imported and generated using the `CryptoProxy` await CryptoProxy.clearKeyStore(); ``` </details> <details> <summary><b>Encrypt/sign and decrypt/verify string or binary data using keys</b></summary> #### Encrypt/sign and decrypt/verify string or binary data using keys To encrypt and sign: ```js // import the required keys const senderPublicKey = await CryptoProxy.importPublicKey(...); const recipientPrivateKey = await CryptoProxy.importPrivateKey(...); const { message: armoredMessage, signature: armoredSignature, encryptedSignature: armoredEncryptedSignature, } = await CryptoProxy.encryptMessage({ textData: 'text data to encrypt', // or `binaryData` for Uint8Arrays encryptionKeys: recipientPublicKey, // and/or `passwords` signingKeys: senderPrivateKey, detached: true, format: 'armored' // or 'binary' to output a binary message and signature }); // share `armoredMessage` ``` To decrypt and verify: ```js // import the required keys const senderPublicKey = await CryptoProxy.importPublicKey(...); const recipientPrivateKey = await CryptoProxy.importPrivateKey(...); const { data: decryptedData, verified, verificationErrors } = await 
CryptoProxy.decryptMessage({ armoredMessage, // or `binaryMessage` armoredEncryptedSignature, // or 'binaryEncryptedSignature'/'armoredSignature'/'binarySignature' decryptionKeys: recipientPrivateKey // and/or 'passwords'/'sessionKey' verificationKeys: senderPublicKey }); if (verified === VERIFICATION_STATUS.SIGNED_AND_VALID) { console.log(decryptedData) } else if (verified === VERIFICATION_STATUS.SIGNED_AND_INVALID) { console.log(verificationErrors) } ``` </details> <details> <summary><b>Encrypt/decrypt using the session key</b></summary> #### Encrypt/decrypt using the session key directly ```js // First generate the session key const sessionKey = await CryptoProxy.generateSessionKey({ recipientKeys: recipientPublicKey }); // Then encrypt the data with it const { message: armoredMessage } = await CryptoProxy.encryptMessage({ textData: 'text data to encrypt', // or `binaryData` for Uint8Arrays sessionKey, encryptionKeys: recipientPublicKey, // and/or `passwords`, used to encrypt the session key signingKeys: senderPrivateKey, }); ``` To decrypt, you can again provide the session key directly: ```js // Then encrypt the data with it const { data } = await CryptoProxy.decryptMessage({ armoredMessage, // or `binaryMessage` sessionKeys: sessionKey, verificationKeys: senderPublicKey, }); ``` You can also encrypt the session key on its own: ```js const armoredEncryptedSessionKey = await encryptSessionKey({ ...sessionKey, encryptionKeys, // and/or passwords format: 'armored', // or 'binary' }); // And decrypt it with: const sessionKey = await CryptoProxy.decryptSessionKey({ armoredMessage: armoredEncryptedSessionKey, // or `binaryMessage` decryptionsKeys, // or `passwords` }); ``` </details> ## Web Worker Integration The CryptoProxy redirects crypto request to whatever endpoint is set via `CryptoProxy.setEndpoint`. Only one endpoint can be set at a time. To release an endpoint and possibly set a new one, call `CryptoProxy.releaseEndpoint`. 
This package implements a worker pool `CryptoWorkerPool` that the apps can use as endpoint, out of the box: ```js import { CryptoWorkerPool } from '@proton/crypto/lib/worker/workerPool'; async function setupCryptoWorker() { await CryptoWorkerPool.init(); // CryptoWorkerPool is a singleton CryptoProxy.setEndpoint( CryptoWorkerPool, (endpoint) => endpoint.destroy() // destroy the CryptoWorkerPool when the CryptoProxy endpoint is released ); } ``` Using workers is necessary since crypto operations are likely to freeze the UI if run in the main thread. However, if you have an existing app-specific worker, you might not need to spawn separate workers, as described below. <!-- ## App-specific workers --> ### Setting up CryptoProxy inside a worker (with separate key store than the main thread) If a custom app worker needs to call the CryptoProxy (even indirectly, to e.g. use `@proton/shared` functions), it can create and use a CryptoApi instance directly, thus avoiding going through a separate worker to resolve the requests: ```js import { Api: CryptoApi } from '@proton/crypto/lib/worker/api' CryptoProxy.setEndpoint(new CryptoApi(), endpoint => endpoint.clearKeyStore()); ``` Note that the CryptoApi imports OpenPGP.js, and it should not be used or imported in the main thread, but only inside workers (you might want to use dynamic imports in this sense). The CryptoProxy initialized in this way is totally separate from the CryptoProxy initialized in the main thread, and it will not share key store with it. If you need a shared key store (which is preferable than trasferring keys manually to and from the worker), see the next section. ### Using custom worker as CryptoProxy endpoint for the main thread (with shared key store) To have a single app-specific worker that takes care of some app-specific requests, as well as the CryptoProxy ones from the main thread, it's possible to extend the CryptoApi. 
Example setup: ```js // in `customWorker.ts`: import { expose, transferHandlers } from 'comlink'; import { CryptoProxy, PrivateKeyReference, PublicKeyReference } from '@proton/crypto'; import { Api as CryptoApi } from '@proton/crypto/lib/worker/api'; import { workerTransferHandlers } from '@proton/crypto/lib/worker/transferHandlers'; class CustomWorkerApi extends CryptoApi { constructor() { super(); CryptoProxy.setEndpoint(this); // if needed, set endpoint (e.g. for @proton/shared) in the worker itself } // decrypt and encrypt to a different key, saving some communication overhead async reEncryptMessage({ armoredMessage, decryptionKeys, encryptionKeys, }: { armoredMessage: string, decryptionKeys: PrivateKeyReference[], encryptionKeys: PublicKeyReference[], }) { const { data: binaryData } = await this.decryptMessage({ armoredMessage, decryptionKeys, format: 'binary' }); return this.encryptMessage({ binaryData, encryptionKeys }); } } // set up transfer handlers for the CryptoApi (you might have to set up your own as well) workerTransferHandlers.forEach(({ name, handler }) => transferHandlers.set(name, handler)); // initialize underlying crypto libraries CustomWorkerApi.init(); expose(CustomWorkerApi); ``` ```js // in main thread: import { wrap, transferHandlers } from 'comlink'; import { mainThreadTransferHandlers } from '@proton/crypto/lib/worker/transferHandlers'; import { CryptoProxy } from '@proton/crypto'; const RemoteCustomWorker = wrap<typeof CustomWorkerApi>(new Worker(new URL('./customWorker.ts', import.meta.url))); // set up transfer handlers for the CryptoApi (you might have to set up your own as well) mainThreadTransferHandlers.forEach(({ name, handler }) => transferHandlers.set(name, handler)); async function doStuff() { // start the worker const customWorkerInstance = await new RemoteCustomWorker(); // set it as CryptoProxy endpoint CryptoProxy.setEndpoint(customWorkerInstance); // the CryptoProxy requests will now be directed to your custom worker 
const oldKey = await CryptoProxy.importPrivateKey(...); // or `customWorkerInstance.importPrivateKey` const newKey = await CryptoProxy.generateKey(...); // or `customWorkerInstance.generateKey` // the custom functions need to be referenced directly, since the CryptoProxy is not aware of them await customWorkerInstance.reEncryptMessage({ armoredMessage: '...', decryptionKeys: [oldKey], encryptionKeys: [newKey] }); } ``` ## Testing Chrome and Firefox are used for tests. With Chrome and Firefox installed, running tests should work out of the box. To use a different Chromium-based browser, set the environment variable CHROME_BIN to point to the corresponding executable.
7,238
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/crypto/package.json
{ "name": "@proton/crypto", "description": "", "license": "MIT", "author": "ProtonMail", "sideEffects": false, "main": "./lib/index.ts", "scripts": { "check-types": "tsc", "lint": "eslint lib test --ext .js,.ts,tsx --quiet --cache", "pretty": "prettier --write $(find lib -type f -name '*.js' -o -name '*.ts' -o -name '*.tsx')", "test": "karma start test/karma.conf.js" }, "dependencies": { "comlink": "^4.4.1", "pmcrypto": "npm:@protontech/pmcrypto@^7.14.2" }, "devDependencies": { "@proton/eslint-config-proton": "workspace:packages/eslint-config-proton", "@types/chai": "^4.3.11", "@types/chai-as-promised": "^7.1.8", "@types/mocha": "^10.0.5", "chai": "^4.3.10", "chai-as-promised": "^7.1.1", "eslint": "^8.54.0", "eslint-plugin-chai-friendly": "^0.7.2", "karma": "^6.4.2", "karma-chrome-launcher": "^3.2.0", "karma-firefox-launcher": "^2.1.2", "karma-mocha": "^2.0.1", "karma-mocha-reporter": "^2.2.5", "karma-webpack": "^5.0.0", "mocha": "^10.2.0", "playwright": "^1.40.0", "prettier": "^3.1.0", "ts-loader": "^9.5.1", "typescript": "^5.3.2", "webpack": "^5.89.0", "webpack-cli": "^5.1.4" } }
7,239
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/crypto/tsconfig.json
{ "extends": "../../tsconfig.base.json", "compilerOptions": { "types": ["webpack-env", "mocha"] } }
7,240
0
petrpan-code/ProtonMail/WebClients/packages/crypto
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/index.ts
import type { PartialConfig } from 'pmcrypto'; export * from './serverTime'; export * from './proxy'; export * from './worker/api.models'; export const S2kTypeForConfig: { [key: string]: PartialConfig['s2kType'] } = { // Cannot access `enums` value directly to avoid importing openpgp in the main thread argon2: 4, iterated: 3, };
7,241
0
petrpan-code/ProtonMail/WebClients/packages/crypto
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/serverTime.ts
import { updateServerTime as internalUpdateServerTime, serverTime } from 'pmcrypto/lib/serverTime';

// Tracks whether `updateServerTime()` has been invoked at least once in this session.
let serverTimeWasSet = false;

/**
 * Whether `updateServerTime()` has ever been called.
 * If it never was, `serverTime()` falls back to the local device time; this helper exists
 * for debugging purposes, to detect whether the server time is actually in use.
 */
const wasServerTimeEverUpdated = () => serverTimeWasSet;

/**
 * Record the latest server time, so that `serverTime()` returns it instead of the device time.
 * @param serverDate - date reported by the server
 * @returns the value returned by the underlying pmcrypto implementation
 */
const updateServerTime = (serverDate: Date) => {
    serverTimeWasSet = true;
    return internalUpdateServerTime(serverDate);
};

export { serverTime, updateServerTime, wasServerTimeEverUpdated };
7,242
0
petrpan-code/ProtonMail/WebClients/packages/crypto
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/utils.ts
// This module should be kept free of functions that require 'openpgp' const ifDefined = <T, R>(cb: (input: T) => R) => <U extends T | undefined>(input: U) => { return (input !== undefined ? cb(input as T) : undefined) as U extends T ? R : undefined; }; export const encodeUtf8 = ifDefined((input: string) => unescape(encodeURIComponent(input))); export const decodeUtf8 = ifDefined((input: string) => decodeURIComponent(escape(input))); export const encodeBase64 = ifDefined((input: string) => btoa(input).trim()); export const decodeBase64 = ifDefined((input: string) => atob(input.trim())); export const encodeUtf8Base64 = ifDefined((input: string) => encodeBase64(encodeUtf8(input))); export const decodeUtf8Base64 = ifDefined((input: string) => decodeUtf8(decodeBase64(input))); const isString = (data: any): data is string | String => { return typeof data === 'string' || data instanceof String; }; /** * Convert a string to an array of 8-bit integers * @param str String to convert * @returns An array of 8-bit integers */ export const binaryStringToArray = (str: string) => { if (!isString(str)) { throw new Error('binaryStringToArray: Data must be in the form of a string'); } const result = new Uint8Array(str.length); for (let i = 0; i < str.length; i++) { result[i] = str.charCodeAt(i); } return result; }; /** * Encode an array of 8-bit integers as a string * @param bytes data to encode * @return string-encoded bytes */ export const arrayToBinaryString = (bytes: Uint8Array) => { const result = []; const bs = 1 << 14; const j = bytes.length; for (let i = 0; i < j; i += bs) { // @ts-ignore Uint8Array treated as number[] result.push(String.fromCharCode.apply(String, bytes.subarray(i, i + bs < j ? 
i + bs : j))); } return result.join(''); }; /** * Convert a hex string to an array of 8-bit integers * @param hex A hex string to convert * @returns An array of 8-bit integers */ export const hexStringToArray = (hex: string) => { const result = new Uint8Array(hex.length >> 1); for (let k = 0; k < result.length; k++) { const i = k << 1; result[k] = parseInt(hex.substring(i, i + 2), 16); } return result; }; /** * Convert an array of 8-bit integers to a hex string * @param bytes Array of 8-bit integers to convert * @returns Hexadecimal representation of the array */ export const arrayToHexString = (bytes: Uint8Array) => { const hexAlphabet = '0123456789abcdef'; let s = ''; bytes.forEach((v) => { s += hexAlphabet[v >> 4] + hexAlphabet[v & 15]; }); return s; }; /** * Convert a native javascript string to a Uint8Array of utf8 bytes * @param str - The string to convert * @returns A valid squence of utf8 bytes. */ export function stringToUtf8Array(str: string): Uint8Array { const encoder = new TextEncoder(); return encoder.encode(str); } /** * Convert a Uint8Array of utf8 bytes to a native javascript string * @param utf8 - A valid squence of utf8 bytes * @returns A native javascript string. */ export function utf8ArrayToString(utf8: Uint8Array): string { const decoder = new TextDecoder(); return decoder.decode(utf8); }
7,243
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/proxy/helpers.ts
import { KeyReference, PrivateKeyReference } from '../worker/api.models'; import { CryptoProxy } from './proxy'; /** * Find the key that generated the given signature. * If the signature is signed by multiple keys, only one matching key is returned. * Either `binarySignature` or `armoredSignature` must be provided. * @param keys - keys to search * @return signing key, if found among `keys` */ export async function getMatchingSigningKey(options: { armoredSignature: string; keys: KeyReference[]; }): Promise<KeyReference | undefined>; export async function getMatchingSigningKey(options: { binarySignature: Uint8Array; keys: KeyReference[]; }): Promise<KeyReference | undefined>; export async function getMatchingSigningKey(options: { binarySignature?: Uint8Array; armoredSignature?: string; keys: KeyReference[]; }): Promise<KeyReference | undefined> { const { binarySignature, armoredSignature, keys } = options; const { signingKeyIDs } = binarySignature ? await CryptoProxy.getSignatureInfo({ binarySignature }) : await CryptoProxy.getSignatureInfo({ armoredSignature: armoredSignature! }); for (const signingKeyID of signingKeyIDs) { // If the signing key is a subkey, we still return the full key entity const signingKey = keys.find((key) => { const keyIDs = key.getKeyIDs(); return keyIDs.indexOf(signingKeyID) >= 0; }); if (signingKey) { return signingKey; } } } /** * Create public key reference given a private key one. * The returned key reference is independent of the input one (i.e. clearing either key reference does not affect the other). * NOTE: this function is is considerably more expensive than the former `key.toPublic()`. It is only intended for long-term storage of the public key, as a new key entry will be added to the internal key store. * When using `CryptoProxy`, it is safe to pass a `PrivateKeyReference` where a `PublicKeyReference` is expected. 
*/ export async function toPublicKeyReference(privateKey: PrivateKeyReference) { return CryptoProxy.importPublicKey({ binaryKey: await CryptoProxy.exportPublicKey({ key: privateKey, format: 'binary' }), }); }
7,244
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/proxy/index.ts
// Barrel file for the CryptoProxy module: re-exports the proxy singleton and its helpers.
export * from './proxy';
export * from './helpers';
7,245
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/proxy/proxy.ts
import { DEFAULT_OFFSET, VERIFICATION_STATUS } from 'pmcrypto/lib/constants';

import { captureMessage } from '@proton/shared/lib/helpers/sentry';

import { serverTime } from '../serverTime';
import type { ApiInterface } from '../worker/api';
import { WorkerVerifyOptions } from '../worker/api.models';

export { VERIFICATION_STATUS };

export type CryptoApiInterface = ApiInterface;

// Guard used by every proxied call: fail fast with a clear error if no endpoint was set via `setEndpoint()`.
const assertNotNull = (value: CryptoApiInterface | null): CryptoApiInterface => {
    if (value === null) {
        throw new Error('CryptoProxy: endpoint not initialized');
    }
    return value;
};

// Currently active endpoint (e.g. a worker pool, or an in-worker API instance), or `null` before initialisation.
let endpoint: CryptoApiInterface | null = null;
// Cleanup callback invoked by `releaseEndpoint()`; defaults to a no-op.
let onEndpointRelease: (endpoint?: any) => Promise<void> = async () => {};

interface CryptoProxyInterface extends CryptoApiInterface {
    /**
     * Set proxy endpoint.
     * The provided instance must be already initialised and ready to resolve requests.
     * @param endpoint
     * @param onRelease - callback called after `this.releaseEndpoint()` is invoked and endpoint is released
     */
    setEndpoint<T extends CryptoApiInterface>(endpoint: T, onRelease?: (endpoint: T) => Promise<void>): void;

    /**
     * Release endpoint. Afterwards, a different one may be set via `setEndpoint()`.
     * If a `onRelease` callback was passed to `setEndpoint()`, the callback is called before returning.
     * Note that this function does not have any other side effects, e.g. it does not clear the key store automatically.
     * Any endpoint-specific clean up logic should be done inside the `onRelease` callback.
     */
    releaseEndpoint(): Promise<void>;
}

/**
 * Prior to OpenPGP.js v5.4.0, trailing spaces were not properly stripped with \r\n line endings (see https://github.com/openpgpjs/openpgpjs/pull/1548).
 * In order to verify the signatures generated over the incorrectly normalised data, we fallback to not normalising the input.
 * Currently, this is done inside the CryptoProxy, to transparently track the number of signatures that are affected throughout the apps.
 * @param options - verification options, with `date` already set to server time
 */
async function verifyMessageWithFallback<
    DataType extends string | Uint8Array,
    FormatType extends WorkerVerifyOptions<DataType>['format'] = 'utf8'
>(options: WorkerVerifyOptions<DataType> & { format?: FormatType }) {
    const verificationResult = await assertNotNull(endpoint).verifyMessage<DataType, FormatType>(options);

    const { textData, stripTrailingSpaces } = options;
    if (
        verificationResult.verified === VERIFICATION_STATUS.SIGNED_AND_INVALID &&
        stripTrailingSpaces &&
        textData &&
        verificationResult.data !== textData // detect whether some normalisation was applied
    ) {
        // Retry over the raw (non-normalised) text: a valid result means the signature was made
        // over data normalised with the old, buggy stripping logic.
        const fallbackverificationResult = await assertNotNull(endpoint).verifyMessage<string, FormatType>({
            ...options,
            binaryData: undefined,
            stripTrailingSpaces: false,
        });
        if (fallbackverificationResult.verified === VERIFICATION_STATUS.SIGNED_AND_VALID) {
            captureMessage('Fallback verification needed', {
                level: 'info',
            });
            return fallbackverificationResult;
        }

        // detect whether the message has trailing spaces followed by a mix of \r\n and \n line endings
        const legacyRemoveTrailingSpaces = (text: string) => {
            return text
                .split('\n')
                .map((line) => {
                    let i = line.length - 1;
                    for (; i >= 0 && (line[i] === ' ' || line[i] === '\t'); i--) {}
                    return line.substr(0, i + 1);
                })
                .join('\n');
        };
        if (textData !== legacyRemoveTrailingSpaces(textData)) {
            captureMessage('Fallback verification insufficient', {
                level: 'info',
            });
        }
    }

    return verificationResult;
}

/**
 * CryptoProxy relays crypto requests to the specified endpoint, which is typically a worker(s), except if
 * CryptoProxy is already called (also indirectly) from inside a worker.
 * In such a case, the endpoint can be set to a `new WorkerApi()` instance, or to the worker instance itself,
 * provided it implements/extends WorkerApi.
 */
export const CryptoProxy: CryptoProxyInterface = {
    // Endpoint lifecycle
    setEndpoint(endpointInstance, onRelease = onEndpointRelease) {
        if (endpoint) {
            throw new Error('already initialised');
        }
        endpoint = endpointInstance;
        onEndpointRelease = onRelease;
    },

    releaseEndpoint() {
        const tmp = endpoint;
        endpoint = null;
        return onEndpointRelease(assertNotNull(tmp));
    },

    // Message operations: the `date` option defaults to the server time, so that signatures are
    // created/verified against the server clock rather than the (possibly skewed) device clock.
    encryptMessage: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).encryptMessage({ ...opts, date }),

    decryptMessage: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).decryptMessage({ ...opts, date }),

    decryptMessageLegacy: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).decryptMessageLegacy({ ...opts, date }),

    signMessage: async ({ date = serverTime(), ...opts }) => assertNotNull(endpoint).signMessage({ ...opts, date }),

    // Goes through the fallback helper above instead of calling the endpoint directly.
    verifyMessage: async ({ date = serverTime(), ...opts }) => verifyMessageWithFallback({ ...opts, date }),

    verifyCleartextMessage: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).verifyCleartextMessage({ ...opts, date }),

    processMIME: async ({ date = serverTime(), ...opts }) => assertNotNull(endpoint).processMIME({ ...opts, date }),

    // Session key operations
    generateSessionKey: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).generateSessionKey({ ...opts, date }),

    generateSessionKeyForAlgorithm: async (opts) => assertNotNull(endpoint).generateSessionKeyForAlgorithm(opts),

    encryptSessionKey: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).encryptSessionKey({ ...opts, date }),

    decryptSessionKey: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).decryptSessionKey({ ...opts, date }),

    // Key management
    importPrivateKey: async (opts) => assertNotNull(endpoint).importPrivateKey(opts),

    importPublicKey: async (opts) => assertNotNull(endpoint).importPublicKey(opts),

    // Key generation applies DEFAULT_OFFSET on top of the server time (see pmcrypto constants).
    generateKey: async ({ date = new Date(+serverTime() + DEFAULT_OFFSET), ...opts }) =>
        assertNotNull(endpoint).generateKey({ ...opts, date }),

    reformatKey: async ({ date = serverTime(), ...opts }) => assertNotNull(endpoint).reformatKey({ ...opts, date }),

    exportPublicKey: async (opts) => assertNotNull(endpoint).exportPublicKey(opts),

    exportPrivateKey: async (opts) => assertNotNull(endpoint).exportPrivateKey(opts),

    clearKeyStore: () => assertNotNull(endpoint).clearKeyStore(),

    clearKey: async (opts) => assertNotNull(endpoint).clearKey(opts),

    replaceUserIDs: async (opts) => assertNotNull(endpoint).replaceUserIDs(opts),

    cloneKeyAndChangeUserIDs: async (opts) => assertNotNull(endpoint).cloneKeyAndChangeUserIDs(opts),

    generateE2EEForwardingMaterial: async (opts) => assertNotNull(endpoint).generateE2EEForwardingMaterial(opts),

    doesKeySupportE2EEForwarding: async (opts) => assertNotNull(endpoint).doesKeySupportE2EEForwarding(opts),

    isE2EEForwardingKey: async (opts) => assertNotNull(endpoint).isE2EEForwardingKey(opts),

    // Key status checks — `date` also defaults to the server time
    isRevokedKey: async ({ date = serverTime(), ...opts }) => assertNotNull(endpoint).isRevokedKey({ ...opts, date }),

    isExpiredKey: async ({ date = serverTime(), ...opts }) => assertNotNull(endpoint).isExpiredKey({ ...opts, date }),

    canKeyEncrypt: async ({ date = serverTime(), ...opts }) =>
        assertNotNull(endpoint).canKeyEncrypt({ ...opts, date }),

    // Informational helpers
    getSHA256Fingerprints: async (opts) => assertNotNull(endpoint).getSHA256Fingerprints(opts),

    computeHash: async (opts) => assertNotNull(endpoint).computeHash(opts),

    computeHashStream: async (opts) => assertNotNull(endpoint).computeHashStream(opts),

    getArmoredMessage: async (opts) => assertNotNull(endpoint).getArmoredMessage(opts),

    getArmoredKeys: async (opts) => assertNotNull(endpoint).getArmoredKeys(opts),

    getArmoredSignature: async (opts) => assertNotNull(endpoint).getArmoredSignature(opts),

    getSignatureInfo: async (opts) => assertNotNull(endpoint).getSignatureInfo(opts),

    getMessageInfo: async (opts) => assertNotNull(endpoint).getMessageInfo(opts),

    getKeyInfo: async (opts) => assertNotNull(endpoint).getKeyInfo(opts),
};
7,246
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker/api.models.ts
import type {
    AlgorithmInfo,
    Data,
    DecryptLegacyOptions,
    DecryptOptionsPmcrypto,
    DecryptResultPmcrypto,
    DecryptSessionKeyOptionsPmcrypto,
    EncryptOptionsPmcrypto,
    EncryptSessionKeyOptionsPmcrypto,
    GenerateKeyOptions,
    GenerateSessionKeyOptionsPmcrypto,
    MIMEAttachment,
    ProcessMIMEOptions,
    ProcessMIMEResult,
    ReformatKeyOptions,
    SessionKey,
    SignOptionsPmcrypto,
    VerifyCleartextOptionsPmcrypto,
    VerifyMessageResult,
    VerifyOptionsPmcrypto,
} from 'pmcrypto';

export type { enums } from 'pmcrypto/lib/openpgp';

export type MaybeArray<T> = T[] | T;

export type { SessionKey, AlgorithmInfo, MIMEAttachment };

// The `Worker*` interfaces below mirror the corresponding pmcrypto option/result types, but replace
// OpenPGP.js objects with serialised forms (`armored*`/`binary*` fields) and key objects with
// `KeyReference`s pointing into the worker-side key store.
// NOTE(review): the `armored*`/`binary*` field pairs appear intended to be mutually exclusive — see TODO below.

// TODO TS: do not allow mutually exclusive properties
export interface WorkerDecryptionOptions
    extends Omit<
        DecryptOptionsPmcrypto<Data>,
        'message' | 'signature' | 'encryptedSignature' | 'verificationKeys' | 'decryptionKeys'
    > {
    armoredSignature?: string;
    binarySignature?: Uint8Array;
    armoredMessage?: string;
    binaryMessage?: Uint8Array;
    armoredEncryptedSignature?: string;
    binaryEncryptedSignature?: Uint8Array;
    verificationKeys?: MaybeArray<PublicKeyReference>;
    decryptionKeys?: MaybeArray<PrivateKeyReference>;
}

/** Decryption result with the verified signatures returned in serialised (binary) form. */
export interface WorkerDecryptionResult<T extends Data> extends Omit<DecryptResultPmcrypto<T>, 'signatures'> {
    signatures: Uint8Array[];
}

/** Serialisable counterpart of pmcrypto's `DecryptLegacyOptions`. */
export interface WorkerDecryptLegacyOptions
    extends Omit<
        DecryptLegacyOptions,
        'message' | 'signature' | 'encryptedSignature' | 'verificationKeys' | 'decryptionKeys'
    > {
    armoredMessage: string;
    armoredSignature?: string;
    binarySignature?: Uint8Array;
    verificationKeys?: MaybeArray<PublicKeyReference>;
    decryptionKeys?: MaybeArray<PrivateKeyReference>;
}

// TODO to make Option interfaces easy to use for the user, might be best to set default param types (e.g. T extends Data = Data).
export interface WorkerVerifyOptions<T extends Data>
    extends Omit<VerifyOptionsPmcrypto<T>, 'signature' | 'verificationKeys'> {
    armoredSignature?: string;
    binarySignature?: Uint8Array;
    verificationKeys: MaybeArray<PublicKeyReference>;
}

/** Serialisable counterpart of pmcrypto's `VerifyCleartextOptionsPmcrypto`. */
export interface WorkerVerifyCleartextOptions
    extends Omit<VerifyCleartextOptionsPmcrypto, 'cleartextMessage' | 'verificationKeys'> {
    armoredCleartextMessage: string;
    verificationKeys: MaybeArray<PublicKeyReference>;
}

/** Verification result with the signatures returned in serialised (binary) form. */
export interface WorkerVerificationResult<T extends Data = Data> extends Omit<VerifyMessageResult<T>, 'signatures'> {
    signatures: Uint8Array[];
}

export interface WorkerSignOptions<T extends Data> extends Omit<SignOptionsPmcrypto<T>, 'signingKeys'> {
    format?: 'armored' | 'binary';
    signingKeys?: MaybeArray<PrivateKeyReference>;
}

export interface WorkerEncryptOptions<T extends Data>
    extends Omit<EncryptOptionsPmcrypto<T>, 'signature' | 'signingKeys' | 'encryptionKeys'> {
    format?: 'armored' | 'binary';
    armoredSignature?: string;
    binarySignature?: Uint8Array;
    encryptionKeys?: MaybeArray<PublicKeyReference>;
    signingKeys?: MaybeArray<PrivateKeyReference>;
    compress?: boolean;
}

export interface WorkerProcessMIMEOptions extends Omit<ProcessMIMEOptions, 'verificationKeys'> {
    verificationKeys?: MaybeArray<PublicKeyReference>;
}

export interface WorkerProcessMIMEResult extends Omit<ProcessMIMEResult, 'signatures'> {
    signatures: Uint8Array[];
}

/** Serialised key type selected by the `format` option: armored keys are strings, binary keys are byte arrays. */
export type WorkerExportedKey<F extends 'armored' | 'binary' | undefined = 'armored'> = F extends 'armored'
    ? string
    : Uint8Array;

export interface WorkerImportDecryptedPrivateKeyOptions<T extends Data> {
    armoredKey?: T extends string ? T : never;
    binaryKey?: T extends Uint8Array ? T : never;
}

export interface WorkerImportEncryptedPrivateKeyOptions<T extends Data> {
    armoredKey?: T extends string ? T : never;
    binaryKey?: T extends Uint8Array ? T : never;
    passphrase: string;
}

export interface WorkerImportPrivateKeyOptions<T extends Data> {
    armoredKey?: T extends string ? T : never;
    binaryKey?: T extends Uint8Array ? T : never;
    /**
     * null if the key is expected to be already decrypted, e.g. when user uploads a new private key that is unencrypted
     */
    passphrase: string | null;
}

export type WorkerImportPublicKeyOptions<T extends Data> = {
    armoredKey?: T extends string ? T : never;
    binaryKey?: T extends Uint8Array ? T : never;
};

// `format` and `passphrase` are omitted: generated/reformatted keys stay in the worker key store
// and are exported (optionally encrypted) through a separate call.
export interface WorkerGenerateKeyOptions extends Omit<GenerateKeyOptions, 'format' | 'passphrase'> {}

export interface WorkerReformatKeyOptions extends Omit<ReformatKeyOptions, 'privateKey' | 'format' | 'passphrase'> {
    privateKey: PrivateKeyReference;
}

export interface WorkerEncryptSessionKeyOptions extends Omit<EncryptSessionKeyOptionsPmcrypto, 'encryptionKeys'> {
    format?: 'armored' | 'binary';
    encryptionKeys?: MaybeArray<PublicKeyReference>;
}

export interface WorkerGenerateSessionKeyOptions extends Omit<GenerateSessionKeyOptionsPmcrypto, 'recipientKeys'> {
    recipientKeys?: MaybeArray<PublicKeyReference>;
}

export interface WorkerDecryptSessionKeyOptions
    extends Omit<DecryptSessionKeyOptionsPmcrypto, 'message' | 'decryptionKeys'> {
    armoredMessage?: string;
    binaryMessage?: Uint8Array;
    decryptionKeys?: MaybeArray<PrivateKeyReference>;
}

export interface WorkerGetMessageInfoOptions<T extends Data> {
    armoredMessage?: T extends string ? T : never;
    binaryMessage?: T extends Uint8Array ? T : never;
}

export interface MessageInfo {
    signingKeyIDs: KeyID[];
    encryptionKeyIDs: KeyID[];
}

export interface WorkerGetSignatureInfoOptions<T extends Data> {
    armoredSignature?: T extends string ? T : never;
    binarySignature?: T extends Uint8Array ? T : never;
}

export interface WorkerGetKeyInfoOptions<T extends Data> {
    armoredKey?: T extends string ? T : never;
    binaryKey?: T extends Uint8Array ? T : never;
}

export interface SignatureInfo {
    signingKeyIDs: KeyID[];
}

export interface KeyInfo {
    keyIsPrivate: boolean;
    /**
     * Whether the key is decrypted, or `null` for public keys
     */
    keyIsDecrypted: boolean | null;
    fingerprint: string;
    /**
     * Key IDs of primary key and subkeys in hex format
     */
    keyIDs: KeyID[];
}

/** Hex-encoded OpenPGP key ID. */
export type KeyID = string;

/**
 * Opaque handle to a key held in the worker-side key store.
 * It exposes read-only metadata about the key; the key material itself never leaves the store
 * through this interface.
 */
export interface KeyReference {
    /** Internal unique key identifier for the key store */
    readonly _idx: any;
    /**
     * (Internal) key content identifier to determine equality.
     * First entry includes the full key.
     * Second entry does not include 3rd party certifications (e.g. from Proton CA).
     **/
    readonly _keyContentHash: [string, string];
    getFingerprint(): string;
    /**
     * Key ID of primary key in hex format.
     */
    getKeyID(): KeyID;
    /**
     * Key IDs of primary key and subkeys in hex format
     */
    getKeyIDs(): KeyID[];
    getAlgorithmInfo(): AlgorithmInfo;
    getCreationTime(): Date;
    isPrivate: () => this is PrivateKeyReference;
    getExpirationTime(): Date | number | null;
    getUserIDs(): string[];
    /**
     * Whether the primary key or the subkeys fail to meet our recommended security level.
     * A key is considered secure (i.e. not weak) if it is:
     * - RSA of size >= 2047 bits
     * - ECC using curve 25519 or any of the NIST curves
     */
    isWeak(): boolean;
    /**
     * Compare public key content. Keys are considered equal if they have same key and subkey material,
     * as well as same certification signatures, namely same expiration time, capabilities, algorithm preferences etc.
     * @param [ignoreOtherCerts] - whether third-party certifications (e.g. from Proton CA) should be ignored.
     */
    equals(otherKey: KeyReference, ignoreOtherCerts?: boolean): boolean;
    subkeys: {
        getAlgorithmInfo(): AlgorithmInfo;
        getKeyID(): KeyID;
    }[];
}

export interface PublicKeyReference extends KeyReference {}

export interface PrivateKeyReference extends KeyReference {
    isPrivate: () => true;
}

export interface ComputeHashStreamOptions {
    algorithm: 'unsafeSHA1';
    dataStream: ReadableStream<Uint8Array>;
}
7,247
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker/api.ts
/* eslint-disable class-methods-use-this */ /* eslint-disable max-classes-per-file */ /* eslint-disable no-underscore-dangle */ import { SHA256, SHA512, armorBytes, canKeyEncrypt, checkKeyStrength, decryptKey, decryptMessage, decryptMessageLegacy, decryptSessionKey, doesKeySupportForwarding, encryptKey, encryptMessage, encryptSessionKey, generateForwardingMaterial, generateKey, generateSessionKey, generateSessionKeyForAlgorithm, getSHA256Fingerprints, init as initPmcrypto, isExpiredKey, isForwardingKey, isRevokedKey, processMIME, readCleartextMessage, readKey, readKeys, readMessage, readPrivateKey, readSignature, reformatKey, signMessage, unsafeMD5, unsafeSHA1, verifyCleartextMessage, verifyMessage, } from 'pmcrypto'; import type { Data, Key, PrivateKey, PublicKey } from 'pmcrypto'; import { UserID, enums } from 'pmcrypto/lib/openpgp'; import { arrayToHexString } from '../utils'; import { ComputeHashStreamOptions, KeyInfo, KeyReference, MaybeArray, MessageInfo, PrivateKeyReference, PublicKeyReference, SignatureInfo, WorkerDecryptLegacyOptions, WorkerDecryptionOptions, WorkerEncryptOptions, WorkerEncryptSessionKeyOptions, WorkerGenerateKeyOptions, WorkerGenerateSessionKeyOptions, WorkerGetKeyInfoOptions, WorkerGetMessageInfoOptions, WorkerGetSignatureInfoOptions, WorkerImportPrivateKeyOptions, WorkerImportPublicKeyOptions, WorkerProcessMIMEOptions, WorkerReformatKeyOptions, WorkerSignOptions, WorkerVerifyCleartextOptions, WorkerVerifyOptions, } from './api.models'; // Note: // - streams are currently not supported since they are not Transferable (not in all browsers). // - when returning binary data, the values are always transferred. 
type SerializedSignatureOptions = { armoredSignature?: string; binarySignature?: Uint8Array }; const getSignature = async ({ armoredSignature, binarySignature }: SerializedSignatureOptions) => { if (armoredSignature) { return readSignature({ armoredSignature }); } else if (binarySignature) { return readSignature({ binarySignature }); } throw new Error('Must provide `armoredSignature` or `binarySignature`'); }; type SerializedMessageOptions = { armoredMessage?: string; binaryMessage?: Uint8Array }; const getMessage = async ({ armoredMessage, binaryMessage }: SerializedMessageOptions) => { if (armoredMessage) { return readMessage({ armoredMessage }); } else if (binaryMessage) { return readMessage({ binaryMessage }); } throw new Error('Must provide `armoredMessage` or `binaryMessage`'); }; type SerializedKeyOptions = { armoredKey?: string; binaryKey?: Uint8Array }; const getKey = async ({ armoredKey, binaryKey }: SerializedKeyOptions) => { if (armoredKey) { return readKey({ armoredKey }); } else if (binaryKey) { return readKey({ binaryKey }); } throw new Error('Must provide `armoredKey` or `binaryKey`'); }; const toArray = <T>(maybeArray: MaybeArray<T>) => (Array.isArray(maybeArray) ? maybeArray : [maybeArray]); const getPublicKeyReference = async (key: PublicKey, keyStoreID: number): Promise<PublicKeyReference> => { const publicKey = key.isPrivate() ? 
key.toPublic() : key; // We don't throw on private key since we allow importing an (encrypted) private key using 'importPublicKey' const fingerprint = publicKey.getFingerprint(); const hexKeyID = publicKey.getKeyID().toHex(); const hexKeyIDs = publicKey.getKeyIDs().map((id) => id.toHex()); const algorithmInfo = publicKey.getAlgorithmInfo(); const creationTime = publicKey.getCreationTime(); const expirationTime = await publicKey.getExpirationTime(); const userIDs = publicKey.getUserIDs(); const keyContentHash = await SHA256(publicKey.write()).then(arrayToHexString); // Allow comparing keys without third-party certification let keyContentHashNoCerts: string; // Check if third-party certs are present if (publicKey.users.some((user) => user.otherCertifications.length > 0)) { // @ts-ignore missing `clone()` definition const publicKeyClone: PublicKey = publicKey.clone(); publicKeyClone.users.forEach((user) => { user.otherCertifications = []; }); keyContentHashNoCerts = await SHA256(publicKeyClone.write()).then(arrayToHexString); } else { keyContentHashNoCerts = keyContentHash; } let isWeak: boolean; try { checkKeyStrength(publicKey); isWeak = false; } catch { isWeak = true; } return { _idx: keyStoreID, _keyContentHash: [keyContentHash, keyContentHashNoCerts], isPrivate: () => false, getFingerprint: () => fingerprint, getKeyID: () => hexKeyID, getKeyIDs: () => hexKeyIDs, getAlgorithmInfo: () => algorithmInfo, getCreationTime: () => creationTime, getExpirationTime: () => expirationTime, getUserIDs: () => userIDs, isWeak: () => isWeak, equals: (otherKey: KeyReference, ignoreOtherCerts = false) => ignoreOtherCerts ? 
otherKey._keyContentHash[1] === keyContentHashNoCerts : otherKey._keyContentHash[0] === keyContentHash, subkeys: publicKey.getSubkeys().map((subkey) => { const subkeyAlgoInfo = subkey.getAlgorithmInfo(); const subkeyKeyID = subkey.getKeyID().toHex(); return { getAlgorithmInfo: () => subkeyAlgoInfo, getKeyID: () => subkeyKeyID, }; }), }; }; const getPrivateKeyReference = async (privateKey: PrivateKey, keyStoreID: number): Promise<PrivateKeyReference> => { const publicKeyReference = await getPublicKeyReference(privateKey.toPublic(), keyStoreID); return { ...publicKeyReference, isPrivate: () => true, }; }; class KeyStore { private store = new Map<number, Key>(); /** * Monotonic counter keeping track of the next unique identifier to index a newly added key. * The starting counter value is picked at random to minimize the changes of collisions between keys during different user sessions. * NB: key references may be stored by webapps even after the worker has been destroyed (e.g. after closing the browser window), * hence we want to keep using different identifiers even after restarting the worker, to also invalidate those stale key references. */ private nextIdx = crypto.getRandomValues(new Uint32Array(1))[0]; /** * Add a key to the key store. * @param key - key to add * @param customIdx - custom identifier to use to store the key, instead of the internally generated one. * This argument is primarily intended for when key store identifiers need to be synchronised across different workers. * This value must be unique for each key, even across different sessions. * @returns key identifier to retrieve the key from the store */ add(key: Key, customIdx?: number) { const idx = customIdx !== undefined ? 
customIdx : this.nextIdx; if (this.store.has(idx)) { throw new Error(`Idx ${idx} already in use`); } this.store.set(idx, key); this.nextIdx++; // increment regardless of customIdx, for code simplicity return idx; } get(idx: number) { const key = this.store.get(idx); if (!key) { throw new Error('Key not found'); } return key; } clearAll() { this.store.forEach((key) => { if (key.isPrivate()) { // @ts-ignore missing definition for clearPrivateParams() key.clearPrivateParams(); } }); this.store.clear(); // no need to reset index } clear(idx: number) { const keyToClear = this.get(idx); if (keyToClear.isPrivate()) { // @ts-ignore missing definition for clearPrivateParams() keyToClear.clearPrivateParams(); } this.store.delete(idx); } } type SerialisedOutputFormat = 'armored' | 'binary' | undefined; type SerialisedOutputTypeFromFormat<F extends SerialisedOutputFormat> = F extends 'armored' ? string : F extends 'binary' ? Uint8Array : never; class KeyManagementApi { protected keyStore = new KeyStore(); /** * Invalidate all key references by removing all keys from the internal key store. * The private key material corresponding to any PrivateKeyReference is erased from memory. */ async clearKeyStore() { this.keyStore.clearAll(); } /** * Invalidate the key reference by removing the key from the internal key store. * If a PrivateKeyReference is given, the private key material is erased from memory. */ async clearKey({ key: keyReference }: { key: KeyReference }) { this.keyStore.clear(keyReference._idx); } /** * Generate a key for the given UserID. * The key is stored in the key store, and can be exported using `exportPrivateKey` or `exportPublicKey`. 
* @param options.userIDs - user IDs as objects: `{ name: 'Jo Doe', email: 'info@jo.com' }` * @param options.type - key algorithm type: ECC (default) or RSA * @param options.rsaBits - number of bits for RSA keys * @param options.curve - elliptic curve for ECC keys * @param options.keyExpirationTime- number of seconds from the key creation time after which the key expires * @param options.subkeys - options for each subkey e.g. `[{ sign: true, passphrase: '123'}]` * @param options.date - use the given date as creation date of the key and the key signatures, instead of the server time * @returns reference to the generated private key */ async generateKey(options: WorkerGenerateKeyOptions) { const { privateKey } = await generateKey({ ...options, format: 'object' }); // Typescript guards against a passphrase input, but it's best to ensure the option wasn't given since for API simplicity we assume any PrivateKeyReference points to a decrypted key. if (!privateKey.isDecrypted()) { throw new Error( 'Unexpected "passphrase" option on key generation. Use "exportPrivateKey" after key generation to obtain a transferable encrypted key.' ); } const keyStoreID = this.keyStore.add(privateKey); return getPrivateKeyReference(privateKey, keyStoreID); } async reformatKey({ privateKey: keyReference, ...options }: WorkerReformatKeyOptions) { const originalKey = this.keyStore.get(keyReference._idx) as PrivateKey; // we have to deep clone before reformatting, since privateParams of reformatted key point to the ones of the given privateKey, and // we do not want reformatted key to be affected if the original key reference is cleared/deleted. 
// @ts-ignore - missing .clone() definition const keyToReformat = originalKey.clone(true); const { privateKey } = await reformatKey({ ...options, privateKey: keyToReformat, format: 'object' }); // Typescript guards against a passphrase input, but it's best to ensure the option wasn't given since for API simplicity we assume any PrivateKeyReference points to a decrypted key. if (!privateKey.isDecrypted()) { throw new Error( 'Unexpected "passphrase" option on key reformat. Use "exportPrivateKey" after key reformatting to obtain a transferable encrypted key.' ); } const keyStoreID = this.keyStore.add(privateKey); return getPrivateKeyReference(privateKey, keyStoreID); } /** * Import a private key, which is either already decrypted, or that can be decrypted with the given passphrase. * If a passphrase is given, but the key is already decrypted, importing fails. * Either `armoredKey` or `binaryKey` must be provided. * Note: if the passphrase to decrypt the key is unknown, the key shuld be imported using `importPublicKey` instead. * @param options.passphrase - key passphrase if the input key is encrypted, or `null` if the input key is expected to be already decrypted * @returns reference to imported private key * @throws {Error} if the key cannot be decrypted or importing fails */ async importPrivateKey<T extends Data>( { armoredKey, binaryKey, passphrase }: WorkerImportPrivateKeyOptions<T>, _customIdx?: number ) { if (!armoredKey && !binaryKey) { throw new Error('Must provide `armoredKey` or `binaryKey`'); } const expectDecrypted = passphrase === null; const maybeEncryptedKey = binaryKey ? await readPrivateKey({ binaryKey }) : await readPrivateKey({ armoredKey: armoredKey! 
}); let decryptedKey; if (expectDecrypted) { if (!maybeEncryptedKey.isDecrypted()) { throw new Error('Provide passphrase to import an encrypted private key'); } decryptedKey = maybeEncryptedKey; // @ts-ignore missing .validate() types await decryptedKey.validate(); } else { const usesArgon2 = maybeEncryptedKey.getKeys().some( // @ts-ignore s2k field not declared (keyOrSubkey) => keyOrSubkey.keyPacket.s2k && keyOrSubkey.keyPacket.s2k.type === 'argon2' ); if (usesArgon2) { // TODO: Argon2 uses Wasm which requires special bundling throw new Error('Keys encrypted using Argon2 are not supported yet'); } decryptedKey = await decryptKey({ privateKey: maybeEncryptedKey, passphrase }); } const keyStoreID = this.keyStore.add(decryptedKey, _customIdx); return getPrivateKeyReference(decryptedKey, keyStoreID); } /** * Import a public key. * Either `armoredKey` or `binaryKey` must be provided. * Note: if a private key is given, it will be converted to a public key before import. * @returns reference to imported public key */ async importPublicKey<T extends Data>( { armoredKey, binaryKey }: WorkerImportPublicKeyOptions<T>, _customIdx?: number ) { const publicKey = await getKey({ binaryKey, armoredKey }); const keyStoreID = this.keyStore.add(publicKey, _customIdx); return getPublicKeyReference(publicKey, keyStoreID); } /** * Get the serialized public key. * Exporting a key does not invalidate the corresponding `KeyReference`, nor does it remove the key from internal storage (use `clearKey()` for that). * @param options.format - `'binary'` or `'armored'` format of serialized key * @returns serialized public key */ async exportPublicKey<F extends SerialisedOutputFormat = 'armored'>({ format = 'armored', key: keyReference, }: { key: KeyReference; format?: F; }): Promise<SerialisedOutputTypeFromFormat<F>> { const maybePrivateKey = this.keyStore.get(keyReference._idx); const publicKey = maybePrivateKey.isPrivate() ? 
maybePrivateKey.toPublic() : maybePrivateKey; const serializedKey = format === 'binary' ? publicKey.write() : publicKey.armor(); return serializedKey as SerialisedOutputTypeFromFormat<F>; } /** * Get the serialized private key, encrypted with the given `passphrase`. * Exporting a key does not invalidate the corresponding `keyReference`, nor does it remove the key from internal storage (use `clearKey()` for that). * @param options.passphrase - passphrase to encrypt the key with (non-empty string), or `null` to export an unencrypted key (not recommended). * @param options.format - `'binary'` or `'armored'` format of serialized key * @returns serialized encrypted key */ async exportPrivateKey<F extends SerialisedOutputFormat = 'armored'>({ format = 'armored', ...options }: { privateKey: PrivateKeyReference; passphrase: string | null; format?: F; }): Promise<SerialisedOutputTypeFromFormat<F>> { const { privateKey: keyReference, passphrase } = options; if (!keyReference.isPrivate()) { throw new Error('Private key expected'); } const privateKey = this.keyStore.get(keyReference._idx) as PrivateKey; const doNotEncrypt = passphrase === null; const maybeEncryptedKey = doNotEncrypt ? privateKey : await encryptKey({ privateKey, passphrase }); const serializedKey = format === 'binary' ? maybeEncryptedKey.write() : maybeEncryptedKey.armor(); return serializedKey as SerialisedOutputTypeFromFormat<F>; } } /** * Each instance keeps a dedicated key storage. */ export class Api extends KeyManagementApi { /** * Init pmcrypto and set the underlying global OpenPGP config. */ static init() { initPmcrypto(); } /** * Encrypt the given data using `encryptionKeys`, `sessionKeys` and `passwords`, after optionally * signing it with `signingKeys`. * Either `textData` or `binaryData` must be given. * A detached signature over the data may be provided by passing either `armoredSignature` or `binarySignature`. 
* @param options.textData - text data to encrypt * @param options.binaryData - binary data to encrypt * @param options.stripTrailingSpaces - whether trailing spaces should be removed from each line of `textData` * @param options.context - (signed data only) settings to prevent verifying the signature in a different context (signature domain separation) * @param options.format - `'binary` or `'armored'` format of serialized signed message * @param options.date - use the given date for the message signature, instead of the server time */ async encryptMessage< DataType extends Data, FormatType extends WorkerEncryptOptions<DataType>['format'] = 'armored', DetachedType extends boolean = false >({ encryptionKeys: encryptionKeyRefs = [], signingKeys: signingKeyRefs = [], armoredSignature, binarySignature, compress = false, config = {}, ...options }: WorkerEncryptOptions<DataType> & { format?: FormatType; detached?: DetachedType }) { const signingKeys = toArray(signingKeyRefs).map( (keyReference) => this.keyStore.get(keyReference._idx) as PrivateKey ); const encryptionKeys = toArray(encryptionKeyRefs).map( (keyReference) => this.keyStore.get(keyReference._idx) as PublicKey ); const inputSignature = binarySignature || armoredSignature ? await getSignature({ armoredSignature, binarySignature }) : undefined; if (config.preferredCompressionAlgorithm) { throw new Error( 'Passing `config.preferredCompressionAlgorithm` is not supported. Use `compress` option instead.' ); } const encryptionResult = await encryptMessage<DataType, FormatType, DetachedType>({ ...options, encryptionKeys, signingKeys, signature: inputSignature, config: { ...config, preferredCompressionAlgorithm: compress ? enums.compression.zlib : enums.compression.uncompressed, }, }); return encryptionResult; } /** * Create a signature over the given data using `signingKeys`. * Either `textData` or `binaryData` must be given. 
* @param options.textData - text data to sign * @param options.binaryData - binary data to sign * @param options.stripTrailingSpaces - whether trailing spaces should be removed from each line of `textData` * @param options.context - settings to prevent verifying the signature in a different context (signature domain separation) * @param options.detached - whether to return a detached signature, without the signed data * @param options.format - `'binary` or `'armored'` format of serialized signed message * @param options.date - use the given date for signing, instead of the server time * @returns serialized signed message or signature */ async signMessage< DataType extends Data, FormatType extends WorkerSignOptions<DataType>['format'] = 'armored' // inferring D (detached signature type) is unnecessary since the result type does not depend on it for format !== 'object' >({ signingKeys: signingKeyRefs = [], ...options }: WorkerSignOptions<DataType> & { format?: FormatType }) { const signingKeys = toArray(signingKeyRefs).map( (keyReference) => this.keyStore.get(keyReference._idx) as PrivateKey ); const signResult = await signMessage<DataType, FormatType, boolean>({ ...options, signingKeys, }); return signResult; } /** * Verify a signature over the given data. * Either `armoredSignature` or `binarySignature` must be given for the signature, and either `textData` or `binaryData` must be given as data to be verified. * To verify a Cleartext message, which includes both the signed data and the corresponding signature, see `verifyCleartextMessage`. * @param options.textData - expected signed text data * @param options.binaryData - expected signed binary data * @param options.armoredSignature - armored signature to verify * @param options.binarySignature - binary signature to verify * @param options.stripTrailingSpaces - whether trailing spaces should be removed from each line of `textData`. * This option must match the one used when signing. 
* @param options.context - settings to prevent verifying a signature from a different context (signature domain separation). * This option should match the one used when signing. * @returns signature verification result over the given data */ async verifyMessage<DataType extends Data, FormatType extends WorkerVerifyOptions<DataType>['format'] = 'utf8'>({ armoredSignature, binarySignature, verificationKeys: verificationKeyRefs = [], ...options }: WorkerVerifyOptions<DataType> & { format?: FormatType }) { const verificationKeys = toArray(verificationKeyRefs).map((keyReference) => this.keyStore.get(keyReference._idx) ); const signature = await getSignature({ armoredSignature, binarySignature }); const { signatures: signatureObjects, // extracting this is needed for proper type inference of `serialisedResult.signatures` ...verificationResultWithoutSignatures } = await verifyMessage<DataType, FormatType>({ signature, verificationKeys, ...options }); const serialisedResult = { ...verificationResultWithoutSignatures, signatures: signatureObjects.map((sig) => sig.write() as Uint8Array), // no support for streamed input for now }; return serialisedResult; } /** * Verify a Cleartext message, which includes the signed data and the corresponding signature. * A cleartext message is always in armored form. * To verify a detached signature over some data, see `verifyMessage` instead. 
* @params options.armoredCleartextSignature - armored cleartext message to verify */ async verifyCleartextMessage({ armoredCleartextMessage, verificationKeys: verificationKeyRefs = [], ...options }: WorkerVerifyCleartextOptions) { const verificationKeys = toArray(verificationKeyRefs).map((keyReference) => this.keyStore.get(keyReference._idx) ); const cleartextMessage = await readCleartextMessage({ cleartextMessage: armoredCleartextMessage }); const { signatures: signatureObjects, // extracting this is needed for proper type inference of `serialisedResult.signatures` ...verificationResultWithoutSignatures } = await verifyCleartextMessage({ cleartextMessage, verificationKeys, ...options }); const serialisedResult = { ...verificationResultWithoutSignatures, signatures: signatureObjects.map((sig) => sig.write() as Uint8Array), // no support for streamed input for now }; return serialisedResult; } /** * Decrypt a message using `decryptionKeys`, `sessionKey`, or `passwords`, and optionally verify the content using `verificationKeys`. * Eiher `armoredMessage` or `binaryMessage` must be given. * For detached signature verification over the decrypted data, one of `armoredSignature`, * `binarySignature`, `armoredEncryptedSignature` and `binaryEncryptedSignature` may be given. * @param options.armoredMessage - armored data to decrypt * @param options.binaryMessage - binary data to decrypt * @param options.expectSigned - if true, data decryption fails if the message is not signed with the provided `verificationKeys` * @param options.context - (signed data only) settings to prevent verifying a signature from a different context (signature domain separation). * This option should match the one used when encrypting. * @param options.format - whether to return data as a string or Uint8Array. If 'utf8' (the default), also normalize newlines. 
* @param options.date - use the given date for verification instead of the server time */ async decryptMessage<FormatType extends WorkerDecryptionOptions['format'] = 'utf8'>({ decryptionKeys: decryptionKeyRefs = [], verificationKeys: verificationKeyRefs = [], armoredMessage, binaryMessage, armoredSignature, binarySignature, armoredEncryptedSignature: armoredEncSignature, binaryEncryptedSignature: binaryEncSingature, ...options }: WorkerDecryptionOptions & { format?: FormatType }) { const decryptionKeys = toArray(decryptionKeyRefs).map( (keyReference) => this.keyStore.get(keyReference._idx) as PrivateKey ); const verificationKeys = toArray(verificationKeyRefs).map((keyReference) => this.keyStore.get(keyReference._idx) ); const message = await getMessage({ binaryMessage, armoredMessage }); const signature = binarySignature || armoredSignature ? await getSignature({ binarySignature, armoredSignature }) : undefined; const encryptedSignature = binaryEncSingature || armoredEncSignature ? await getMessage({ binaryMessage: binaryEncSingature, armoredMessage: armoredEncSignature }) : undefined; const { signatures: signatureObjects, ...decryptionResultWithoutSignatures } = await decryptMessage< Data, FormatType >({ ...options, message, signature, encryptedSignature, decryptionKeys, verificationKeys, }); const serialisedResult = { ...decryptionResultWithoutSignatures, signatures: signatureObjects.map((sig) => sig.write() as Uint8Array), // no support for streamed input for now }; return serialisedResult; // TODO: once we have support for the intendedRecipient verification, we should add the // a `verify(publicKeys)` function to the decryption result, that allows verifying // the decrypted signatures after decryption. 
// Note: asking the apps to call `verifyMessage` separately is not an option, since // the verification result is to be considered invalid outside of the encryption context if the intended recipient is present, see: https://datatracker.ietf.org/doc/html/draft-ietf-openpgp-crypto-refresh#section-5.2.3.32 } /** * Backwards-compatible decrypt message function, to be only used for email messages that might be of legacy format. * For all other cases, use `decryptMessage`. */ async decryptMessageLegacy<FormatType extends WorkerDecryptLegacyOptions['format'] = 'utf8'>({ decryptionKeys: decryptionKeyRefs = [], verificationKeys: verificationKeyRefs = [], armoredMessage, armoredSignature, binarySignature, ...options }: WorkerDecryptLegacyOptions & { format?: FormatType }) { const decryptionKeys = toArray(decryptionKeyRefs).map( (keyReference) => this.keyStore.get(keyReference._idx) as PrivateKey ); const verificationKeys = toArray(verificationKeyRefs).map((keyReference) => this.keyStore.get(keyReference._idx) ); const signature = binarySignature || armoredSignature ? await getSignature({ binarySignature, armoredSignature }) : undefined; const { signatures: signatureObjects, ...decryptionResultWithoutSignatures } = await decryptMessageLegacy<FormatType>({ ...options, armoredMessage, signature, decryptionKeys, verificationKeys, }); const serialisedResult = { ...decryptionResultWithoutSignatures, signatures: signatureObjects.map((sig) => sig.write() as Uint8Array), // no support for streamed input for now }; return serialisedResult; } /** * Generate forwardee key and proxy parameter needed to setup end-to-end encrypted forwarding for the given * privateKey. 
* @param options.forwarderPrivateKey - private key of original recipient, initiating the forwarding * @param options.userIDsForForwardeeKey - userIDs to attach to forwardee key * @param options.passphrase - passphrase to encrypt the generated forwardee key with */ async generateE2EEForwardingMaterial({ forwarderKey, userIDsForForwardeeKey, passphrase, }: { forwarderKey: PrivateKeyReference; userIDsForForwardeeKey: MaybeArray<UserID>; passphrase: string | null; }) { const originalKey = this.keyStore.get(forwarderKey._idx) as PrivateKey; const { proxyInstances, forwardeeKey } = await generateForwardingMaterial(originalKey, userIDsForForwardeeKey); const maybeEncryptedKey = passphrase ? await encryptKey({ privateKey: forwardeeKey, passphrase }) : forwardeeKey; return { forwardeeKey: maybeEncryptedKey.armor(), proxyInstances, }; } /** * Check whether a key can be used as input to `generateE2EEForwardingMaterial` to setup E2EE forwarding. */ async doesKeySupportE2EEForwarding({ forwarderKey: keyReference }: { forwarderKey: PrivateKeyReference }) { const key = this.keyStore.get(keyReference._idx); if (!key.isPrivate()) { return false; } const supportsForwarding = await doesKeySupportForwarding(key); return supportsForwarding; } /** * Whether a key is a E2EE forwarding recipient key, where all its encryption-capable (sub)keys are setup * for forwarding. * NB: this function also accepts `PublicKeyReference`s in order to determine the status of inactive (undecryptable) * private keys. Such keys can only be imported using `importPublicKey`, but it's important that the encrypted * private key is imported (not the corresponding public key). * @throws if a PublicKeyReference containing a public key is given */ async isE2EEForwardingKey({ key: keyReference }: { key: KeyReference }) { // We support PublicKeyReference to determine the status of inactive/undecryptable address keys. // A PublicKeyReference can contain an encrypted private key. 
const key = this.keyStore.get(keyReference._idx); if (!key.isPrivate()) { throw new Error('Unexpected public key'); } const forForwarding = await isForwardingKey(key); return forForwarding; } /** * Generating a session key for the specified symmetric algorithm. * To generate a session key based on some recipient's public key preferences, * use `generateSessionKey()` instead. */ async generateSessionKeyForAlgorithm(algoName: Parameters<typeof generateSessionKeyForAlgorithm>[0]) { const sessionKeyBytes = await generateSessionKeyForAlgorithm(algoName); return sessionKeyBytes; } /** * Generate a session key compatible with the given recipient keys. * To get a session key for a specific symmetric algorithm, use `generateSessionKeyForAlgorithm` instead. */ async generateSessionKey({ recipientKeys: recipientKeyRefs = [], ...options }: WorkerGenerateSessionKeyOptions) { const recipientKeys = toArray(recipientKeyRefs).map((keyReference) => this.keyStore.get(keyReference._idx)); const sessionKey = await generateSessionKey({ recipientKeys, ...options }); return sessionKey; } /** * Encrypt a session key with `encryptionKeys`, `passwords`, or both at once. * At least one of `encryptionKeys` or `passwords` must be specified. * @param options.data - the session key to be encrypted e.g. 
16 random bytes (for aes128) * @param options.algorithm - algorithm of the session key * @param options.aeadAlgorithm - AEAD algorithm of the session key * @param options.format - `'armored'` or `'binary'` format of the returned encrypted message * @param options.wildcard - use a key ID of 0 instead of the encryption key IDs * @param options.date - use the given date for key validity checks, instead of the server time */ async encryptSessionKey<FormatType extends WorkerEncryptSessionKeyOptions['format'] = 'armored'>({ encryptionKeys: encryptionKeyRefs = [], ...options }: WorkerEncryptSessionKeyOptions & { format?: FormatType }): Promise<SerialisedOutputTypeFromFormat<FormatType>> { const encryptionKeys = toArray(encryptionKeyRefs).map( (keyReference) => this.keyStore.get(keyReference._idx) as PublicKey ); const encryptedData = await encryptSessionKey<FormatType>({ ...options, encryptionKeys, }); return encryptedData as SerialisedOutputTypeFromFormat<FormatType>; } /** * Decrypt the message's session keys using either `decryptionKeys` or `passwords`. * Either `armoredMessage` or `binaryMessage` must be given. 
* @param options.armoredMessage - an armored message containing encrypted session key packets * @param options.binaryMessage - a binary message containing encrypted session key packets * @param options.date - date to use for key validity checks instead of the server time * @throws if no session key could be found or decrypted */ async decryptSessionKey({ decryptionKeys: decryptionKeyRefs = [], armoredMessage, binaryMessage, ...options }: WorkerDecryptionOptions) { const decryptionKeys = toArray(decryptionKeyRefs).map( (keyReference) => this.keyStore.get(keyReference._idx) as PrivateKey ); const message = await getMessage({ binaryMessage, armoredMessage }); const sessionKey = await decryptSessionKey({ ...options, message, decryptionKeys, }); return sessionKey; } async processMIME({ verificationKeys: verificationKeyRefs = [], ...options }: WorkerProcessMIMEOptions) { const verificationKeys = toArray(verificationKeyRefs).map((keyReference) => this.keyStore.get(keyReference._idx) ); const { signatures: signatureObjects, ...resultWithoutSignature } = await processMIME({ ...options, verificationKeys, }); const serialisedResult = { ...resultWithoutSignature, signatures: signatureObjects.map((sig) => sig.write() as Uint8Array), }; return serialisedResult; } async getMessageInfo<DataType extends Data>({ armoredMessage, binaryMessage, }: WorkerGetMessageInfoOptions<DataType>): Promise<MessageInfo> { const message = await getMessage({ binaryMessage, armoredMessage }); const signingKeyIDs = message.getSigningKeyIDs().map((keyID) => keyID.toHex()); const encryptionKeyIDs = message.getEncryptionKeyIDs().map((keyID) => keyID.toHex()); return { signingKeyIDs, encryptionKeyIDs }; } async getSignatureInfo<DataType extends Data>({ armoredSignature, binarySignature, }: WorkerGetSignatureInfoOptions<DataType>): Promise<SignatureInfo> { const signature = await getSignature({ binarySignature, armoredSignature }); const signingKeyIDs = signature.getSigningKeyIDs().map((keyID) => 
keyID.toHex()); return { signingKeyIDs }; } /** * Get basic info about a serialied key without importing it in the key store. * E.g. determine whether the given key is private, and whether it is decrypted. */ async getKeyInfo<T extends Data>({ armoredKey, binaryKey }: WorkerGetKeyInfoOptions<T>): Promise<KeyInfo> { const key = await getKey({ binaryKey, armoredKey }); const keyIsPrivate = key.isPrivate(); const keyIsDecrypted = keyIsPrivate ? key.isDecrypted() : null; const fingerprint = key.getFingerprint(); const keyIDs = key.getKeyIDs().map((keyID) => keyID.toHex()); return { keyIsPrivate, keyIsDecrypted, fingerprint, keyIDs, }; } /** * Armor a message signature in binary form */ async getArmoredSignature({ binarySignature }: { binarySignature: Uint8Array }) { const signature = await getSignature({ binarySignature }); return signature.armor(); } /** * Armor a message given in binary form */ async getArmoredMessage({ binaryMessage }: { binaryMessage: Uint8Array }) { const armoredMessage = await armorBytes(binaryMessage); return armoredMessage; } /** * Given one or more keys concatenated in binary format, get the corresponding keys in armored format. * The keys are not imported into the key store nor processed further. Both private and public keys are supported. * @returns array of armored keys */ async getArmoredKeys({ binaryKeys }: { binaryKeys: Uint8Array }) { const keys = await readKeys({ binaryKeys }); return keys.map((key) => key.armor()); } /** * Returns whether the primary key is revoked. * @param options.date - date to use for signature verification, instead of the server time */ async isRevokedKey({ key: keyReference, date }: { key: KeyReference; date?: Date }) { const key = this.keyStore.get(keyReference._idx); const isRevoked = await isRevokedKey(key, date); return isRevoked; } /** * Returns whether the primary key is expired, or its creation time is in the future. 
* @param options.date - date to use for the expiration check, instead of the server time */ async isExpiredKey({ key: keyReference, date }: { key: KeyReference; date?: Date }) { const key = this.keyStore.get(keyReference._idx); const isExpired = await isExpiredKey(key, date); return isExpired; } /** * Check whether a key can successfully encrypt a message. * This confirms that the key has encryption capabilities, it is neither expired nor revoked, and that its key material is valid. */ async canKeyEncrypt({ key: keyReference, date }: { key: KeyReference; date?: Date }) { const key = this.keyStore.get(keyReference._idx); const canEncrypt = await canKeyEncrypt(key, date); return canEncrypt; } async getSHA256Fingerprints({ key: keyReference }: { key: KeyReference }) { const key = this.keyStore.get(keyReference._idx); // this is quite slow since it hashes the key packets, even for v5 keys, instead of reusing the fingerprint. // once v5 keys are more widespread and this function can be made more efficient, we could include `sha256Fingerprings` in `KeyReference` or `KeyInfo`. 
const sha256Fingerprints = await getSHA256Fingerprints(key); return sha256Fingerprints; } async computeHash({ algorithm, data, }: { algorithm: 'unsafeMD5' | 'unsafeSHA1' | 'SHA512' | 'SHA256'; data: Uint8Array; }) { let hash; switch (algorithm) { case 'SHA512': hash = await SHA512(data); return hash; case 'SHA256': hash = await SHA256(data); return hash; case 'unsafeSHA1': hash = await unsafeSHA1(data); return hash; case 'unsafeMD5': hash = await unsafeMD5(data); return hash; default: throw new Error(`Unsupported algorithm: ${algorithm}`); } } // this function may be merged with `computeHash` once we add streaming support to all/most hash algos async computeHashStream({ algorithm, dataStream }: ComputeHashStreamOptions) { let hashStream; switch (algorithm) { case 'unsafeSHA1': hashStream = await unsafeSHA1(dataStream); return hashStream; default: throw new Error(`Unsupported algorithm: ${algorithm}`); } } /** * Replace the User IDs of the target key to match those of the source key. * NOTE: this function mutates the target key in place, and does not update binding signatures. */ async replaceUserIDs({ sourceKey: sourceKeyReference, targetKey: targetKeyReference, }: { sourceKey: KeyReference; targetKey: PrivateKeyReference; }) { const sourceKey = this.keyStore.get(sourceKeyReference._idx); const targetKey = this.keyStore.get(targetKeyReference._idx); if (targetKey.getFingerprint() !== sourceKey.getFingerprint()) { throw new Error('Cannot replace UserIDs of a different key'); } targetKey.users = sourceKey.users.map((sourceUser) => { // @ts-ignore missing .clone() definition const destUser = sourceUser.clone(); destUser.mainKey = targetKey; return destUser; }); } /** * Return a new key reference with changed userIDs. * Aside from the userIDs, the two keys are identical (e.g. same binding signatures). * The original key is not modified. 
*/ async cloneKeyAndChangeUserIDs({ privateKey: privateKeyRef, userIDs, }: { privateKey: PrivateKeyReference; userIDs: MaybeArray<UserID>; }) { const originalKey = this.keyStore.get(privateKeyRef._idx) as PrivateKey; // @ts-ignore missing clone declaration const updatedKey: PrivateKey = originalKey.clone(true); // To preserve the original key signatures that are not involved with userIDs, // we first reformat the key to add & sign the new userIDs, then replace the userIDs of the original key. // To improve reformatting performance, we can drop subkeys beforehand, as they are not needed for the UserID const updatedSubkeys = updatedKey.subkeys; // NB: the private key params of the returned reformatted keys point to the same ones as `updatedKey`. // Hence, they will be cleared once the corresponding ref is cleared by the app -- no need to clear them now. const { publicKey: temporaryKeyWithNewUsers } = await reformatKey({ privateKey: updatedKey, userIDs, format: 'object', }); updatedKey.subkeys = updatedSubkeys; // same process as `updateUserIDs` updatedKey.users = temporaryKeyWithNewUsers.users.map((newUser) => { // @ts-ignore missing .clone() definition const destUser = newUser.clone(); destUser.mainKey = updatedKey; return destUser; }); const keyStoreID = this.keyStore.add(updatedKey); return getPrivateKeyReference(updatedKey, keyStoreID); } } export interface ApiInterface extends Omit<Api, 'keyStore'> {}
7,248
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker/worker.ts
import { expose, transferHandlers } from 'comlink'; // apply polyfills import 'core-js/stable'; import { Api as WorkerApi } from './api'; import { workerTransferHandlers } from './transferHandlers'; workerTransferHandlers.forEach(({ name, handler }) => transferHandlers.set(name, handler)); WorkerApi.init(); expose(WorkerApi);
7,249
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker/workerPool.ts
import { Remote, releaseProxy, transferHandlers, wrap } from 'comlink'; import { getIsNetworkError } from '@proton/shared/lib/api/helpers/apiErrorHelper'; import { captureMessage } from '@proton/shared/lib/helpers/sentry'; import type { Api as CryptoApi, ApiInterface as CryptoApiInterface } from './api'; import { mainThreadTransferHandlers } from './transferHandlers'; export interface WorkerPoolInterface extends CryptoApiInterface { /** * Setup worker pool (singleton instance): * create and start workers, and initializes internal Crypto API (incl. pmcrypto and OpenPGP.js) * @param options.poolSize - number of workers to start; defaults to `Navigator.hardwareConcurrency()` if available, otherwise to 1. */ init(options?: { poolSize?: number }): Promise<void>; /** * Close all workers, after clearing their internal key store. * After the pool has been destroyed, it is possible to `init()` it again. */ destroy(): Promise<void>; } const networkErrorReporter = (err: Error) => { if (getIsNetworkError(err)) { captureMessage('Network error in crypto worker', { level: 'info', extra: { message: err.message }, }); } throw err; }; // Singleton worker pool. export const CryptoWorkerPool: WorkerPoolInterface = (() => { let workerPool: Remote<CryptoApi>[] | null = null; let i = -1; const initWorker = async () => { // Webpack static analyser is not especially powerful at detecting web workers that require bundling, // see: https://github.com/webpack/webpack.js.org/issues/4898#issuecomment-823073304. // Harcoding the path here is the easiet way to get the worker to be bundled properly. 
const RemoteApi = wrap<typeof CryptoApi>( new Worker( new URL( /* webpackChunkName: "crypto-worker" */ './worker.ts', import.meta.url ) ) ); const worker = await new RemoteApi(); return worker; }; const destroyWorker = async (worker: Remote<CryptoApi>) => { await worker?.clearKeyStore(); worker?.[releaseProxy](); }; const getWorker = (): Remote<CryptoApi> => { if (workerPool == null) { throw new Error('Uninitialised worker pool'); } i = (i + 1) % workerPool.length; return workerPool[i]; }; // The return type is technically `Remote<CryptoApi>[]` but that removes some type inference capabilities that are // useful to type-check the internal worker pool functions. const getAllWorkers = (): CryptoApi[] => { if (workerPool == null) { throw new Error('Uninitialised worker pool'); } return workerPool as any as CryptoApi[]; }; return { init: async ({ poolSize = navigator.hardwareConcurrency || 1 } = {}) => { if (workerPool !== null) { throw new Error('worker pool already initialised'); } // We load one worker early to ensure the browser serves the cached resources to the rest of the pool workerPool = [await initWorker()]; if (poolSize > 1) { workerPool = workerPool.concat( await Promise.all(new Array(poolSize - 1).fill(null).map(() => initWorker())) ); } mainThreadTransferHandlers.forEach(({ name, handler }) => transferHandlers.set(name, handler)); }, destroy: async () => { workerPool && (await Promise.all(workerPool.map(destroyWorker))); workerPool = null; }, // @ts-ignore marked as non-callable, unclear why, might be due to a limitation of type Remote encryptMessage: (opts) => getWorker().encryptMessage(opts).catch(networkErrorReporter), decryptMessage: (opts) => getWorker().decryptMessage(opts).catch(networkErrorReporter), decryptMessageLegacy: (opts) => getWorker().decryptMessageLegacy(opts).catch(networkErrorReporter), // @ts-ignore marked as non-callable, unclear why, might be due to a limitation of type Remote signMessage: (opts) => 
getWorker().signMessage(opts).catch(networkErrorReporter), // @ts-ignore marked as non-callable, unclear why, might be due to a limitation of type Remote verifyMessage: (opts) => getWorker().verifyMessage(opts), verifyCleartextMessage: (opts) => getWorker().verifyCleartextMessage(opts).catch(networkErrorReporter), processMIME: (opts) => getWorker().processMIME(opts).catch(networkErrorReporter), computeHash: (opts) => getWorker().computeHash(opts).catch(networkErrorReporter), computeHashStream: (opts) => getWorker().computeHashStream(opts).catch(networkErrorReporter), generateSessionKey: (opts) => getWorker().generateSessionKey(opts).catch(networkErrorReporter), generateSessionKeyForAlgorithm: (opts) => getWorker().generateSessionKeyForAlgorithm(opts).catch(networkErrorReporter), encryptSessionKey: (opts) => getWorker().encryptSessionKey(opts).catch(networkErrorReporter), decryptSessionKey: (opts) => getWorker().decryptSessionKey(opts).catch(networkErrorReporter), importPrivateKey: async (opts) => { const [first, ...rest] = getAllWorkers(); const result = await first.importPrivateKey(opts).catch(networkErrorReporter); await Promise.all(rest.map((worker) => worker.importPrivateKey(opts, result._idx))); return result; }, importPublicKey: async (opts) => { const [first, ...rest] = getAllWorkers(); const result = await first.importPublicKey(opts).catch(networkErrorReporter); await Promise.all(rest.map((worker) => worker.importPublicKey(opts, result._idx))); return result; }, generateKey: async (opts) => { const [first, ...rest] = getAllWorkers(); const keyReference = await first.generateKey(opts).catch(networkErrorReporter); const key = await first.exportPrivateKey({ privateKey: keyReference, passphrase: null, format: 'binary' }); await Promise.all( rest.map((worker) => worker.importPrivateKey({ binaryKey: key, passphrase: null }, keyReference._idx)) ); return keyReference; }, reformatKey: async (opts) => { const [first, ...rest] = getAllWorkers(); const keyReference = 
await first.reformatKey(opts).catch(networkErrorReporter); const key = await first.exportPrivateKey({ privateKey: keyReference, passphrase: null, format: 'binary' }); await Promise.all( rest.map((worker) => worker.importPrivateKey({ binaryKey: key, passphrase: null }, keyReference._idx)) ); return keyReference; }, generateE2EEForwardingMaterial: (opts) => getWorker().generateE2EEForwardingMaterial(opts).catch(networkErrorReporter), doesKeySupportE2EEForwarding: async (opts) => getWorker().doesKeySupportE2EEForwarding(opts).catch(networkErrorReporter), isE2EEForwardingKey: async (opts) => getWorker().isE2EEForwardingKey(opts).catch(networkErrorReporter), replaceUserIDs: async (opts) => { await Promise.all(getAllWorkers().map((worker) => worker.replaceUserIDs(opts))); }, cloneKeyAndChangeUserIDs: async (opts) => { const [first, ...rest] = getAllWorkers(); const keyReference = await first.cloneKeyAndChangeUserIDs(opts).catch(networkErrorReporter); const key = await first.exportPrivateKey({ privateKey: keyReference, passphrase: null, format: 'binary' }); await Promise.all( rest.map((worker) => worker.importPrivateKey({ binaryKey: key, passphrase: null }, keyReference._idx)) ); return keyReference; }, exportPublicKey: (opts) => getWorker().exportPublicKey(opts).catch(networkErrorReporter), exportPrivateKey: (opts) => getWorker().exportPrivateKey(opts).catch(networkErrorReporter), clearKeyStore: async () => { await Promise.all(getAllWorkers().map((worker) => worker.clearKeyStore())); }, clearKey: async (opts) => { await Promise.all(getAllWorkers().map((worker) => worker.clearKey(opts))); }, isExpiredKey: (opts) => getWorker().isExpiredKey(opts).catch(networkErrorReporter), isRevokedKey: (opts) => getWorker().isRevokedKey(opts).catch(networkErrorReporter), canKeyEncrypt: (opts) => getWorker().canKeyEncrypt(opts).catch(networkErrorReporter), getSHA256Fingerprints: (opts) => getWorker().getSHA256Fingerprints(opts), getMessageInfo: (opts) => 
getWorker().getMessageInfo(opts).catch(networkErrorReporter), getKeyInfo: (opts) => getWorker().getKeyInfo(opts).catch(networkErrorReporter), getSignatureInfo: (opts) => getWorker().getSignatureInfo(opts).catch(networkErrorReporter), getArmoredKeys: (opts) => getWorker().getArmoredKeys(opts), getArmoredSignature: (opts) => getWorker().getArmoredSignature(opts), getArmoredMessage: (opts) => getWorker().getArmoredMessage(opts), } as WorkerPoolInterface; // casting needed to 'reuse' CryptoApi's parametric types declarations and preserve dynamic inference of // the output types based on the input ones. })();
7,250
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker/transferHandlers/index.ts
import type { TransferHandler } from 'comlink'; import type { ComputeHashStreamOptions, KeyReference } from '../api.models'; import { ReadableStreamSerializer, SerializeWebStreamTypes } from './streamHandler'; // return interface with same non-function fields as T, and with function fields type converted to their return type // e.g. ExtractFunctionReturnTypes<{ foo: () => string, bar: 3 }> returns { foo: string, bar: 3 } type ExtractFunctionReturnTypes<T> = { [I in keyof T]: T[I] extends (...args: any) => any ? ReturnType<T[I]> : T[I] extends (infer A)[] ? ExtractFunctionReturnTypes<A>[] : T[I]; // recurse on array fields }; // ExtractFunctionReturnTypes cannot keep track of fixed length of `_keyContentHash` so we explicitly re-declare type SerializedKeyReference = ExtractFunctionReturnTypes<KeyReference> & { _keyContentHash: [string, string] }; const KeyReferenceSerializer = { canHandle: (obj: any): obj is KeyReference => typeof obj === 'object' && obj._idx !== undefined && obj.isPrivate !== undefined, // NB: careful not to confuse with KeyInfo object serialize: (keyReference: KeyReference): SerializedKeyReference => ({ // store values directly, convert back to function when deserialising ...keyReference, isPrivate: keyReference.isPrivate(), getFingerprint: keyReference.getFingerprint(), getKeyID: keyReference.getKeyID(), getKeyIDs: keyReference.getKeyIDs(), getAlgorithmInfo: keyReference.getAlgorithmInfo(), getCreationTime: keyReference.getCreationTime(), getExpirationTime: keyReference.getExpirationTime(), getUserIDs: keyReference.getUserIDs(), isWeak: keyReference.isWeak(), equals: false, // unused, function will be reconstructed based on ._keyContentHash subkeys: keyReference.subkeys.map((subkey) => ({ getAlgorithmInfo: subkey.getAlgorithmInfo(), getKeyID: subkey.getKeyID(), })), }), deserialize: (serialized: SerializedKeyReference): KeyReference => ({ ...serialized, isPrivate: () => serialized.isPrivate, getFingerprint: () => serialized.getFingerprint, 
getKeyID: () => serialized.getKeyID, getKeyIDs: () => serialized.getKeyIDs, getAlgorithmInfo: () => serialized.getAlgorithmInfo, getCreationTime: () => serialized.getCreationTime, getExpirationTime: () => serialized.getExpirationTime, getUserIDs: () => serialized.getUserIDs, isWeak: () => serialized.isWeak, equals: (otherKey, ignoreOtherCerts) => ignoreOtherCerts ? otherKey._keyContentHash[1] === serialized._keyContentHash[1] : otherKey._keyContentHash[0] === serialized._keyContentHash[0], subkeys: serialized.subkeys.map((subkey) => ({ getAlgorithmInfo: () => subkey.getAlgorithmInfo, getKeyID: () => subkey.getKeyID, })), }), }; const KeyOptionsSerializer = { _optionNames: [ 'verificationKeys', 'signingKeys', 'encryptionKeys', 'decryptionKeys', 'privateKey', 'key', 'recipientKeys', 'targetKey', 'sourceKey', 'forwarderKey', ], canHandle: (options: any): options is KeyReference | KeyReference[] => { if (typeof options !== 'object') { return false; } return KeyOptionsSerializer._optionNames.some((name) => options[name]); }, serialize: (options: any) => { const serializedOptions = { ...options }; KeyOptionsSerializer._optionNames.forEach((name) => { if (options[name]) { serializedOptions[name] = Array.isArray(options[name]) ? options[name].map(KeyReferenceSerializer.serialize) : KeyReferenceSerializer.serialize(options[name]); } }); return serializedOptions; }, deserialize: (serializedOptions: any) => { const options = { ...serializedOptions }; KeyOptionsSerializer._optionNames.forEach((name) => { if (serializedOptions[name]) { options[name] = Array.isArray(options[name]) ? 
serializedOptions[name].map(KeyReferenceSerializer.deserialize) : KeyReferenceSerializer.deserialize(serializedOptions[name]); } }); return options; }, }; type SerializedComputeHashStreamOptions = SerializeWebStreamTypes<ComputeHashStreamOptions>; const ComputeHashStreamOptionsSerializer = { canHandle: (input: any): input is ComputeHashStreamOptions => typeof input === 'object' && input.algorithm && ReadableStreamSerializer.canHandle(input.dataStream), serialize: ({ dataStream, ...rest }: ComputeHashStreamOptions): SerializedComputeHashStreamOptions => ({ ...rest, dataStream: ReadableStreamSerializer.serialize(dataStream), }), deserialize: ({ dataStream, ...rest }: SerializedComputeHashStreamOptions): ComputeHashStreamOptions => ({ ...rest, dataStream: ReadableStreamSerializer.deserialize(dataStream), }), }; type SerializedError = { isError: true; value: Pick<Error, 'message' | 'name' | 'stack'> }; const ErrorSerializer = { canHandle: (value: any) => typeof value === 'object' && (value instanceof Error || value.isError), serialize: ({ message, name, stack }: Error) => ({ isError: true, value: { message, name, stack }, }), deserialize: (serialized: SerializedError) => Object.assign(new Error(serialized.value.message), serialized.value), }; const ResultTranferer = { _binaryFieldNames: ['message', 'signature', 'signatures', 'encryptedSignature', 'sessionKey'], _errorFieldNames: ['errors', 'verificationErrors'], canHandle: (result: any): result is any => { if (typeof result !== 'object') { return false; } return ResultTranferer._binaryFieldNames.some((name) => result[name]); }, serialize: (result: any) => { const serializedResult = { ...result }; ResultTranferer._errorFieldNames.forEach((name) => { if (result[name]) { serializedResult[name] = result[name].map(ErrorSerializer.serialize); } }); return serializedResult; }, getTransferables: (result: any) => { const transferables = ResultTranferer._binaryFieldNames .filter((name) => result[name] instanceof Uint8Array) 
.map((name) => result[name].buffer); // 'signatures' are always in binary form return transferables.concat(result.signatures ? result.signatures.map((sig: Uint8Array) => sig.buffer) : []); }, deserialize: (serializedResult: any) => { const result = { ...serializedResult }; ResultTranferer._errorFieldNames.forEach((name) => { if (serializedResult[name]) { result[name] = serializedResult[name].map(ErrorSerializer.deserialize); } }); return result; }, }; type OneWayTransferHandler = { name: string; workerHandler: TransferHandler<any, any>; mainThreadHandler: TransferHandler<any, any>; }; type ExportedTransferHandler = { name: string; handler: TransferHandler<any, any> }; /** * Transfer handlers for data that needs to be transferred only in one direction (e.g. from the worker to the main thread). * NB: serializer still needs to be declared for recipient side too (comlink does not support implementing only the deserializer) */ const oneWayTransferHanders: OneWayTransferHandler[] = [ { name: 'Uint8Array', // automatically transfer Uint8Arrays from worker (but not vice versa) workerHandler: { canHandle: (input: any): input is Uint8Array => input instanceof Uint8Array, serialize: (bytes: Uint8Array) => [ bytes, [bytes.buffer], // transferables ], deserialize: (bytes) => bytes, }, mainThreadHandler: { canHandle: (input: any): input is Uint8Array => input instanceof Uint8Array, serialize: (bytes: Uint8Array) => [ bytes, [], // transferables: no transferring from main thread ], deserialize: (bytes) => bytes, }, }, { name: 'ComputeHashStreamOptions', // takes stream as input but returns a Uint8Array (responsibility of a different handler) workerHandler: { canHandle: ComputeHashStreamOptionsSerializer.canHandle, serialize: () => [undefined, []], // unused on worker side deserialize: ComputeHashStreamOptionsSerializer.deserialize, }, mainThreadHandler: { canHandle: (input: any): input is { dataStream: ReadableStream<Uint8Array> } => typeof input === 'object' && 
ReadableStreamSerializer.canHandle(input.dataStream), serialize: ({ dataStream, ...rest }) => { const serializedStreamPort = ReadableStreamSerializer.serialize(dataStream); return [ { dataStream: serializedStreamPort, ...rest }, [serializedStreamPort], // transferables ]; }, deserialize: () => {}, // unused on main thread side }, }, { name: 'encrypt/decrypt/sign/verifyResult', // result objects are already serialised, but we need to transfer all Uint8Arrays fields from worker workerHandler: { canHandle: ResultTranferer.canHandle, serialize: (result: any) => [ ResultTranferer.serialize(result), ResultTranferer.getTransferables(result), // transferables ], deserialize: (result) => result, // unused }, mainThreadHandler: { canHandle: ResultTranferer.canHandle, serialize: (result: any) => [result, []], // unused deserialize: ResultTranferer.deserialize, }, }, ]; /** * These transferHandles are needed to transfer some objects from and to the worker (either as returned data, or as arguments). * They are meant to be set both inside the worker and in the main thread. 
*/ const sharedTransferHandlers: ExportedTransferHandler[] = [ { name: 'KeyReference', handler: { canHandle: KeyReferenceSerializer.canHandle, serialize: (keyReference: KeyReference) => [ KeyReferenceSerializer.serialize(keyReference), [], // transferables ], deserialize: KeyReferenceSerializer.deserialize, }, }, { name: 'KeyOptions', // only passed by the main thread, but it's harmless to declare the same handler on both sides handler: { canHandle: KeyOptionsSerializer.canHandle, serialize: (options: object) => [ KeyOptionsSerializer.serialize(options), [], // transferables ], deserialize: KeyOptionsSerializer.deserialize, }, }, ]; // Handlers to be set by the worker export const workerTransferHandlers: ExportedTransferHandler[] = [ ...sharedTransferHandlers, ...oneWayTransferHanders.map(({ name, workerHandler }) => ({ name, handler: workerHandler })), ]; // Handlers to be set by the main thread export const mainThreadTransferHandlers: ExportedTransferHandler[] = [ ...sharedTransferHandlers, ...oneWayTransferHanders.map(({ name, mainThreadHandler }) => ({ name, handler: mainThreadHandler })), ];
7,251
0
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker
petrpan-code/ProtonMail/WebClients/packages/crypto/lib/worker/transferHandlers/streamHandler.ts
type Data = Uint8Array | string; type ChunkWithData<T> = { done: boolean; value?: T }; enum STREAM_CONTROL_TYPE { 'READ', 'CANCEL', } // Transfer a readable stream chunk by chunk using message channels export const ReadableStreamSerializer = { canHandle: (obj: any): obj is ReadableStream => typeof obj === 'object' && obj.getReader, serialize: (readableStream: ReadableStream<Uint8Array>): MessagePort => { const { port1, port2 } = new MessageChannel(); // wait to get the reader until the first chunk is requested // in case the user wants to cancel the stream before starting reading it let reader: ReadableStreamDefaultReader<Uint8Array> | null = null; port1.onmessage = async ({ data: { type } }) => { switch (type) { case STREAM_CONTROL_TYPE.READ: if (reader === null) { reader = readableStream.getReader(); } const dataChunk = await reader.read(); port1.postMessage(dataChunk, []); // no transferables break; case STREAM_CONTROL_TYPE.CANCEL: if (reader) { void reader.cancel(); } else { void readableStream.cancel(); } break; default: throw new Error('Unknown stream transfer control type'); } }; // Transfer the message channel to the caller's execution context return port2; // NB: the port is transferable and must be transferred }, deserialize: <T extends Data>(port: MessagePort): ReadableStream<T> => { // Convenience function to allow us to use async/await for messages coming down the port const nextPortMessage = () => new Promise<ChunkWithData<T>>((resolve) => { port.onmessage = ({ data: chunk }: { data: ChunkWithData<T> }) => { resolve(chunk); }; }); // Minimal proxy reader const portReader = { read: () => { port.postMessage({ type: STREAM_CONTROL_TYPE.READ }); // promise that will resolve with the chunk returned by the remote reader return nextPortMessage(); }, cancel: () => { port.postMessage({ type: STREAM_CONTROL_TYPE.CANCEL }); }, }; const reconstructedStream = new ReadableStream<T>({ async pull(controller) { const { done, value } = await portReader.read(); // When 
no more data needs to be consumed, close the stream if (done) { controller.close(); return; } // Enqueue the next data chunk into our target stream controller.enqueue(value); }, cancel() { portReader.cancel(); }, }); // // TODO? (not needed for now): make it iterable so it can be used in for-await-of statement // reconstructedStream[Symbol.asyncIterator] = () => portReader; return reconstructedStream; }, }; export type SerializeWebStreamTypes<T> = { [I in keyof T]: T[I] extends ReadableStream<Data> | undefined ? MessagePort : T[I]; };
7,252
0
petrpan-code/ProtonMail/WebClients/packages/crypto
petrpan-code/ProtonMail/WebClients/packages/crypto/test/karma.conf.js
const path = require('path');
const os = require('os');
const { firefox, chromium } = require('playwright');

// Point karma's launchers at the browser binaries bundled with playwright,
// so no system-wide Chrome/Firefox installation is needed.
process.env.CHROME_BIN = chromium.executablePath();
process.env.FIREFOX_BIN = firefox.executablePath();

/**
 * Karma does not automatically serve the bundled webworker asset generated by webpack,
 * so we need to manually reference and expose the webpack temporary output dir.
 * See: https://github.com/ryanclark/karma-webpack/issues/498#issuecomment-790040818
 */
const karmaWebpackOutputPath = path.join(os.tmpdir(), '_karma_webpack_') + Math.floor(Math.random() * 1000000);

module.exports = function (config) {
    config.set({
        // base path that will be used to resolve all patterns (eg. files, exclude)
        basePath: '..',

        // frameworks to use
        // available frameworks: https://www.npmjs.com/search?q=keywords:karma-adapter
        frameworks: ['mocha', 'webpack'],

        plugins: [
            'karma-mocha',
            'karma-chrome-launcher',
            'karma-firefox-launcher',
            'karma-webpack',
            'karma-mocha-reporter',
        ],

        // list of files / patterns to load in the browser
        files: [
            { pattern: 'test/**/!(karma.conf).*', watched: false },
            {
                // serve (but do not include) the webpack output dir, so the browser can fetch the bundled worker chunk
                pattern: `${karmaWebpackOutputPath}/**/*`,
                watched: false,
                included: false,
                served: true,
            },
        ],

        // list of files / patterns to exclude
        exclude: [],

        // preprocess matching files before serving them to the browser
        // available preprocessors: https://www.npmjs.com/search?q=keywords:karma-preprocessor
        preprocessors: {
            'test/**/*.*': 'webpack',
        },

        webpack: {
            output: {
                path: karmaWebpackOutputPath,
            },
            resolve: {
                fallback: {
                    stream: false,
                    buffer: false,
                },
                extensions: ['', '.js', '.ts'],
            },
            module: {
                rules: [
                    {
                        test: /\.ts?$/,
                        use: [
                            {
                                loader: 'ts-loader',
                                options: {
                                    compilerOptions: { noEmit: false },
                                    allowTsInNodeModules: true,
                                },
                            },
                        ],
                        // run ts-loader on pmcrypto (shipped as TS) while skipping the rest of node_modules
                        exclude: /node_modules\/(?!.*(pmcrypto))/,
                    },
                ],
            },
        },

        // available reporters: https://www.npmjs.com/search?q=keywords:karma-reporter
        reporters: ['mocha'],

        // web server port
        port: 9876,

        // enable / disable colors in the output (reporters and logs)
        colors: true,

        // level of logging
        // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
        logLevel: config.LOG_INFO,

        // enable / disable watching file and executing tests whenever any file changes
        autoWatch: false,

        customLaunchers: {
            ChromeHeadlessCI: {
                base: 'ChromeHeadless',
                // required to run headless Chrome inside unprivileged CI containers
                flags: ['--no-sandbox'],
            },
        },
        browsers: ['ChromeHeadlessCI', 'FirefoxHeadless'],

        // Continuous Integration mode
        // if true, Karma captures browsers, runs the tests and exits
        singleRun: true,

        // Concurrency level
        // how many browser instances should be started simultaneously
        concurrency: Infinity,

        client: {
            mocha: {
                // timeout for mocha tests, default is 2 seconds. Some streaming tests can take longer.
                timeout: 5000,
            },
        },
    });
};
7,253
0
petrpan-code/ProtonMail/WebClients/packages/crypto/test
petrpan-code/ProtonMail/WebClients/packages/crypto/test/proxy/helpers.spec.ts
import { use as chaiUse, expect } from 'chai';
import chaiAsPromised from 'chai-as-promised';

import { CryptoProxy, getMatchingSigningKey } from '../../lib';
import { Api as CryptoApi } from '../../lib/worker/api';

chaiUse(chaiAsPromised);

describe('CryptoProxy helpers', () => {
    before(() => {
        // run CryptoApi on the main thread (no worker pool); clear its key store when the endpoint is released
        CryptoProxy.setEndpoint(new CryptoApi(), (endpoint) => endpoint.clearKeyStore());
    });

    after(() => {
        void CryptoProxy.releaseEndpoint();
    });

    it('getMatchingSigningKey - it can get a matching primary key', async () => {
        // test key whose primary key signs directly (no subkeys)
        const keyWithoutSubkeys = `-----BEGIN PGP PRIVATE KEY BLOCK-----

xVgEYYqcWBYJKwYBBAHaRw8BAQdAesbhqiOxbLV+P9Dt8LV+Q8hRBLbwsSf6
emoCS30uQpEAAQDFgBruRj6Zqb0OULkaaNz+QK4+gvc006UtTgz2wdrP8xFv
zRE8ZW1haWwyQHRlc3QuY29tPsKMBBAWCgAdBQJhipxYBAsJBwgDFQgKBBYA
AgECGQECGwMCHgEAIQkQJCJW2HYCeYIWIQTdZGjv9WwTyL+azOUkIlbYdgJ5
gm9nAQDY//xzc2hy6Efz8NqDJeLg1lh2sZkKcMXP3L+CJbhWJQEAuI6UDakE
+XVcDsBS+CIi3qg74r/80Ysb7tmRC06znwA=
=I0d7
-----END PGP PRIVATE KEY BLOCK-----`;
        // test key with a dedicated signing subkey
        const keyWithSigningSubkey = `-----BEGIN PGP PRIVATE KEY BLOCK-----

xVgEYYqb5xYJKwYBBAHaRw8BAQdA0zCRw6gyovlI8V6pQoDtmAoIr7YPNPxm
jQa5PfiQq5gAAQDQ1o8+YXQg34FUNbbo+PUuRDAar37n9RFQiNrkH+vvlBHW
zRA8ZW1haWxAdGVzdC5jb20+wowEEBYKAB0FAmGKm+cECwkHCAMVCAoEFgAC
AQIZAQIbAwIeAQAhCRCqDK8y54tXERYhBELBCpl0aMYXdXBljKoMrzLni1cR
v44BAI826OYoikU8aMs6wBiHd/SVqPU/ZVLz5VUGriEkJoqGAPwLOztUuX1Q
zmtAq8mQUQjlrmAm50DctKQeug8rrn30BcdYBGGKm+cWCSsGAQQB2kcPAQEH
QGNOppjS4p71QAy6MvBX6JK9zt8YeUo7dm4b7RaFq0ejAAD/ZcyhjL8LEIZO
t/8qU7LJn+lxPSl6tFZ7TBgXj4RkldMQccLALwQYFgoACQUCYYqb5wIbAgCY
CRCqDK8y54tXEXYgBBkWCgAGBQJhipvnACEJEF5S2ZJhJACOFiEElQ0ZXBPe
9UZzI0KoXlLZkmEkAI6EuQD+JRU3Z+u6RHCRdKupZlLuzCFzWmvJvZGktcuQ
40bYgFQA/iwWv5vDkw8zTxw5GRTahnnp0shs/YOG4GgB6EHXom8FFiEEQsEK
mXRoxhd1cGWMqgyvMueLVxHYNAD+NaLEsrzFxvgu3c8nVN5sjVETTZZdHjly
wSeOoh9ocbsA/joCCpHxxH061g/tjEhP76tWJX17ShZ9wT7KZ6aPejoM
=FkBc
-----END PGP PRIVATE KEY BLOCK-----`;
        const key1 = await CryptoProxy.importPrivateKey({ armoredKey: keyWithSigningSubkey, passphrase: null });
        const key2 = await CryptoProxy.importPrivateKey({ armoredKey: keyWithoutSubkeys, passphrase: null });

        // detached signature issued by key1's signing subkey
        const signatureFromSubkey = await CryptoProxy.signMessage({
            textData: 'a message',
            signingKeys: key1,
            detached: true,
        });

        // detached signature issued by key2's primary key
        const signatureFromPrimaryKey = await CryptoProxy.signMessage({
            textData: 'a message',
            signingKeys: key2,
            detached: true,
        });

        // the helper must map each signature's issuer back to the corresponding imported key reference
        expect(
            await getMatchingSigningKey({ armoredSignature: signatureFromSubkey, keys: [key1, key2] })
        ).to.deep.equal(key1);
        expect(
            await getMatchingSigningKey({ armoredSignature: signatureFromPrimaryKey, keys: [key1, key2] })
        ).to.deep.equal(key2);
    });
});
7,254
0
petrpan-code/ProtonMail/WebClients/packages/crypto/test
petrpan-code/ProtonMail/WebClients/packages/crypto/test/proxy/proxy.spec.ts
import { use as chaiUse, expect } from 'chai';
import chaiAsPromised from 'chai-as-promised';

import { CryptoApiInterface, CryptoProxy, VERIFICATION_STATUS, updateServerTime } from '../../lib';
import { Api as CryptoApi } from '../../lib/worker/api';

chaiUse(chaiAsPromised);

describe('CryptoProxy', () => {
    // shared main-thread endpoint; individual tests set and release it
    let api = new CryptoApi();

    it('setEndpoint - should throw if already set', async () => {
        CryptoProxy.setEndpoint(api);
        expect(() => CryptoProxy.setEndpoint(api)).to.throw(/already initialised/);
        await CryptoProxy.releaseEndpoint();
    });

    it('releaseEndpoint - should invoke callback', async () => {
        let called = false;
        CryptoProxy.setEndpoint(api, async () => {
            called = true;
        });
        expect(called).to.be.false;
        await CryptoProxy.releaseEndpoint();
        expect(called).to.be.true;
    });

    it('should use serverTime()', async () => {
        let passedDate = null;
        // minimal mock endpoint that only records the `date` forwarded by the proxy
        const mockApi: CryptoApiInterface = {
            generateKey: async ({ date }) => {
                passedDate = date;
                return {};
            },
        } as CryptoApiInterface;
        CryptoProxy.setEndpoint(mockApi);

        const now = new Date();
        const zero = new Date(0);
        updateServerTime(zero);
        // we don't care about returned value
        await CryptoProxy.generateKey({ userIDs: [], date: undefined }); // explicitly passing undefined should not overwrite the server time
        updateServerTime(now); // restore current time

        // the proxy is expected to pass the server time at each function call.
        // `generateKey` also applies an offset to account for time skew across servers.
        expect(passedDate).to.be.lessThan(zero);

        await CryptoProxy.releaseEndpoint();
    });

    it('verifyMessage() - should verify signature over message with trailing spaces incorrectly normalised', async () => {
        CryptoProxy.setEndpoint(api);

        const armoredKey = `-----BEGIN PGP PRIVATE KEY BLOCK-----

xVgEYvJTNBYJKwYBBAHaRw8BAQdAS4QHqABRYAf5eCH/iY6Q4AfdDqLrKYQz
TND/5puZH90AAP9sYypq0Tf5xhhmMdf1XGtLtTyBFnNFx+k4vxfuAjEwuQ++
zQ48dGVzdEB0ZXN0Lml0PsKMBBAWCgAdBQJi8lM0BAsJBwgDFQgKBBYAAgEC
GQECGwMCHgEAIQkQ5+2ErLwEs+cWIQQ8a87Tc2dD9ijDVZvn7YSsvASz522q
AQDXDqV6QtBVnL7kLXGB/V1xtyEXKPXtfyr1tp0mf2k2UwEAx+0YqWlM76JW
/SFs2nnwwFLMW8uhrssQcmTTDScVCwTHXQRi8lM0EgorBgEEAZdVAQUBAQdA
jYyYTDfSC6KeWzAev0CVq1LRsf7FXytrTashagvGXwsDAQgHAAD/a96w3SfZ
NVbSgVY/O63vGqT5pLSRfun6r5k8SVO+bEASqsJ4BBgWCAAJBQJi8lM0AhsM
ACEJEOfthKy8BLPnFiEEPGvO03NnQ/Yow1Wb5+2ErLwEs+eJMgD9HoKKixWh
oXrxfym9/JTYmc0nCAflvWrDYLBLj6EWKaUA/1N/Ai8kp2dBlhHclTMQDf9K
djRcL8vRk+sHWQXCJ1UH
=M5+8
-----END PGP PRIVATE KEY BLOCK-----
`;
        // standard cleartext message signature
        const signatureOverStrippedWhitespace = `-----BEGIN PGP SIGNATURE-----

wnUEARYKAAYFAmLyVbgAIQkQ5+2ErLwEs+cWIQQ8a87Tc2dD9ijDVZvn7YSs
vASz55e1AQDiYW/fvrwBzGC400v+0SYEKK5bZoPppQ5R9rGfcCDf2wD/QVmi
G9IB1OYHAKtneAqnZexj3JgnU1gTWMu6jMsUXQ8=
=Lv3B
-----END PGP SIGNATURE-----
`;
        // signature over data incorrectly normalised (trailing whitespace not stripped)
        const signatureOverTrailingWhitespace = `-----BEGIN PGP SIGNATURE-----

wnUEARYKAAYFAmLyU78AIQkQ5+2ErLwEs+cWIQQ8a87Tc2dD9ijDVZvn7YSs
vASz5yDVAP4kh41TVuc/r5hsEpuCGpDk1D6lJr/1uXvL/BqsQJPJ8gD/f23y
tE1tRB5+iYHzBnQVEeKN7T12E5zo1HShM7ntSgE=
=jcw/
-----END PGP SIGNATURE-----
`;
        // note the trailing space after `FN;PREF=1:` — the value under test
        const textData = 'BEGIN:VCARD\r\nVERSION:4.0\r\nFN;PREF=1: \r\nEND:VCARD';
        const verificationKeys = await CryptoProxy.importPublicKey({ armoredKey });

        const { verified, data: verifiedData } = await CryptoProxy.verifyMessage({
            textData,
            armoredSignature: signatureOverStrippedWhitespace,
            verificationKeys,
            stripTrailingSpaces: true,
        });
        expect(verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID);
        // confirm data was normalised as expected
        expect(verifiedData).to.equal('BEGIN:VCARD\nVERSION:4.0\nFN;PREF=1:\nEND:VCARD');

        // test fallback verification
        const { verified: verifiedFallback, data: verifiedDataFallback } = await CryptoProxy.verifyMessage({
            textData,
            armoredSignature: signatureOverTrailingWhitespace,
            verificationKeys,
            stripTrailingSpaces: true,
        });
        expect(verifiedFallback).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID);
        // confirm that normalisation was not applied
        expect(verifiedDataFallback).to.equal(textData);

        await CryptoProxy.releaseEndpoint();
    });
});
7,255
0
petrpan-code/ProtonMail/WebClients/packages/crypto/test
petrpan-code/ProtonMail/WebClients/packages/crypto/test/worker/decryptMessageLegacy.data.ts
export const testPrivateKeyLegacy = `-----BEGIN PGP PRIVATE KEY BLOCK----- Version: OpenPGP.js v0.9.0 Comment: http://openpgpjs.org xcMGBFSjdRkBB/9slBPGNrHAMbYT71AnxF4a0W/fcrzCP27yd1nte+iUKGyh yux3xGQRIHrwB9zyYBPFORXXwaQIA3YDH73YnE0FPfjh+fBWENWXKBkOVx1R efPTytGIyATFtLvmN1D65WkvnIfBdcOc7FWj6N4w5yOajpL3u/46Pe73ypic he10XuwO4198q/8YamGpTFgQVj4H7QbtuIxoV+umIAf96p9PCMAxipF+piao D8LYWDUCK/wr1tSXIkNKL+ZCyuCYyIAnOli7xgIlKNCWvC8csuJEYcZlmf42 /iHyrWeusyumLeBPhRABikE2ePSo+XI7LznD/CIrLhEk6RJT31+JR0NlABEB AAH+CQMIGhfYEFuRjVpgaSOmgLetjNJyo++e3P3RykGb5AL/vo5LUzlGX95c gQWSNyYYBo7xzDw8K02dGF4y9Hq6zQDFkA9jOI2XX/qq4GYb7K515aJZwnuF wQ+SntabFrdty8oV33Ufm8Y/TSUP/swbOP6xlXIk8Gy06D8JHW22oN35Lcww LftEo5Y0rD+OFlZWnA9fe/Q6CO4OGn5DJs0HbQIlNPU1sK3i0dEjCgDJq0Fx 6WczXpB16jLiNh0W3X/HsjgSKT7Zm3nSPW6Y5mK3y7dnlfHt+A8F1ONYbpNt RzaoiIaKm3hoFKyAP4vAkto1IaCfZRyVr5TQQh2UJO9S/o5dCEUNw2zXhF+Z O3QQfFZgQjyEPgbzVmsc/zfNUyB4PEPEOMO/9IregXa/Ij42dIEoczKQzlR0 mHCNReLfu/B+lVNj0xMrodx9slCpH6qWMKGQ7dR4eLU2+2BZvK0UeG/QY2xe IvLLLptm0IBbfnWYZOWSFnqaT5NMN0idMlLBCYQoOtpgmd4voND3xpBXmTIv O5t4CTqK/KO8+lnL75e5X2ygZ+f1x6tPa/B45C4w+TtgITXZMlp7OE8RttO6 v+0Fg6vGAmqHJzGckCYhwvxRJoyndRd501a/W6PdImZQJ5bPYYlaFiaF+Vxx ovNb7AvUsDfknr80IdzxanKq3TFf+vCmNWs9tjXgZe0POwFZvjTdErf+lZcz p4lTMipdA7zYksoNobNODjBgMwm5H5qMCYDothG9EF1dU/u/MOrCcgIPFouL Z/MiY665T9xjLOHm1Hed8LI1Fkzoclkh2yRwdFDtbFGTSq00LDcDwuluRM/8 J6hCQQ72OT7SBtbCVhljbPbzLCuvZ8mDscvardQkYI6x7g4QhKLNQVyVk1nA N4g59mSICpixvgihiFZbuxYjYxoWJMJvzQZVc2VySUTCwHIEEAEIACYFAlSj dSQGCwkIBwMCCRB9LVPeS8+0BAQVCAIKAxYCAQIbAwIeAQAAFwoH/ArDQgdL SnS68BnvnQy0xhnYMmK99yc+hlbWuiTJeK3HH+U/EIkT5DiFiEyE6YuZmsa5 9cO8jlCN8ZKgiwhDvb6i4SEa9f2gar1VCPtC+4KCaFa8esp0kdSjTRzP4ZLb QPrdbfPeKoLoOoaKFH8bRVlPCnrCioHTBTsbLdzg03mcczusZomn/TKH/8tT OctX7CrlB+ewCUc5CWL4mZqRFjAMSJpogj7/4jEVHke4V/frKRtjvQNDcuOo PPU+fVpHq4ILuv7pYF9DujAIbLgWN/tdE4Goxsrm+aCUyylQ2P55Vb5mhAPu CLYXqSELPi99/NKEM9xhLa/1HwdTwQ/1X0zHwwYEVKN1JAEH/3XCsZ/W7fnw zMbkE+rMUlo1+KbX+ltEG7nAwP+Q8NrwhbwhmpA3bHM3bhSdt0CO4mRx4oOR 
cqeTNjFftQzPxCbPTmcTCupNCODOK4rnEn9i9lz7/JtkOf55+/oHbx+pjvDz rA7u+ugNHzDYTd+nh2ue99HWoSZSEWD/sDrp1JEN8M0zxODGYfO/Hgr5Gnnp TEzDzZ0LvTjYMVcmjvBhtPTNLiQsVakOj1wTLWEgcna2FLHAHh0K63snxAjT 6G1oF0Wn08H7ZP5/WhiMy1Yr+M6N+hsLpOycwtwBdjwDcWLrOhAAj3JMLI6W zFS6SKUr4wxnZWIPQT7TZNBXeKmbds8AEQEAAf4JAwhPB3Ux5u4eB2CqeaWy KsvSTH/D1o2QpWujempJ5KtCVstyV4bF1JZ3tadOGOuOpNT7jgcp/Et2VVGs nHPtws9uStvbY8XcZYuu+BXYEM9tkDbAaanS7FOvh48F8Qa07IQB6JbrpOAW uQPKtBMEsmBqpyWMPIo856ai1Lwp6ZYovdI/WxHdkcQMg8Jvsi2DFY827/ha 75vTnyDx0psbCUN+kc9rXqwGJlGiBdWmLSGW1cb9Gy05KcAihQmXmp9YaP9y PMFPHiHMOLn6HPW1xEV8B1jHVF/BfaLDJYSm1q3aDC9/QkV5WLeU7DIzFWN9 JcMsKwoRJwEf63O3/CZ39RHd9qwFrd+HPIlc7X5Pxop16G1xXAOnLBucup90 kYwDcbNvyC8TKESf+Ga+Py5If01WhgldBm+wgOZvXnn8SoLO98qAotei8MBi kI/B+7cqynWg4aoZZP2wOm/dl0zlsXGhoKut2Hxr9BzG/WdbjFRgbWSOMawo yF5LThbevNLZeLXFcT95NSI2HO2XgNi4I0kqjldY5k9JH0fqUnlQw87CMbVs TUS78q6IxtljUXJ360kfQh5ue7cRdCPrfWqNyg1YU3s7CXvEfrHNMugES6/N zAQllWz6MHbbTxFz80l5gi3AJAoB0jQuZsLrm4RB82lmmBuWrQZh4MPtzLg0 HOGixprygBjuaNUPHT281Ghe2UNPpqlUp8BFkUuHYPe4LWSB2ILNGaWB+nX+ xmvZMSnI4kVsA8oXOAbg+v5W0sYNIBU4h3nk1KOGHR4kL8fSgDi81dfqtcop 2jzolo0yPMvcrfWnwMaEH/doS3dVBQyrC61si/U6CXLqCS/w+8JTWShVT/6B NihnIf1ulAhSqoa317/VuYYr7hLTqS+D7O0uMfJ/1SL6/AEy4D1Rc7l8Bd5F ud9UVvXCwF8EGAEIABMFAlSjdSYJEH0tU95Lz7QEAhsMAACDNwf/WTKH7bS1 xQYxGtPdqR+FW/ejh30LiPQlrs9AwrBk2JJ0VJtDxkT3FtHlwoH9nfd6YzD7 ngJ4mxqePuU5559GqgdTKemKsA2C48uanxJbgOivivBI6ziB87W23PDv7wwh 4Ubynw5DkH4nf4oJR2K4H7rN3EZbesh8D04A9gA5tBQnuq5L+Wag2s7MpWYl ZrvHh/1xLZaWz++3+N4SfaPTH8ao3Qojw/Y+OLGIFjk6B/oVEe9ZZQPhJjHx gd/qu8VcYdbe10xFFvbiaI/RS6Fs7JRSJCbXE0h7Z8n4hQIP1y6aBZsZeh8a PPekG4ttm6z3/BqqVplanIRSXlsqyp6J8A== =Pyb1 -----END PGP PRIVATE KEY BLOCK----- `; export const testMessageEncryptedLegacy = `---BEGIN ENCRYPTED 
MESSAGE---esK5w7TCgVnDj8KQHBvDvhJObcOvw6/Cv2/CjMOpw5UES8KQwq/CiMOpI3MrexLDimzDmsKqVmwQw7vDkcKlRgXCosOpwoJgV8KEBCslSGbDtsOlw5gow7NxG8OSw6JNPlYuwrHCg8K5w6vDi8Kww5V5wo/Dl8KgwpnCi8Kww7nChMKdw5FHwoxmCGbCm8O6wpDDmRVEWsO7wqnCtVnDlMKORDbDnjbCqcOnNMKEwoPClFlaw6k1w5TDpcOGJsOUw5Unw5fCrcK3XnLCoRBBwo/DpsKAJiTDrUHDuGEQXz/DjMOhTCN7esO5ZjVIQSoFZMOyF8Kgw6nChcKmw6fCtcOBcW7Ck8KJwpTDnCzCnz3DjFY7wp5jUsOhw7XDosKQNsOUBmLDksKzPcO4fE/Dmw1GecKew4/CmcOJTFXDsB5uMcOFd1vDmX9ow4bDpCPDoU3Drw8oScKOXznDisKfYF3DvMKoEy0DDmzDhlHDjwIyC8OzRS/CnEZ4woM9w5cnw51fw6MZMAzDk8O3CDXDoyHDvzlFwqDCg8KsTnAiaMOsIyfCmUEaw6nChMK5TMOxG8KEHUNIwo1seMOXw5HDhyVawrzCr8KmFWHDpMO3asKpwrQbbMOlwoMew4t1Jz51wp9Jw6kGWcOzc8KgwpLCpsOHOMOgYB3DiMOxLcOQB8K7AcOyWF3CmnwfK8Kxw6XDm2TCiT/CnVTCg8Omw7Ngwp3CuUAHw6/CjRLDgcKsU8O/w6gXJ0cIw6pZMcOxEWETwpd4w58Mwr5SBMKORQjCi3FYcULDgx09w5M7SH7DrMKrw4gnXMKjwqUrBMOLwqQyF0nDhcKuwqTDqsO2w7LCnGjCvkbDgDgcw54xAkEiQMKUFlzDkMOew73CmkU4wrnCjw3DvsKaW8K0InA+w4sPSXfDuhbClMKgUcKeCMORw5ZYJcKnNEzDoMOhw7MYCX4DwqIQwoHCvsOaB1UAI8KVw6LCvcOTw53CuSgow4kZdHw5aRkYw7ZyV8OsP0LCh8KnwpIuw4p1NisoEcKcwrjDhcOtMzdvw5rDmsK3IAdAw7M4J8K+w6zCmR3CuMKUw4lqw6osPMObw53Dg8K3wqLCrsKZwr8mPcK4w4QWw5LCnwZeH1bDgwwiXcKbUhHDk1DDk0MLwoDDqMKXw5skNsKAAcOFw77Di8KNGCBzP8OcwrI5wodQQwQyw5V0wrInwrPDt8O+T8KbNsKVw7Mzw7HCsMOjwpcewoPCuMOUEsOow6QZVDjDpgbDlMOBGDXCtMOmw6jDuMKfw4nDlWTDq8Kqd0TDvwPCpSzDlA4JO3EHwrlBWcK5w7DCscOwCMK2wpsvwrYNIcOgBBXChMK0w6nCosKWEVd+w7cEal5hIcO4SWrCu0TDrW5Yw4XCmBgCwpc7YVwIwqPCi8OlGDzDmyJ/woHCscOtw4zDuC7CpUXCrDAJwp7Cj8KxPX3CrhDCvVB2w7PCosKbw7F+V11hY8Omwq1eQcO8w4wcRMKBJ2LDgW/DomXDhwkgAlxmQcKew6HDq8Ouw6ASeG/DlcKgUcKmLMOowpQWNcKJJcKDa3XDksK/woHCo3d6wrHDpMOqwqs/UUXCjUpnwrHCmsOyJx4bwoHChAnDi0TCpjLDrBvCvEghw5VtfhPCk8K5KsKIw75FCsOyDsKtV17CicOjwqAnF8OHHC0qMsOEwrgEwr13c8KZw4fDn8KXw73CksKAw4QTGRgIG8KMMXwpwrRBT2DDq8K3AsOQXl/DqMKYMivClsKiXcOhGkvDmsK9w77Cmmpvwrhsd8Kaw7bDgQ/DuCU2CyTDtjnCgn/DiMOtSyPDnsOfVTstccO6EVXDrj03MUHDvDDCgsO7BFQFEX3DszIyw7Rsw7pNwpjCs8OCLR9UbsOlw5USw73DiWJqVXTCl2tFw7FaAcKaw7l5a3Mvw5TCpMKCwpbDi3fCi8KHwrfDugUZwo5hw7fChsKDw5ZhPjA7w7HDjc
O9wrrCjUbDoy4JXA1JICRDw49UNsOYOsK9FGE5wqhAw67DumnDqW0cwqbCu8OedEbDqcOfw50MVH8twpVLH8O3LsKvacKJw75xTMKkOcOJw4/DvsOYwqRwZcOnwqfCm2XCnRJFwqEgX8KLPsKfwpQWw6nChm82w6hME10KTRhGw5LCj1stPiXClsO8w7rCocOLw6lFw7tAZ8K0O3wswpZ4wqvCmMOFwpzDhMKVRRQjw53CikECPMOKZcOOwoAKcMK7WMO3K8Okw4bCjgrCisKLRsKewqzDvmtnw584wrtiw6RFVsKPecOpIhx7TsKzw4TCisKyw6nCqcK+w6fChsKxw5kWSsOgfD7CkRfCncKGKMOubsKoBA9Fe2YHwrx4aQNSG8Kpw5zDrMO1FMOPZcKSIVnDrHxOBsKyBcKmYwQMOl7CiRvCnDNVw7NaesOoPR3CrnQEwr9Xw600BSFYECnDgi1OFS7DoFYJw4M6wrzCog09WFPCmiHDogjDpQFjdsKKIsOWFsKXd0TDjXU3CsONRX3DssOrw4HDmX0Mw7rDiENvwpPCghsXacK2w6XCkMOICcKVw4nCkMO8RcOUw4zCn1VJw752RAUawqhdw5dEwqbDh0wAMH/DlTrChC/DosOoGsOPw5nClTcyw5XDlsKhNsKAcBINwpxUAi8Rw5Jvwpckwq4uBy0nw51dP2UGbidATX1FLMKFw5zDsQxewp3DlMKwwo3CrhBPJGR7cVHCnTUnwrDDksO0AcO5T3jCm245OnUVUT8WD1HDhTnCqnbCt8OjMDvCsAzCjsKSwoDDlDhtw7cFwpsDaS7CvVLDu0zDnlvDlMOEwrnCgVzCgcOZN8Oxwp0LSMKswq/DrMK9fcKTL1zDgcOvwofCtWAoL0IKR8OWwqpPw6QfVsKcwqxTXGEPKCFydX4Mw5jDmcOEWlPCgMKDPcOJw7HDgcOMahzCjMO7HyPDo8K3Y8OswqPDgSQ+w6wfw67Cr8O/w61oMsO+woTDrnECI2TDuMK5wrzDusOHw5/CosKFwrciQF3Csj5aw7DDpMKwZMK3Z8KlRBIcLcKvM2/CtBk8JMKWwqVyw6RNwoUhwoDCsXbCrD04wpQ4F8KOcMKIw7PDtMKqZRTCjsKSOMOKCMKYQ8OhwqZ1dGrChcKXLSnDiT7CrEjCihckNcOXw63CkUYpT8KTwq7CgMKiw7PCqmBzwq/Crz50XcKEGlLCrUBjw6ASVsObD8K9wpZ6eBHCi2FTMVcDSzvDgwtxw5ZJHlF5woDDtsKTwovChMOyYMKOSCt7w7hGDDsFaMOewrrCjRbDrGPDg2rCpsO3wo8IEMO9wqjCrG0mRXHDocKJwqQYdsKOw7UUwqIUwq/CqUlKW8ObwpcZGizCpgd4dAZBXMOYw5s5w6HDvkEgw6sbRxAwwoBSOyXCjDPDpsKlwrPCrl/DqsOswoJJDWzDp8Ocw5nDrE5FWm3DncKVwpnCqMKiwoDDmMONQcOEwpwRwonCsh0Tw7FCw6Nfw7U7wp7DnMKnfMOHCMOnw4TClcOVwrzCiiddUj3CmsOgwqvDhxfDjsOMWcKDZnvDocObw77Do1rDgMKHVsKCLcOXRMOHD0RNwpEdwozCrBnDqBYWwojCiVzCjTTCqcO5wqgAwqhhw7tnw5ZuOcOYNGTDiR1GAEzDuE0PeErDnlQlfsOjw6UGWUUNw6TCmgx8NMKzDMKgL8O3esKDwprDoTl8wrbDvVDCvU4Iw5sAwr/DugcoR8KMw4hNeMKSw7Jmw4rDjG8NbcO8w7jCs8OvfFXCoBBNfcOqNsK0EQLCncKPw53DrsOiwolvwqjCr8OZDsORw47DiyA+VcOMSg5wworDgGx0w7sgKMOyDMOyZRkgw43CqUHDicKfwpDCo8OII8KvKsOxDcKoFsOaw7HCgXTDssK7B8KIwoNcw4zCu8KBw4vCvFjDkWLDl8OyB8O/w4oYw5DCslzDk2kDw7jDgcOJw4jComXDkw
dfw61xw53Cv8KPf11iwq0kKsKDw7nCmiVNF0NqLMKvwqvDjhQ3ZXbDomvDs8OKQQ7CocOnwr1Fw7xZRMK6w41cw5DDgzzCthIoAMOBQcOPbcOPVx/Cm8OYw7pHwo/CvCxhCcKVw7vChShnw6rClUQ7w6dbZMOrw4hpw7lZXMOxw5pnUXHDiMOLDxrDiA/DtMKqw6zDjXRJwp07BsKEwoTClBHCritDYXgzT3RWDcOlw4lfw4Vbw7fCj8K0w4AnwqjCrxPDpCVXF8KbY8OMPwQvwqdaw6E8w4AHPcKbNGl8wpQMX2PDp0pJfcOyGsOUXkNww5jCg8Obwo7DryjCisKeYiQ/XUzDvRvDncOtCMKJwqxHw6LDh8KwwrV7LGPCkcKOIXbCv8KHwpnDi1keQkLDssOSw7XCk8K+w7YdSMKAQmbDo8KPw7xywpnCsgANNTJYScKkNAvDo8KZw6Ayw6tmC8KaTsKEbcOZTx3DilrDtUjDi8OWV8K/wrocwpNKLlYbbcOmPcKPwrvCsTpLey5Xw58XJBPCo8KEPWJrwqZJX1fCncKDw4AZw4hWw5pTw7pidlzDtMO6w7t9DcK+R8KefMOfETvCskgjOgHCqcK7UgHCgsOfwrt8bcKQw5FeZcOiw4Faw7hRTjDDocOuEMOoEm04NQTCrCjDvMOaNDV6V8OHc8OTdMOndCh7HMOqw7HDnlzCl3MqwpjDiiDDtcKmCknCuBcQwobDvcOUN2LDmsOeHMOmPMKeH0nCt0nDgsO8w73CkRDDmMOuacO9w5J1KsKswqY7UMKyHHzDjMOjw5QOSWUhw4jCpMKJw4DCtcKNdcKPLcOFJsOqQ14=---END ENCRYPTED MESSAGE---||---BEGIN ENCRYPTED RANDOM KEY--------BEGIN PGP MESSAGE----- Version: OpenPGP.js v0.9.0 Comment: http://openpgpjs.org wcBMA2tjJVxNCRhtAQf/YzkQoUqaqa3NJ/c1apIF/dsl7yJ4GdVrC3/w7lxE 2CO5ioQD4s6QMWP2Y9dOdVl2INwz8eXOds9NS+1nMs4SoMbrpJnAjx8Cthti 1Z/8eWMU023LYahds8BYM0T435K/2tTB5GTA4uTl2y8Xzz2PbptQ4PrUDaII +egeQQyPA0yuoRDwpaeTiaBYOSa06YYuK5Agr0buQAxRIMCxI2o+fucjoabv FsQHKGu20U5GlJroSIyIVVkaH3evhNti/AnYX1HuokcGEQNsF5vo4SjWcH23 2P86EIV+w5lUWC1FN9vZCyvbvyuqLHQMtqKVn4GBOkIc3bYQ0jru3a0FG4Cx bNJ0ASps2+p3Vxe0d+so2iFV92ByQ+0skyCUwCNUlwOV5V5f2fy1ImXk4mXI cO/bcbqRxx3pG9gkPIh43FoQktTT+tsJ5vS53qfaLGdhCYfkrWjsKu+2P9Xg +Cr8clh6NTblhfkoAS1gzjA3XgsgEFrtP+OGqwg= =c5WU -----END PGP MESSAGE----- ---END ENCRYPTED RANDOM KEY--- `; export const testMessageEncryptedStandard = `-----BEGIN PGP MESSAGE----- Version: OpenPGP.js v4.10.10 Comment: https://openpgpjs.org wcBMA2tjJVxNCRhtAQf9E4dbqHn+nBopmnr7NmaQYO5HADPeQOR+YJOTX5g3 2rNVdxHJFCUHq47GK6XN57zs54vNLaS6DKJJIez0Pb0oMjCTtTFMuDV7SnfG ifYsBFJASd85xsBBEQZtg/+OO0gqNmp2pXFusF6x9d/yP9yToILv9YvMC3yH Xggizz6Y8FXBQAObJ0Fh6pirvLhZkmSvLI2N1pDkl/EQuU/U8IHNRho7txC9 
GM6Qx8C9bBFspzaUK/Nas1gZVVFxKmmywGDwmFPfl/KCq/ZH69J8Rsv5YtDr hTSONRq38yh6qweB42wuZnz3sKov509wngK4q58+ECV125x36xP/vMzBgtbG ZdLH2wH9UgGgyW75l74598gY+rCt7w0bs/eOK57DQPv3Y75Um2VF1cXB6m4P 4C8uCux4A4p5PZlVqeBLvEH5P/A09lgK+iLtadYVcrLX5QMck2Wd+drOeoFb /kPwSxw5cuoDREG7IDh19eE2la11WMN60CnNjHvW/BDSXakUpWJlK0VJcT4u B5TqsizabP3NrEw3pPAwwvpiDYFXmlHwds1ppe0gG1mbjKo04lU7PRCr8J53 uUPOkJw8SrAGaYGmzHgMUNS/EhC9IrF+iCfxQkORsFTkobD8nB9jZA/bE33L ON6CXcIVSvSInw5ETwcnCwYVs5/S7NZbwZTWfs1zNXycei4NJ+SqP9OXEwr5 BG7KdJCMeKCSrvEJLILWtKO7yCfE4ZyXQEwvfb9lzq6d52PZVnVUyhzqhwEL Lo94KQzrMfRkA4g2P7uRDoX6IN+Bl0SjkKU1hEFk0J1KPetDfMrBoYE2PTSA m+aXRzcMnVXODP4WQLRdXLwGGybB/lqN+kKEkaqEAUSZvX6NGlJ2fTGZtUxx oxmOqYLuY11dynjPSBbf4svuFNqauMCE7uXIbmEn+PsxOKyXjbwGu3X07LCY 4f16DjO5VKjWsyRynaFmQNLu/GjffsZhiMHRA6ArEsAYXP/FLV2qq4Rhih8Y HN7jjFa53Iv8u2awL6OJd9cO3ApXKYzFn8aaHuvJDDwdlKLGK3aLjMpLRaVr 3rJrVdtRd/kITL/qi+kLkBW2flc2W1BuM+SsddXw7zDPCE/TURm2f0eF5M4k f9Om4NbsYwdKk9X2/32LWFndOeG0bfXFjrAZcBqdPOsMWF4q/ZFUHhOIdyXu kwmQ85AfMrDapU9mnHthtcPkcwUK1Pi/OyQsddvz4WE9H54F8pYTaSRKQBJ/ YhWalR0jXF5POW+NSrmk6ZfxrMDeD8Sq2lsEIWEYOTchTnl9TNP2U2+S3ZUz ZREjg1V3aW607TxaooG2cs1wecmXmDTcpX4KELcoy1dUwJLNP5acn7w3VvPx D8hFO3bsyjXkfsvL9rBGEwdMC7LYIh73Wa8aZwy0ni6zFQtxSm03Me12RGmt 9Lxtmqr7PpVBH6ShGGkVysDii5wEBMZlH3/mgp77MKpIAevh7kD1SVbAI0AR q+JAb6aGHyXtPRWy2MrVhW2QtfZNcVHs+VGHP2Y8VK1phr3NdpGrDBKTHTJ2 ND8x57uwCCIr87fkucjsUUqaRnLb2Iv1NNmv7NbHanKmg2wb8ZlSiXbP70MN dlz2ukPJl+sJnbMeFPXvYJ+5cacGSxJm02/JY03io94YGXG9K1jToaJhZc09 NlenTo7pPc2Iqdwx4bOm00eKE5sX1vuMPYmFWASZKlLNttGKlmdeB1ozW8SB guKZ8XW2vz2HeRrHwOLrl3/KyOgElTbG41qXfLXJ2pusJQ7Q/NVnwbnzvbrt 0YW7J2dJdaLrqMdP84w86VVcmWqf4TelhgTZMqhHyvoGh5AI9bYlqlsL0+WJ jZFf/Jz8XG88b8DvldmKupsdSV3Cv2F/1sXr1gY/1ix4eY3XnbP599ewaHv7 CVG5+jJczkqBHqPnO3itQhBcqj8bSUAYcL91RFJqQNZUh0odm9gPtrStna92 JDwR6FvaY/RnnIe41Bg0RaGQSnCXHGIiIn/D76FBsqTgDrjgFLq2IIVSyJq+ wZjmtqlBaFlGpaJJPJrlmy/+Pse/dGg8FyGqrjWgvB0FNGHKdpMt/ojdGnsP HwsSNiSlfPOeytqQ8Ll0/ZwNKYm/nz7Bhnrwxke9xScsB7jrRijTvPkrdyOT 
za4ymcWLWxIvohlcItncZAQ2xPI/pf2CfZxTsbYTW5fAQNKTHoTfqG65D235 SUZtUe1xsbEh3phYjz4xWo/dsM5644WuB/7ieG0VNydFTlPzM65Au1h126rw 186eLPATuX5g1tF1IUY81LOD14hI0zzYnXmj9guHGpM1m/ySK+Exb17YJfY3 4hfvgBDuoHDgpVDdpnnQGBl8tPjev/z/rRc6jyBCAXVvQrzcbgr1t9J9tmot S48iFWu3n8KfyjLXwtfyNqut+9f2NWxOEaEsjtsN96pkhcDMr4Diuw8Hnptj 6QnNkT4OQgN75epjaOlkB3lyz+y13c90t1wEbxvHYP+8aGtmY5ldWVt5e3Q6 i4Zh8XgsRHz1Ymq2xLvvhWoT0p/0JhVP2eiZGn58zm/nIVgwNjSyBXj6OFCW u2kNiCRKangR2WuNNHCcciFdCzstQs3VEheFoDARlNbMc1Jh4eyfIdvTl+ko WmgGfld+uSON4GSBeVCzZkza17atSNZbrT0jrzN1XHsgKPmp5WZ4xWfIsV1h ZNWzRCRGzkgS6x1MDC59rsRLhLWG+5gvwq+l0vvmk9matQ07y1QPev96eygM HchqDIhx+wZO8ATAlAQsoOvWZGNl6I0oLAozOhQWvKfIkE6cIydkAuINQBtA gBKKX1B/iVLT60fWsjm6vAoBlEqcI34j+kweao6gVZGWqlzPbYEQntNLQ1K3 YxQN6KJjHdP0ZWMNNjRJoU81XExzWre5SgrB2zz7ro10C4i7roTdsHqg1h+m d+3UY0goMzJYb+0q8oRbGQwpImAnBvMyw1TxVgeBPAbhTWBze1oHDsiVjAJo YbT+FUOuJBbRFAchj+U5yUU+GQ21KBUtXcLKedMGm9eg4IrsgGYpRtexZxlV QwDL/5PPK6t6vNlY1P/y3CZOJEYd+4TBr6AMWuujKbBhrrbFEjfb/jLGhvtQ qCiox1il4Xxu03lR0sTBLXIpoBJRzQx+3XYSpez9M9N9vDJ9afSQGANUmR6z nXofEf1YnduffjkcHEC16ZzKAoqC1n+4P/CCHxyIoNKV+rPzsXijGnL8zW+M mXh+8Rr/2F76TDjD+WpMG8oCHifartxtTzB/w0RI6yWg49bIdY+WgLtsEwRP oHI= =Y+OF -----END PGP MESSAGE-----`; export const testMessageResult = '<div>flkasjfkjasdklfjasd<br></div><div>fasd<br></div><div>jfasjdfjasd<br></div><div>fj<br></div><div>asdfj<br></div><div>sadjf<br></div><div>sadjf<br></div><div>asjdf<br></div><div>jasd<br></div><div>fj<br></div><div>asdjf<br></div><div>asdjfsad<br></div><div>fasdlkfjasdjfkljsadfljsdfjsdljflkdsjfkljsdlkfjsdlk<br></div><div>jasfd<br></div><div>jsd<br></div><div>jf<br></div><div>sdjfjsdf<br></div><div><br></div><div>djfskjsladf<br></div><div>asd<br></div><div>fja<br></div><div>sdjfajsf<br></div><div>jas<br></div><div>fas<br></div><div>fj<br></div><div>afj<br></div><div>ajf<br></div><div>af<br></div><div>asdfasdfasd<br></div><div>Sent from <a href="https://protonmail.ch">ProtonMail</a>, encrypted email based in 
Switzerland.<br></div><div>dshfljsadfasdf<br></div><div>as<br></div><div>df<br></div><div>asd<br></div><div>fasd<br></div><div>f<br></div><div>asd<br></div><div>fasdflasdklfjsadlkjf</div><div>asd<br></div><div>fasdlkfjasdlkfjklasdjflkasjdflaslkfasdfjlasjflkasflksdjflkjasdf<br></div><div>asdflkasdjflajsfljaslkflasf<br></div><div>asdfkas<br></div><div>dfjas<br></div><div>djf<br></div><div>asjf<br></div><div>asj<br></div><div>faj<br></div><div>f<br></div><div>afj<br></div><div>sdjaf<br></div><div>jas<br></div><div>sdfj<br></div><div>ajf<br></div><div>aj<br></div><div>ajsdafafdaaf<br></div><div>a<br></div><div>f<br></div><div>lasl;ga<br></div><div>sags<br></div><div>ad<br></div><div>gags<br></div><div>g<br></div><div>ga<br></div><div>a<br></div><div>gg<br></div><div>a<br></div><div>ag<br></div><div>ag<br></div><div>agga.g.ga,ag.ag./ga<br></div><div><br></div><div>dsga<br></div><div>sg<br></div><div><br></div><div>gasga\\g\\g\\g\\g\\g\\n\\y\\t\\r\\\\r\\r\\\\n\\n\\n\\<br></div><div><br></div><div><br></div><div>sd<br></div><div>asdf<br></div><div>asdf<br></div><div>dsa<br></div><div>fasd<br></div><div>f</div>';
7,256
0
petrpan-code/ProtonMail/WebClients/packages/crypto/test
petrpan-code/ProtonMail/WebClients/packages/crypto/test/worker/keys.data.ts
export const ecc25519Key = `-----BEGIN PGP PUBLIC KEY BLOCK----- xjMEYRaiLRYJKwYBBAHaRw8BAQdAMrsrfniSJuxOLn+Q3VKP0WWqgizG4VOF 6t0HZYx8mSnNEHRlc3QgPHRlc3RAYS5pdD7CjAQQFgoAHQUCYRaiLQQLCQcI AxUICgQWAAIBAhkBAhsDAh4BACEJEKaNwv/NOLSZFiEEnJT1OMsrVBCZa+wE po3C/804tJnYOAD/YR2og60sJ2VVhPwYRL258dYIHnJXI2dDXB+m76GK9x4A /imlPnTOgIJAV1xOqkvO96QcbawjKgvH829zxN9DZEgMzjgEYRaiLRIKKwYB BAGXVQEFAQEHQN5UswYds0RWr4I7xNKNK+fOn+o9pYkkYzJwCbqxCsBwAwEI B8J4BBgWCAAJBQJhFqItAhsMACEJEKaNwv/NOLSZFiEEnJT1OMsrVBCZa+wE po3C/804tJkeKgEA0ruKx9rcMTi4LxfYgijjPrI+GgrfegfREt/YN2KQ75gA /Rs9S+8arbQVoniq7izz3uisWxfjMup+IVEC5uqMld8L =8+ep -----END PGP PUBLIC KEY BLOCK-----`; export const eddsaElGamalSubkey = `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYRU8lhYJKwYBBAHaRw8BAQdAixQ3oWfWg0zF8Dr8iCSKI7d87uR0D8KT jaXmeP/BFLMAAQC6l0agypEfDhEsPXnooVeQ9RdbuQJt79G0X0fEMJUaHA6L zQDCjAQQFgoAHQUCYRU+mQQLCQcIAxUICgQWAAIBAhkBAhsDAh4BACEJECU3 cYVou5siFiEEX78JM3S2Y1dhmm1KJTdxhWi7myIj8gEA08yfQM4huuE0HyzB gfVrSx/tZ7YNIuS8NusuI2C67PAA/2VDK/asD/++J6jeTLf4TojOBgKSNaF2 3OVX3XdjESQNx8F9BGEVFHAQCAChUnflGlhxwxxppDZCIG5RKmvya6PPPjeS /hhIHhYrvRkPio8bOolG72GW+jwTpkttqhX7hQeYSAuFVLWbvZT6nxxrUDCk v7eN3pq4YIaIF5UxHucoiE65LNBaa9rtqQdcrn/dT/SCS0YNfIIVqWUeHM1w sY06CwqQvRfBVbn5GkJqA+RhMF3Pavlb7vz99vDGaQXBqQlIRYWI3pWL2Abs nG35qzF6mA/gLuEazmdOmdn0RvUUxYUA4pkxVYaFvU+tQfMUFc0KvJgKLU1N ePtTeT9XxBgxLRAbi0v8ex1R08hFkvc5o7mFrAjiJ5iq7GUib0xSmEl9sa8b NQ2osvurAAMFB/42M4lEdyeGt+GC7NMI3k6E1s6piyvDFEX0BbWJihYuOmoU bHIS86NiRXoUUp9fyE4Qj7JLvtBUWxfRw4UsWSX02NZVT9GjpBGBjwpr/kB0 Gev/+mUshgYQjmycxVwsK42P15wNaP09JBf60ONcMswNq4UIhBs936yxwdJ4 EAKesY58vx7Pr+1BClS5338LzoSF8tVsAIdRyN9uC1DM+8IN2o4a/DrYD9Tu AVCHekvLzt2fX1oAV6HM2S9uaSfXyAkYqTa0EAPHzKthgiFa4IVyCqU9qNXH LJx6tdMkCFlIrl8R+HiA49AHx5x/n1FCOJ+POIlwfwJGgYrkLqI4F4V3AAFU CIWprTH3YxjBAAxfs4gj4oVmBwWBC5PfdQpO4a2Rp5eexmDGpyU4T8qesBRb wngEGBYIAAkFAmEVPpkCGwwAIQkQJTdxhWi7myIWIQRfvwkzdLZjV2GabUol N3GFaLubIojzAPwNPJX9AwYnd8vuvq4s+JCyG+Gs5a8MeUtAQyMTszhHDwD+ LAhjJS/ggyNCU/A+d6Eu9gacwFDD3j0IQLNe012Z2wU= =qRad -----END PGP 
PRIVATE KEY BLOCK-----`; export const rsa512BitsKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- xk0EYRam4gECALVRNFX0hcAEE2+FfdzawLPZJwyk2Lt4Rw/iWk+lBmbWuifM b7vbYKV2gGBnyEIoo1P6eN6aN7sRFtYYL0uVWB0AEQEAAc0QdGVzdCA8dGVz dEBhLml0PsKKBBABCAAdBQJhFqbiBAsJBwgDFQgKBBYAAgECGQECGwMCHgEA IQkQNE7iDhRqacUWIQSvhgc8XQvlU4EgETE0TuIOFGppxa/XAf43Z7Y4marq wN7RGSinKMFLerNInhaJsyFmHEuNPk3Z1k32EL3007lYemvg5U96KdBn7cos qOz1E5L+vNW3qcSpzk0EYRam4gECALLIfkJOcpHUYazmmD4e4SuyfDvHxaA5 D1GnOsavGycj5AlYnhGu6mwFFQvhjgNSFIT/l6KZjVxRVci++eH4pXEAEQEA AcJ2BBgBCAAJBQJhFqbiAhsMACEJEDRO4g4UamnFFiEEr4YHPF0L5VOBIBEx NE7iDhRqacULrAH6AmBrodF/hjHBy9Ag+m21Q4WcIsRMse4T0arCZgrjmwwZ m53MXUW1fnpBPuv9RWJDN+tLhm5FPJktpuElr6hcBg== =J9mf -----END PGP PUBLIC KEY BLOCK-----`; export const keyWithThirdPartyCertifications = `-----BEGIN PGP PUBLIC KEY BLOCK----- xjMEY9lU1xYJKwYBBAHaRw8BAQdAdDv0sCXYCFUdV9CKFbpU8XyiOGrcOIsa kxYRwViGLKTNFW5hbWUgPGVtYWlsQHRlc3QuY29tPsKMBBAWCgA+BQJj2VTX BAsJBwgJEOFr0Rz+vLN8AxUICgQWAAIBAhkBAhsDAh4BFiEEyMoAEQdOZiNi Bf1K4WvRHP68s3wAAPBBAPkBlUVIqbTSlTRyJ926gfuI3m3+WEB1ERZ/PGKr s0/e3gD/XsM1zaWq1/CbsPdWWJSQMZB1GCwK4s7lLtZzfqLT4gvCeAQQFgoA KgUCY9lU4wkQf+GheCFj+YUCGwMWIQTSGjpRlvRsHskHyEt/4aF4IWP5hQAA spsBAKdlu+B+auwXlSkubx4AEZGCN5olOMQfYdPHkYjlhF1WAP4jMpQnadg2 kufvNP5FVkRv5HZZDjR1jtKloXbT58HaBs44BGPZVNcSCisGAQQBl1UBBQEB B0DsHXPmEAMDqKviWCKO0IcIyHQbkAdWrq0EmIP+iuypUQMBCAfCeAQYFggA KgUCY9lU1wkQ4WvRHP68s3wCGwwWIQTIygARB05mI2IF/Urha9Ec/ryzfAAA fN4A/iUAxwmPYDEnnymsiYuTFrkWmT7jMjK7SJ8RnshclA4AAQCjOZKSdb92 gze00ZmXo9g8Zod85s+4eiyMKTbQ+mx1DQ== =WbVq -----END PGP PUBLIC KEY BLOCK-----`; export const keyWithP256AndCurve25519Subkeys = `-----BEGIN PGP PRIVATE KEY BLOCK----- xXcEZNI7SRMIKoZIzj0DAQcCAwRNbEFVQ7/5dkZsMEObzf2bL6bYLg7UmbOL nC8LG9BWIfEmTH3QNOO2IuJDRyF/WmqpoNXQBuO7Emophg+23x1WAAD+JRQA cUMAXKtqmey7d06r7EHIYyE/dgZeGo/z0WKmmjcO5M0OPHRlc3RAdGVzdC5p dD7CiQQQEwgAOwWCZNI7SQMLCQcJkLi5pXUe27CfAxUICgIWAAIZAQKbAwIe ARYhBEJtG+YOG/wgLGeeOri5pXUe27CfAADxkAEA4dh2u60jIlRo5yMwSBeb 
nDEuRrt4M1XNs78OgDkHv0QBALrQuKGEP7UVo5O6Vr0ah91O5VAcC9XxwjtY xl1CersLx3sEZNI7SRIIKoZIzj0DAQcCAwQTk1ESj08ix1DHXGW4ZQ5KiQNi KL3z6+KiYnjEDNjsPtH4o0FHS6d5zUmEXZ1xqbGcOmOKZ8YgKyNklYu3T5g1 AwEIBwABAKdySgrgktTT86zgFJRkxpPkNDhMRFpBj9APRJZE1NhlEIPCeAQY EwgAKgWCZNI7SQmQuLmldR7bsJ8CmwwWIQRCbRvmDhv8ICxnnjq4uaV1Htuw nwAAwa4BAPslluPut3qHU2h7PB+D93ttxCn/AhSgOc5lUOafZt2VAP91FuPa 8ziVOrUmQTj0eOBjfW0XYIlm7JTERrRlh5S8R8ddBGTSO0kSCisGAQQBl1UB BQEBB0CrsfLaOT7JAcwc2vg36SSJ6YCXODfvudM9INHNA3kxcQMBCAcAAP9h 0r01q6Jz/KvfNkJXzkvfaAfXOe6GfrFs10QvTvjpwBL4wngEGBMIACoFgmTS O0kJkLi5pXUe27CfApsMFiEEQm0b5g4b/CAsZ546uLmldR7bsJ8AAGnuAQCF lAWga4MJBiFLbBiYD7248zu+xmvUAWBU7f/dkHenYAD+K8UCcwQrqeDhCl0q z5FbOJXSHsoez1SZ7GKgoxC+X0w= -----END PGP PRIVATE KEY BLOCK-----`;
7,257
0
petrpan-code/ProtonMail/WebClients/packages/crypto/test
petrpan-code/ProtonMail/WebClients/packages/crypto/test/worker/processMIME.data.ts
export const key = `-----BEGIN PGP PUBLIC KEY BLOCK----- Version: OpenPGP.js v4.4.6 Comment: https://openpgpjs.org xjMEXG6rNhYJKwYBBAHaRw8BAQdA63eiHJ6ylmHXwDzvNoBXDx3UkaF6rm3d kToIFs8KYGnNG0pvbiBTbWl0aCA8am9uQGV4YW1wbGUuY29tPsJ3BBAWCgAf BQJcbqs2BgsJBwgDAgQVCAoCAxYCAQIZAQIbAwIeAQAKCRACmBrNmWu7s6ig AP4l4JUNFYP1lzje4+VB1oz3xgAJwDpIPnpvV4p6fVfCMQEAsfqvA6OdgLl+ MmVRBRXO1BUtkSxwS9zxzQfE/0NZ7QfOOARcbqs2EgorBgEEAZdVAQUBAQdA 4IcImEOmtilzNy6BvjyoHHtiukYZlb4/38iqQbzQxywDAQgHwmEEGBYIAAkF AlxuqzYCGwwACgkQApgazZlru7OCeAD/Waa1g7t1DsrE8Di+ovD19Xs7js4R 82uvdzLBXafN8okBALL5uHCjG/gkJzHGun2Tj2MKO2ykR6gv6lVKo7jX75kD =7vY3 -----END PGP PUBLIC KEY BLOCK-----`; export const multipartSignedMessage = `From: Jon Smith <jon@example.com> To: Jon Smith <jon@example.com> Mime-Version: 1.0 Content-Type: multipart/signed; boundary=bar; micalg=pgp-md5; protocol="application/pgp-signature" --bar Content-Type: text/plain; charset=iso-8859-1 Content-Transfer-Encoding: quoted-printable =A1Hola! Did you know that talking to yourself is a sign of senility? It's generally a good idea to encode lines that begin with From=20because some mail transport agents will insert a greater- than (>) sign, thus invalidating the signature. Also, in some cases it might be desirable to encode any =20 trailing whitespace that occurs on lines in order to ensure =20 that the message signature is not invalidated when passing =20 a gateway that modifies such whitespace (like BITNET). =20 me --bar Content-Type: application/pgp-signature -----BEGIN PGP SIGNATURE----- Version: OpenPGP.js v4.4.6 Comment: https://openpgpjs.org wl4EARYKAAYFAlxurnwACgkQApgazZlru7OZ4gEA7gcIhNDZe9DurcA7I6Hb J+mJL9vKtB5Ob4ponog5+ZYBAK6MCfmEImVCpdOlAIKmA9VRzQVLbW+Zm9cc iwVC3WsC =beyW -----END PGP SIGNATURE----- --bar--`; export const multipartSignedMessageBody = `¡Hola! Did you know that talking to yourself is a sign of senility? 
It's generally a good idea to encode lines that begin with From because some mail transport agents will insert a greater- than (>) sign, thus invalidating the signature. Also, in some cases it might be desirable to encode any trailing whitespace that occurs on lines in order to ensure that the message signature is not invalidated when passing a gateway that modifies such whitespace (like BITNET). me`; // Message from: https://docs.microsoft.com/en-us/previous-versions/office/developer/exchange-server-2010/aa563375(v=exchg.140) export const multipartMessageWithAttachment = `From: Some One <someone@example.com> To: "Someone Else" <someone-else@example.com> MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="XXXXboundary text" This is a multipart message in MIME format. --XXXXboundary text Content-Type: text/plain this is the body text --XXXXboundary text Content-Type: text/plain; Content-Disposition: attachment; filename="test.txt" this is the attachment text --XXXXboundary text--`; export const messageWithEmptySignature = `Content-Type: multipart/signed; protocol="application/pgp-signature";\n micalg="pgp-sha256"; boundary="===============9034558267015095129==" MIME-Version: 1.0 --===============9034558267015095129== MIME-Version: 1.0 Content-Type: multipart/alternative; boundary="--==_mimepart_64c6b73c4dd6d_2292b028ca3c6b8191503f"; charset=UTF-8 Content-Transfer-Encoding: 7bit This is a multi-part message in MIME format. 
----==_mimepart_64c6b73c4dd6d_2292b028ca3c6b8191503f Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: quoted-printable Hello ----==_mimepart_64c6b73c4dd6d_2292b028ca3c6b8191503f Content-Type: text/html; charset=UTF-8 Content-Transfer-Encoding: quoted-printable <div>Hello</div> ----==_mimepart_64c6b73c4dd6d_2292b028ca3c6b8191503f-- --===============9034558267015095129== Content-Type: application/pgp-signature; name="signature.asc" MIME-Version: 1.0 Content-Disposition: attachment; filename="signature.asc" --===============9034558267015095129==-- `;
7,258
0
petrpan-code/ProtonMail/WebClients/packages/crypto/test
petrpan-code/ProtonMail/WebClients/packages/crypto/test/worker/worker.spec.ts
import { use as chaiUse, expect } from 'chai'; import chaiAsPromised from 'chai-as-promised'; import { SessionKey, generateKey, getSHA256Fingerprints, reformatKey } from 'pmcrypto'; import { CompressedDataPacket, enums, decryptKey as openpgp_decryptKey, encryptKey as openpgp_encryptKey, readKey as openpgp_readKey, readMessage as openpgp_readMessage, readPrivateKey as openpgp_readPrivateKey, revokeKey as openpgp_revokeKey, } from 'pmcrypto/lib/openpgp'; import { S2kTypeForConfig, VERIFICATION_STATUS } from '../../lib'; import { arrayToHexString, binaryStringToArray, hexStringToArray, stringToUtf8Array, utf8ArrayToString, } from '../../lib/utils'; import { CryptoWorkerPool as CryptoWorker } from '../../lib/worker/workerPool'; import { testMessageEncryptedLegacy, testMessageEncryptedStandard, testMessageResult, testPrivateKeyLegacy, } from './decryptMessageLegacy.data'; import { ecc25519Key, eddsaElGamalSubkey, keyWithP256AndCurve25519Subkeys, keyWithThirdPartyCertifications, rsa512BitsKey, } from './keys.data'; import { messageWithEmptySignature, key as mimeKey, multipartMessageWithAttachment, multipartSignedMessage, multipartSignedMessageBody, } from './processMIME.data'; chaiUse(chaiAsPromised); describe('Worker API and Worker Pool Integration', () => { before(async () => { await CryptoWorker.init({ poolSize: 1 }); }); afterEach(async () => { await CryptoWorker.clearKeyStore(); }); after(async () => { await CryptoWorker.destroy(); }); it('init - should throw if already initialised', async () => { await expect(CryptoWorker.init()).to.be.rejectedWith(/already initialised/); }); it('decryptMessage - should decrypt message with correct password', async () => { const armoredMessage = `-----BEGIN PGP MESSAGE----- wy4ECQMIxybp91nMWQIAa8pGeuXzR6zIs+uE6bUywPM4GKG8sve4lJoxGbVS /xN10jwBEsZQGe7OTWqxJ9NNtv6X6qFEkvABp4PD3xvi34lo2WUAaUN2wb0g tBiO7HKQxoGj3FnUTJnI52Y0pIg= =HJfc -----END PGP MESSAGE-----`; const decryptionResult = await CryptoWorker.decryptMessage({ armoredMessage, 
passwords: 'password', }); expect(decryptionResult.data).to.equal('hello world'); expect(decryptionResult.signatures).to.have.length(0); expect(decryptionResult.verificationErrors).to.not.exist; expect(decryptionResult.verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); const decryptWithWrongPassword = CryptoWorker.decryptMessage({ armoredMessage, passwords: 'wrong password', }); await expect(decryptWithWrongPassword).to.be.rejectedWith(/Error decrypting message/); }); it('decryptMessage - message with signature', async () => { const messageWithSignature = `-----BEGIN PGP MESSAGE----- wy4ECQMIUxTg50RvG9EAMkSwKLgTqzpEMlGv1+IKf52HmId83iK4kku8nBzR FxcD0sACAc9hM9NVeaAhGQdsTqt9zRcRmMRhyWqoAsR0+uZukqPxGZfOw0+6 ouguW3wrVd+/niaHPaDs87sATldw5KK5WI9xcR+mBid4Bq7hugXNcZDMa8qN gqM8VJm8262cvZAtjwbH50TjBNl+q/YN7DDr+BXd6gRzrvMM+hl5UwYiiYfW qXGo4MRQBT+B41Yjh/2rUdlCmWoRw2OGWzQTmTspNm4EEolrT6jdYQMxn9IZ GzGRkb+Rzb42pnKcuihith40374= =ccav -----END PGP MESSAGE----- `; const decryptionResult = await CryptoWorker.decryptMessage({ armoredMessage: messageWithSignature, passwords: 'password', }); expect(decryptionResult.data).to.equal('hello world'); expect(decryptionResult.signatures).to.have.length(1); expect(decryptionResult.verificationErrors![0]).instanceOf(Error); // Errors should be automatically reconstructed by comlink expect(decryptionResult.verificationErrors![0]).to.match(/Could not find signing key/); expect(decryptionResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_INVALID); }); it('decryptMessage - output binary data should be transferred', async () => { const decryptionResult = await CryptoWorker.decryptMessage({ armoredMessage: `-----BEGIN PGP MESSAGE----- wy4ECQMIxybp91nMWQIAa8pGeuXzR6zIs+uE6bUywPM4GKG8sve4lJoxGbVS /xN10jwBEsZQGe7OTWqxJ9NNtv6X6qFEkvABp4PD3xvi34lo2WUAaUN2wb0g tBiO7HKQxoGj3FnUTJnI52Y0pIg= =HJfc -----END PGP MESSAGE-----`, passwords: 'password', format: 'binary', }); expect(decryptionResult.data).to.deep.equal(stringToUtf8Array('hello world')); 
expect(decryptionResult.signatures).to.have.length(0); expect(decryptionResult.verificationErrors).to.not.exist; expect(decryptionResult.verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); }); it('decryptMessage - supports decrypting e2ee forwarded message', async () => { // final recipient key const fwdRecipientKeyArmored = `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEZAdtGBYJKwYBBAHaRw8BAQdAcNgHyRGEaqGmzEqEwCobfUkyrJnY8faBvsf9 R2c5ZzYAAP9bFL4nPBdo04ei0C2IAh5RXOpmuejGC3GAIn/UmL5cYQ+XzRtjaGFy bGVzIDxjaGFybGVzQHByb3Rvbi5tZT7CigQTFggAPAUCZAdtGAmQFXJtmBzDhdcW IQRl2gNflypl1XjRUV8Vcm2YHMOF1wIbAwIeAQIZAQILBwIVCAIWAAIiAQAAJKYA /2qY16Ozyo5erNz51UrKViEoWbEpwY3XaFVNzrw+b54YAQC7zXkf/t5ieylvjmA/ LJz3/qgH5GxZRYAH9NTpWyW1AsdxBGQHbRgSCisGAQQBl1UBBQEBB0CxmxoJsHTW TiETWh47ot+kwNA1hCk1IYB9WwKxkXYyIBf/CgmKXzV1ODP/mRmtiBYVV+VQk5MF EAAA/1NW8D8nMc2ky140sPhQrwkeR7rVLKP2fe5n4BEtAnVQEB3CeAQYFggAKgUC ZAdtGAmQFXJtmBzDhdcWIQRl2gNflypl1XjRUV8Vcm2YHMOF1wIbUAAAl/8A/iIS zWBsBR8VnoOVfEE+VQk6YAi7cTSjcMjfsIez9FYtAQDKo9aCMhUohYyqvhZjn8aS 3t9mIZPc+zRJtCHzQYmhDg== =lESj -----END PGP PRIVATE KEY BLOCK-----`; const fwdCiphertextArmored = `-----BEGIN PGP MESSAGE----- wV4DB27Wn97eACkSAQdA62TlMU2QoGmf5iBLnIm4dlFRkLIg+6MbaatghwxK+Ccw yGZuVVMAK/ypFfebDf4D/rlEw3cysv213m8aoK8nAUO8xQX3XQq3Sg+EGm0BNV8E 0kABEPyCWARoo5klT1rHPEhelnz8+RQXiOIX3G685XCWdCmaV+tzW082D0xGXSlC 7lM8r1DumNnO8srssko2qIja =pVRa -----END PGP MESSAGE-----`; const fwdRecipientKey = await CryptoWorker.importPrivateKey({ armoredKey: fwdRecipientKeyArmored, passphrase: null, }); await expect( CryptoWorker.decryptMessage({ armoredMessage: fwdCiphertextArmored, decryptionKeys: fwdRecipientKey, }) ).to.be.rejectedWith(/Error decrypting message/); // missing config flag const { data } = await CryptoWorker.decryptMessage({ armoredMessage: fwdCiphertextArmored, decryptionKeys: fwdRecipientKey, config: { allowForwardedMessages: true }, }); expect(data).to.deep.equal('Message for Bob'); }); it('decryptMessageLegacy - it can decrypt a legacy message', async () 
=> { const privateKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: testPrivateKeyLegacy, passphrase: '123', }); const decryptionResult = await CryptoWorker.decryptMessageLegacy({ armoredMessage: testMessageEncryptedLegacy, decryptionKeys: privateKeyRef, messageDate: new Date('2015-01-01'), }); expect(decryptionResult.data).to.equal(testMessageResult); expect(decryptionResult.signatures).to.have.length(0); expect(decryptionResult.verificationErrors).to.not.exist; expect(decryptionResult.verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); }); it('decryptMessageLegacy - it can decrypt a non-legacy armored message', async () => { const privateKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: testPrivateKeyLegacy, passphrase: '123', }); const decryptionResult = await CryptoWorker.decryptMessageLegacy({ armoredMessage: testMessageEncryptedStandard, decryptionKeys: privateKeyRef, verificationKeys: privateKeyRef, messageDate: new Date('2015-01-01'), }); expect(decryptionResult.data).to.equal(testMessageResult); expect(decryptionResult.signatures).to.have.length(1); expect(decryptionResult.verificationErrors).to.not.exist; expect(decryptionResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); }); it('encryptMessage - output binary message should be transferred', async () => { const encryptionResult = await CryptoWorker.encryptMessage({ textData: 'hello world', passwords: 'password', format: 'binary', }); expect(encryptionResult.message.length > 0).to.be.true; const decryptionResult = await CryptoWorker.decryptMessage({ binaryMessage: encryptionResult.message, passwords: 'password', }); expect(decryptionResult.signatures).to.have.length(0); expect(decryptionResult.verificationErrors).to.not.exist; expect(decryptionResult.verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); }); it('encryptMessage - should compress with zlib only if given `compress: true`', async () => { const password = 'password'; const { message: armoredMessage } = await 
CryptoWorker.encryptMessage({ textData: 'hello world', passwords: password, }); const encryptedMessage = await openpgp_readMessage({ armoredMessage }); const decryptedMessage = await encryptedMessage.decrypt([], [password]); expect(decryptedMessage.packets.findPacket(enums.packet.compressedData)).to.be.undefined; // request compression const { message: compressedArmoredMessage } = await CryptoWorker.encryptMessage({ textData: 'hello world', passwords: password, compress: true, }); const compressedEncryptedMessage = await openpgp_readMessage({ armoredMessage: compressedArmoredMessage }); const compressedDecryptedMessage = await compressedEncryptedMessage.decrypt([], [password]); const compressedPacket = compressedDecryptedMessage.packets.findPacket( enums.packet.compressedData ) as CompressedDataPacket; expect(compressedPacket).to.not.be.undefined; // @ts-ignore undeclared algorithm field expect(compressedPacket.algorithm).to.equal(enums.compression.zlib); }); it('encryptMessage/decryptMessage - should encrypt and decrypt text and binary data', async () => { const privateKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' } }); const { message: encryptedArmoredMessage } = await CryptoWorker.encryptMessage({ textData: 'hello world', encryptionKeys: privateKeyRef, signingKeys: undefined, // redundant; test that the option can still be serialized correctly }); const textDecryptionResult = await CryptoWorker.decryptMessage({ armoredMessage: encryptedArmoredMessage, decryptionKeys: privateKeyRef, }); expect(textDecryptionResult.data).to.equal('hello world'); expect(textDecryptionResult.signatures).to.have.length(0); expect(textDecryptionResult.verificationErrors).to.not.exist; expect(textDecryptionResult.verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); const { message: encryptedBinaryMessage } = await CryptoWorker.encryptMessage({ binaryData: new Uint8Array([1, 2, 3]), encryptionKeys: privateKeyRef, format: 'binary', }); const 
binaryDecryptionResult = await CryptoWorker.decryptMessage({ binaryMessage: encryptedBinaryMessage, decryptionKeys: privateKeyRef, format: 'binary', }); expect(binaryDecryptionResult.data).to.deep.equal(new Uint8Array([1, 2, 3])); expect(binaryDecryptionResult.signatures).to.have.length(0); expect(binaryDecryptionResult.verificationErrors).to.not.exist; expect(binaryDecryptionResult.verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); }); it('encryptMessage/decryptMessage - with elgamal key', async () => { // an elgamal key is considered insecure by OpenPGP.js by default, but we need to allow existing keys to be used for now. const weakKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: eddsaElGamalSubkey, passphrase: null }); const { message: encryptedArmoredMessage } = await CryptoWorker.encryptMessage({ textData: 'hello world', encryptionKeys: weakKeyRef, }); const textDecryptionResult = await CryptoWorker.decryptMessage({ armoredMessage: encryptedArmoredMessage, decryptionKeys: weakKeyRef, }); expect(textDecryptionResult.data).to.equal('hello world'); expect(textDecryptionResult.signatures).to.have.length(0); expect(textDecryptionResult.verificationErrors).to.not.exist; expect(textDecryptionResult.verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); }); it('signMessage/verifyMessage - output binary signature and data should be transferred', async () => { const privateKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' } }); const binarySignature = await CryptoWorker.signMessage({ textData: 'hello world', format: 'binary', detached: true, signingKeys: privateKeyRef, }); expect(binarySignature.length > 0).to.be.true; const verificationResult = await CryptoWorker.verifyMessage({ textData: 'hello world', verificationKeys: privateKeyRef, binarySignature, }); expect(verificationResult.data).to.equal('hello world'); expect(verificationResult.signatures).to.have.length(1); expect(verificationResult.errors).to.not.exist; 
expect(verificationResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); const invalidVerificationResult = await CryptoWorker.verifyMessage({ textData: 'not signed data', binarySignature, verificationKeys: privateKeyRef, format: 'binary', }); expect(invalidVerificationResult.data).to.deep.equal(stringToUtf8Array('not signed data')); expect(invalidVerificationResult.signatures).to.have.length(1); expect(invalidVerificationResult.errors).to.have.length(1); expect(invalidVerificationResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_INVALID); }); it('signMessage/verifyMessage - with context', async () => { const privateKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' } }); const textData = 'message with context'; const armoredSignature = await CryptoWorker.signMessage({ textData, signingKeys: privateKeyRef, context: { value: 'test-context', critical: true }, detached: true, }); const verificationValidContext = await CryptoWorker.verifyMessage({ textData, armoredSignature, verificationKeys: privateKeyRef, context: { value: 'test-context', required: true }, }); const verificationMissingContext = await CryptoWorker.verifyMessage({ textData, armoredSignature, verificationKeys: privateKeyRef, }); expect(verificationValidContext.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); expect(verificationMissingContext.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_INVALID); // check errors expect(verificationValidContext.errors).to.be.undefined; expect(verificationMissingContext.errors).to.have.length(1); expect(verificationMissingContext.errors![0]).to.match(/Unknown critical notation: context@proton/); }); it('verifyCleartextMessage - output binary signature should be transferred', async () => { const armoredKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- xjMEYj2jmxYJKwYBBAHaRw8BAQdAlG1ARz91CtsRmJ0lQo2wOqAzUXn8KnOu oBdEwZWZhPvNDzx0ZXN0QHRlc3QuY29tPsKMBBAWCgAdBQJiPaObBAsJBwgD 
FQgKBBYAAgECGQECGwMCHgEAIQkQ0k/eZvRKo8YWIQQseK5K/i3v7uzoNYHS T95m9EqjxqiLAP9sIlmYlCVgSiPZBmsixn9CL27Hv/Bgr2nc73v9K5OszAEA ypolW41xuLR+4D7vvxT66lwMMVagQSIisR+49QQP2w8= =rzuc -----END PGP PUBLIC KEY BLOCK-----`; const armoredCleartextMessage = `-----BEGIN PGP SIGNED MESSAGE----- Hash: SHA512 hello world -----BEGIN PGP SIGNATURE----- wnUEARYKAAYFAmI9o6IAIQkQ0k/eZvRKo8YWIQQseK5K/i3v7uzoNYHST95m 9EqjxoO3AP9xPAlk+qZ3sr/Y1lgWBIdoGeQ1ZGzLKVVzgrhH5sOcZQEA3AeS fLz+Lk0ZkB4L3nhM/c6sQKSsI9k2Tptm1VZ5+Qo= =1A38 -----END PGP SIGNATURE-----`; const publicKeyRef = await CryptoWorker.importPublicKey({ armoredKey }); const verificationResult = await CryptoWorker.verifyCleartextMessage({ armoredCleartextMessage, verificationKeys: publicKeyRef, }); expect(verificationResult.data).to.equal('hello world'); expect(verificationResult.signatures).to.have.length(1); expect(verificationResult.errors).to.not.exist; expect(verificationResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); const invalidVerificationResult = await CryptoWorker.verifyCleartextMessage({ armoredCleartextMessage, verificationKeys: [], }); expect(invalidVerificationResult.signatures).to.have.length(1); expect(invalidVerificationResult.errors).to.have.length(1); expect(invalidVerificationResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_INVALID); }); it('should encrypt/sign and decrypt/verify text and binary data', async () => { const aliceKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'alice', email: 'alice@test.com' } }); const bobKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'bob', email: 'bob@test.com' } }); const { message: encryptedArmoredMessage } = await CryptoWorker.encryptMessage({ textData: 'hello world', encryptionKeys: bobKeyRef, signingKeys: aliceKeyRef, }); const textDecryptionResult = await CryptoWorker.decryptMessage({ armoredMessage: encryptedArmoredMessage, decryptionKeys: bobKeyRef, verificationKeys: aliceKeyRef, }); 
expect(textDecryptionResult.data).to.equal('hello world'); expect(textDecryptionResult.signatures).to.have.length(1); expect(textDecryptionResult.verificationErrors).to.not.exist; expect(textDecryptionResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); const { message: encryptedBinaryMessage } = await CryptoWorker.encryptMessage({ binaryData: new Uint8Array([1, 2, 3]), encryptionKeys: bobKeyRef, signingKeys: aliceKeyRef, format: 'binary', }); const binaryDecryptionResult = await CryptoWorker.decryptMessage({ binaryMessage: encryptedBinaryMessage, decryptionKeys: bobKeyRef, verificationKeys: aliceKeyRef, format: 'binary', }); expect(binaryDecryptionResult.data).to.deep.equal(new Uint8Array([1, 2, 3])); expect(binaryDecryptionResult.signatures).to.have.length(1); expect(binaryDecryptionResult.verificationErrors).to.not.exist; expect(binaryDecryptionResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); }); it('should encrypt/sign and decrypt/verify binary data with detached signatures', async () => { const aliceKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'alice', email: 'alice@test.com' } }); const bobKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'bob', email: 'bob@test.com' } }); const plaintext = stringToUtf8Array('hello world'); const { message: encryptedBinaryMessage, signature: detachedBinarySignature, encryptedSignature: encryptedBinarySignature, } = await CryptoWorker.encryptMessage({ binaryData: plaintext, encryptionKeys: bobKeyRef, signingKeys: aliceKeyRef, format: 'binary', detached: true, }); const decryptionResult = await CryptoWorker.decryptMessage({ binaryMessage: encryptedBinaryMessage, binarySignature: detachedBinarySignature, decryptionKeys: bobKeyRef, verificationKeys: aliceKeyRef, format: 'binary', }); expect(decryptionResult.data).to.deep.equal(plaintext); expect(decryptionResult.signatures).to.have.length(1); expect(decryptionResult.verificationErrors).to.not.exist; 
expect(decryptionResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); const decryptionResultWithEncryptedSignature = await CryptoWorker.decryptMessage({ binaryMessage: encryptedBinaryMessage, binaryEncryptedSignature: encryptedBinarySignature, decryptionKeys: bobKeyRef, verificationKeys: aliceKeyRef, format: 'binary', }); expect(decryptionResultWithEncryptedSignature.data).to.deep.equal(plaintext); expect(decryptionResultWithEncryptedSignature.signatures).to.have.length(1); expect(decryptionResultWithEncryptedSignature.verificationErrors).to.not.exist; expect(decryptionResultWithEncryptedSignature.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); }); it('should support encrypting/decrypting using argon2', async () => { const passwords = 'password'; const sessionKey = { algorithm: enums.read(enums.symmetric, enums.symmetric.aes128), data: hexStringToArray('01FE16BBACFD1E7B78EF3B865187374F'), }; const encrypted = await CryptoWorker.encryptSessionKey({ ...sessionKey, passwords, format: 'binary', config: { s2kType: S2kTypeForConfig.argon2 }, }); // ensure encryption used argon2 const skeskStartIndex = 2; expect(encrypted[skeskStartIndex]).to.equal(4); // SKESK version (v6 format is different, test needs updating) expect(encrypted[skeskStartIndex + 2]).to.equal(S2kTypeForConfig.argon2); const decryptedSessionKey = await CryptoWorker.decryptSessionKey({ binaryMessage: encrypted, passwords }); expect(decryptedSessionKey).to.deep.equal(sessionKey); }); it('generateSessionKey - should return session key of expected size', async () => { const sessionKey128 = await CryptoWorker.generateSessionKeyForAlgorithm('aes128'); expect(sessionKey128.length).to.equal(16); const sessionKey192 = await CryptoWorker.generateSessionKeyForAlgorithm('aes192'); expect(sessionKey192.length).to.equal(24); const sessionKey256 = await CryptoWorker.generateSessionKeyForAlgorithm('aes256'); expect(sessionKey256.length).to.equal(32); }); it('generateSessionKeyFromKeyPreferences - 
should return shared algo preference', async () => { const aliceKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'alice', email: 'alice@test.com' } }); const bobKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'bob', email: 'bob@test.com' } }); const sessionKey = await CryptoWorker.generateSessionKey({ recipientKeys: [aliceKeyRef, bobKeyRef], }); expect(sessionKey.algorithm).to.equal('aes256'); }); it('generate/encrypt/decryptSessionKey - should encrypt and decrypt with key and password', async () => { const privateKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'test', email: 'test@test.com' } }); const password = 'password'; const sessionKey: SessionKey = { data: new Uint8Array(16).fill(123), algorithm: 'aes128', }; // armored result await CryptoWorker.encryptSessionKey({ ...sessionKey, encryptionKeys: privateKeyRef, passwords: password, }).then(async (armoredEncryptedSessionKey) => { const decryptedSessionKeyWithPassword = await CryptoWorker.decryptSessionKey({ armoredMessage: armoredEncryptedSessionKey, passwords: password, }); expect(decryptedSessionKeyWithPassword).to.deep.equal(sessionKey); const decryptedSessionKeyWithKey = await CryptoWorker.decryptSessionKey({ armoredMessage: armoredEncryptedSessionKey, decryptionKeys: privateKeyRef, }); expect(decryptedSessionKeyWithKey).to.deep.equal(sessionKey); }); // binary result await CryptoWorker.encryptSessionKey({ ...sessionKey, encryptionKeys: privateKeyRef, passwords: password, format: 'binary', }).then(async (binaryEncryptedSessionKey) => { const decryptedSessionKeyWithPassword = await CryptoWorker.decryptSessionKey({ binaryMessage: binaryEncryptedSessionKey, passwords: password, }); expect(decryptedSessionKeyWithPassword).to.deep.equal(sessionKey); const decryptedSessionKeyWithKey = await CryptoWorker.decryptSessionKey({ binaryMessage: binaryEncryptedSessionKey, decryptionKeys: privateKeyRef, }); expect(decryptedSessionKeyWithKey).to.deep.equal(sessionKey); }); }); 
it('processMIME - it can process multipart/signed mime messages and verify the signature', async () => { const mimeKeyRef = await CryptoWorker.importPublicKey({ armoredKey: mimeKey }); const { body, verified, signatures } = await CryptoWorker.processMIME({ data: multipartSignedMessage, verificationKeys: mimeKeyRef, }); expect(verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); expect(signatures.length).to.equal(1); expect(signatures[0].length > 0).to.be.true; // check that serialized signature is transferred expect(body).to.equal(multipartSignedMessageBody); }); it('processMIME - it can parse message with text attachment', async () => { const { verified, body, signatures, attachments } = await CryptoWorker.processMIME({ data: multipartMessageWithAttachment, }); expect(verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); expect(signatures.length).to.equal(0); expect(body).to.equal('this is the body text\n'); expect(attachments.length).to.equal(1); const [attachment] = attachments; expect(attachment.fileName).to.equal('test.txt'); expect(attachment.contentId.endsWith('@pmcrypto>')).to.be.true; expect(attachment.content.length > 0).to.be.true; expect(attachment.content.length).to.equal(attachment.size); expect(utf8ArrayToString(attachment.content)).to.equal('this is the attachment text\n'); }); it('processMIME - it can parse message with empty signature', async () => { const { body, signatures, verified, attachments } = await CryptoWorker.processMIME({ data: messageWithEmptySignature, }); expect(verified).to.equal(VERIFICATION_STATUS.NOT_SIGNED); expect(signatures).to.have.length(0); expect(body).to.equal('<div>Hello</div>\n'); expect(attachments).to.have.length(1); // signature part that failed to parse expect(attachments[0].content).to.have.length(0); }); it('getMessageInfo - it returns correct keyIDs', async () => { const signedMessage = `-----BEGIN PGP MESSAGE----- xA0DAQoWaZjmpnshsL8Bywt1AGIyFfFoZWxsb8J1BAEWCgAGBQJiMhXxACEJ 
EGmY5qZ7IbC/FiEE3C2Gg07gzeD8liPcaZjmpnshsL9atgD+PiNipUtpGyv7 Jky/kRH9ikiCFdnNCPmXpGM/HXBQsnAA/jZVt+uBEVIgTeTJ9c7AqEgV3x9K 2Dj4M71DOHZr/lAL =gTiI -----END PGP MESSAGE-----`; const encryptedMessage = `-----BEGIN PGP MESSAGE----- wV4DmdSzzm35uOMSAQdAfIPK4Iteh+VVFIddVCaR60ETJ8mhx6ytbR7ppS4h qiAwqc/J464YnVgZ8BbGLt0k2ipAsR5y0M+I+GivWhCXMSKtRwvBmwiCgiE7 PzIOge9V0jYBuRj2e07jffFN7LDy9Q6kaLdkj+R/pAJi1StBntsW0sBBSkcN xMT1c31ROTrAe4C6g21wLAY= =2VmX -----END PGP MESSAGE-----`; const signedMessageInfo = await CryptoWorker.getMessageInfo({ armoredMessage: signedMessage }); expect(signedMessageInfo.encryptionKeyIDs).to.deep.equal([]); expect(signedMessageInfo.signingKeyIDs).to.deep.equal(['6998e6a67b21b0bf']); const encryptedMessageInfo = await CryptoWorker.getMessageInfo({ armoredMessage: encryptedMessage }); expect(encryptedMessageInfo.encryptionKeyIDs).to.deep.equal(['99d4b3ce6df9b8e3']); expect(encryptedMessageInfo.signingKeyIDs).to.deep.equal([]); }); it('getSignatureInfo - it returns correct keyIDs', async () => { const armoredSignature = `-----BEGIN PGP SIGNATURE----- wnUEARYKAAYFAmIyIZcAIQkQaZjmpnshsL8WIQTcLYaDTuDN4PyWI9xpmOam eyGwv58uAQDBVzpXdSjXtEleTrlCDV0Ai7edrGelnbYl5M5QWHsO6AEA7ylY M8uical4EQWijKwbwpfCViRXlPLbWED7HjRFJAQ= =jrvP -----END PGP SIGNATURE-----`; const signatureInfo = await CryptoWorker.getSignatureInfo({ armoredSignature }); expect(signatureInfo.signingKeyIDs).to.deep.equal(['6998e6a67b21b0bf']); }); it('getKeyInfo - it returns correct key type and encryption status', async () => { const armoredPublicKey = ecc25519Key; const armoredDecryptedPrivateKey = `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYjn/DRYJKwYBBAHaRw8BAQdACQUzjf/48LfAqt/iJoCLvNh82ezGNLad uLoCyyqP+kMAAQDlR+FqVc7sOXkWw9Ce21H9U75JbXkQZdopNT6rmUP5eRDN zRE8dGVzdEB3b3JrZXIuY29tPsKMBBAWCgAdBQJiOf8NBAsJBwgDFQgKBBYA AgECGQECGwMCHgEAIQkQgXhWyqdTIuIWIQSLE8CDw3U8LVFQRY6BeFbKp1Mi 4kBPAQCHH7+sA6/Rvn/cABOPdGuDz6LtBB2pai5ahUuYTyBP1QEAgG/KR/AX fWuidzPVytVcHmE7PH0ZUe/J8qAxszespALHXQRiOf8NEgorBgEEAZdVAQUB 
AQdAi+mqfJuhbYqNNrCRb0w8dDMImkdk9ygaZZXgzh6REWwDAQgHAAD/eBLr qR+dnBSXPk7n+0+/6bWjBWrYc6vDElTpPSA3stARasJ4BBgWCAAJBQJiOf8N AhsMACEJEIF4VsqnUyLiFiEEixPAg8N1PC1RUEWOgXhWyqdTIuLV3QD/e6jQ Y9qpG8A3sC7ZB29GClPXVJy6uL2Ai5R37cGozfUA/REr1bi6Ac4FauZsge1+ Z3SSOseslp6+4nnQ3zOqnisO =pEmk -----END PGP PRIVATE KEY BLOCK-----`; const encryptedPrivateKey = await openpgp_encryptKey({ privateKey: await openpgp_readPrivateKey({ armoredKey: armoredDecryptedPrivateKey }), passphrase: 'passphrase', }); const publicKeyInfo = await CryptoWorker.getKeyInfo({ armoredKey: armoredPublicKey }); expect(publicKeyInfo.keyIsPrivate).to.be.false; expect(publicKeyInfo.keyIsDecrypted).to.be.null; const decryptedKeyInfo = await CryptoWorker.getKeyInfo({ armoredKey: armoredDecryptedPrivateKey }); expect(decryptedKeyInfo.keyIsPrivate).to.be.true; expect(decryptedKeyInfo.keyIsDecrypted).to.be.true; const encryptedKeyInfo = await CryptoWorker.getKeyInfo({ armoredKey: encryptedPrivateKey.armor() }); expect(encryptedKeyInfo.keyIsPrivate).to.be.true; expect(encryptedKeyInfo.keyIsDecrypted).to.be.false; expect(encryptedKeyInfo.fingerprint).to.equal(encryptedPrivateKey.getFingerprint()); expect(encryptedKeyInfo.keyIDs).to.deep.equal(encryptedPrivateKey.getKeyIDs().map((keyID) => keyID.toHex())); }); it('getArmoredKeys - it returns a valid armored key', async () => { const hexBinaryPublicKey = 
`c63304623b4fac16092b06010401da470f010107405787b1d537d9974a40fea2f239578b81c355991ac4fe619a1595c3f21ecfb3abcd113c7465737440776f726b65722e636f6d3ec28c0410160a001d0502623b4fac040b0907080315080a0416000201021901021b03021e0100210910db8c8a5d901c2766162104d26cda9d40b4fea61b6cb65adb8c8a5d901c2766f80e00fd1eb4b4c4917d18436081ff9f463b9cc595af7805c789ba3b57a6b66511ff95d600fe36072060165618fed927051527d585ff26c3293b42671f2a169f47a4df6ba50cce3804623b4fac120a2b06010401975501050101074057571d1c77fbcb8f9fd0abd3d4f0e95ea725f569a49ec4faf0d2d0df8df3cd4103010807c2780418160800090502623b4fac021b0c00210910db8c8a5d901c2766162104d26cda9d40b4fea61b6cb65adb8c8a5d901c27665f760100e650904af11ea8933e9b91028df04375867f15b2542e8f8f86c2069081f66ed000fb06ddfa8ce3370eb9b92e93c40f7a624c1bf3b190f26beba3a9a93107b9ad310e`; const armoredKeys = await CryptoWorker.getArmoredKeys({ binaryKeys: hexStringToArray(hexBinaryPublicKey), }); expect(armoredKeys).to.have.length(1); const key = await openpgp_readKey({ armoredKey: armoredKeys[0] }); expect(key.isPrivate()).to.be.false; }); it('getArmoredSignature - it returns a valid armored signature', async () => { const hexBinarySignature = `c2750401160a0006050262351cc9002109101a5092c9a2df33531621041f75b4729655e143dc146f941a5092c9a2df33532cba0100ab31401b4aca8b449dc490d16927e37c9510c076745795b3e73bba0209b826770100dc6a6fcc1aaa6fcbce51a5b20682ea201414ec923d387a4eb88932df87f6e60c`; const armoredSignature = await CryptoWorker.getArmoredSignature({ binarySignature: hexStringToArray(hexBinarySignature), }); const signatureInfo = await CryptoWorker.getSignatureInfo({ armoredSignature }); expect(signatureInfo.signingKeyIDs).to.not.be.empty; }); it('getArmoredMessage - it returns a valid armored message', async () => { const hexBinaryMessage = 
`c15e03f95c1ce325f4cb90120107409ebcb5a71c378c1f0936a5264aa69cd97d11abc03ff7e82077641e1e2000fd2e3000316865a8f7516e3048376a949ea31e84f1d5588fef7d485ece4e8a96358697c1c25a2019c8d6b527cea6c234265354d23c013b5dc3c8ab1a6bd1afda98ea4c5476dc93e4319c9f3734148ed7eec41adef1d80a86b02eb256e185bce5958f43dd0cbbf6eb654970d65234595e72`; const armoredMessage = await CryptoWorker.getArmoredMessage({ binaryMessage: hexStringToArray(hexBinaryMessage), }); const message = await openpgp_readMessage({ armoredMessage }); expect(message.getEncryptionKeyIDs()).to.not.be.empty; }); it('isExpiredKey/canKeyEncrypt - it can correctly detect an expired key', async () => { const now = new Date(); const future = new Date(+now + 1000); const past = new Date(+now - 1000); // key expires in one second const expiringKeyRef = await CryptoWorker.generateKey({ userIDs: [{ name: 'name', email: 'email@test.com' }], date: now, keyExpirationTime: 1, }); expect(await CryptoWorker.isExpiredKey({ key: expiringKeyRef, date: now })).to.be.false; expect(await CryptoWorker.isExpiredKey({ key: expiringKeyRef, date: future })).to.be.true; expect(await CryptoWorker.isExpiredKey({ key: expiringKeyRef, date: past })).to.be.true; // canKeyEncrypt should return false for expired keys expect(await CryptoWorker.canKeyEncrypt({ key: expiringKeyRef, date: now })).to.be.true; expect(await CryptoWorker.canKeyEncrypt({ key: expiringKeyRef, date: past })).to.be.false; const keyReference = await CryptoWorker.generateKey({ userIDs: [{ name: 'name', email: 'email@test.com' }], date: now, }); expect(await CryptoWorker.isExpiredKey({ key: keyReference })).to.be.false; expect(await CryptoWorker.isExpiredKey({ key: keyReference, date: past })).to.be.true; }); it('isRevokedKey/canKeyEncrypt - it can correctly detect a revoked key', async () => { const past = new Date(0); const now = new Date(); const { privateKey: key, revocationCertificate } = await generateKey({ userIDs: [{ name: 'name', email: 'email@test.com' }], date: past, format: 
'object', }); const { publicKey: armoredRevokedKey } = await openpgp_revokeKey({ key, revocationCertificate, }); const keyRef = await CryptoWorker.importPublicKey({ armoredKey: key.armor() }); const revokedKeyRef = await CryptoWorker.importPublicKey({ armoredKey: armoredRevokedKey }); expect(await CryptoWorker.isRevokedKey({ key: revokedKeyRef, date: past })).to.be.true; expect(await CryptoWorker.isRevokedKey({ key: revokedKeyRef, date: now })).to.be.true; expect(await CryptoWorker.isRevokedKey({ key: keyRef, date: now })).to.be.false; // canKeyEncrypt should return false for revoked key expect(await CryptoWorker.canKeyEncrypt({ key: revokedKeyRef, date: now })).to.be.false; expect(await CryptoWorker.canKeyEncrypt({ key: keyRef, date: now })).to.be.true; }); it('getSHA256Fingerprints - it returns the expected fingerprints', async () => { const key = await openpgp_readKey({ armoredKey: ecc25519Key }); const keyReference = await CryptoWorker.importPublicKey({ armoredKey: ecc25519Key }); const sha256Fingerprings = await CryptoWorker.getSHA256Fingerprints({ key: keyReference }); expect(sha256Fingerprings).to.deep.equal(await getSHA256Fingerprints(key)); }); it('computeHash', async () => { const testHashMD5 = await CryptoWorker.computeHash({ algorithm: 'unsafeMD5', data: binaryStringToArray('The quick brown fox jumps over the lazy dog'), }).then(arrayToHexString); expect(testHashMD5).to.equal('9e107d9d372bb6826bd81d3542a419d6'); const testHashSHA1 = await CryptoWorker.computeHash({ algorithm: 'unsafeSHA1', data: new Uint8Array() }).then( arrayToHexString ); expect(testHashSHA1).to.equal('da39a3ee5e6b4b0d3255bfef95601890afd80709'); const testHashSHA256 = await CryptoWorker.computeHash({ algorithm: 'SHA256', data: new Uint8Array() }).then( arrayToHexString ); expect(testHashSHA256).to.equal('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'); const testHashSHA512 = await CryptoWorker.computeHash({ algorithm: 'SHA512', data: new Uint8Array() }).then( 
arrayToHexString ); expect(testHashSHA512).to.equal( 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e' ); }); it('computeHashStream', async () => { const emptyDataStream = new ReadableStream<Uint8Array>({ start: (controller) => { for (let i = 0; i < 100; i++) { controller.enqueue(new Uint8Array()); } controller.close(); }, }); const testHashSHA1Empty = await CryptoWorker.computeHashStream({ algorithm: 'unsafeSHA1', dataStream: emptyDataStream, }).then(arrayToHexString); expect(testHashSHA1Empty).to.equal('da39a3ee5e6b4b0d3255bfef95601890afd80709'); // `data` and `dataStream` share the underlying buffer: this is to test that no byte transferring is taking place const data = new Uint8Array(100).fill(1); const dataStream = new ReadableStream<Uint8Array>({ pull: (controller) => { for (let i = 0; i < 10; i++) { controller.enqueue(data.subarray(i, i + 10)); } controller.close(); }, }); const testHashSHA1Streamed = await CryptoWorker.computeHashStream({ algorithm: 'unsafeSHA1', dataStream }).then( arrayToHexString ); const testHashSHA1 = await CryptoWorker.computeHash({ algorithm: 'unsafeSHA1', data }).then(arrayToHexString); expect(testHashSHA1).to.equal('3f3feea4f73d400fe98b7518a4b21ad4fc80476d'); expect(testHashSHA1Streamed).to.equal(testHashSHA1); }); it('replaceUserIDs - the target key user IDs match the source key ones', async () => { const sourceKey = await openpgp_readKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8KzQ88YkB3b3JrZXIudGVzdD7CiQQQFgoA GgUCYkMx+QQLCQcIAxUICgQWAAIBAhsDAh4BACEJECO0b8qLQMw0FiEEYiHK 
mAo/cFLglZrtI7RvyotAzDSSNwD+JDTJNbf8/0u9QUS3liusBKk5qKUPXG+j ezH+Sgw1wagA/36wOxNMHxVUJXBjYiOIrZjcUKwXPR2pjke6zgntRuQOx10E YkMx+RIKKwYBBAGXVQEFAQEHQJDjVd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+ I8Yc2OQKAwEIBwAA/2Ikos/IDw3uCSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4 Dw/CeAQYFggACQUCYkMx+QIbDAAhCRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa 7SO0b8qLQMw02YoBAOwG3hB8S5NBjdam/kRWvRjS8LMZDsVICPpOrwhQXkRl AQDFe4bzH3MY16IqrIq70QSCxqLJ0Ao+NYb1whc/mXYOAA== =p5Q+ -----END PGP PRIVATE KEY BLOCK-----`, }); const targetKey = await openpgp_readPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8Kx10EYkMx+RIKKwYBBAGXVQEFAQEHQJDj Vd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+I8Yc2OQKAwEIBwAA/2Ikos/IDw3u CSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4Dw/CeAQYFggACQUCYkMx+QIbDAAh CRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa7SO0b8qLQMw02YoBAOwG3hB8S5NB jdam/kRWvRjS8LMZDsVICPpOrwhQXkRlAQDFe4bzH3MY16IqrIq70QSCxqLJ 0Ao+NYb1whc/mXYOAA== =AjeC -----END PGP PRIVATE KEY BLOCK-----`, }); const sourceKeyRef = await CryptoWorker.importPublicKey({ armoredKey: sourceKey.armor() }); const targetKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: targetKey.armor(), passphrase: null, }); await CryptoWorker.replaceUserIDs({ sourceKey: sourceKeyRef, targetKey: targetKeyRef }); const exportedSourceKey = await openpgp_readKey({ armoredKey: await CryptoWorker.exportPublicKey({ key: sourceKeyRef }), }); const exportedTargetKey = await openpgp_readKey({ armoredKey: await CryptoWorker.exportPublicKey({ key: targetKeyRef }), }); // source key users should be unchanged expect(sourceKey.getUserIDs()).to.deep.equal(exportedSourceKey.getUserIDs()); expect((await sourceKey.getPrimaryUser()).user.userID).to.deep.equal( (await 
exportedSourceKey.getPrimaryUser()).user.userID ); // target key users should have changed expect(targetKey.getUserIDs()).to.not.deep.equal(exportedTargetKey.getUserIDs()); expect(sourceKey.getUserIDs()).to.deep.equal(exportedTargetKey.getUserIDs()); expect((await sourceKey.getPrimaryUser()).user.userID).to.deep.equal( (await exportedTargetKey.getPrimaryUser()).user.userID ); }); it('cloneKeyAndChangeUserIDs - the returned key user IDs are correct', async () => { const sourceKey = await openpgp_readKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8KzQ88YkB3b3JrZXIudGVzdD7CiQQQFgoA GgUCYkMx+QQLCQcIAxUICgQWAAIBAhsDAh4BACEJECO0b8qLQMw0FiEEYiHK mAo/cFLglZrtI7RvyotAzDSSNwD+JDTJNbf8/0u9QUS3liusBKk5qKUPXG+j ezH+Sgw1wagA/36wOxNMHxVUJXBjYiOIrZjcUKwXPR2pjke6zgntRuQOx10E YkMx+RIKKwYBBAGXVQEFAQEHQJDjVd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+ I8Yc2OQKAwEIBwAA/2Ikos/IDw3uCSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4 Dw/CeAQYFggACQUCYkMx+QIbDAAhCRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa 7SO0b8qLQMw02YoBAOwG3hB8S5NBjdam/kRWvRjS8LMZDsVICPpOrwhQXkRl AQDFe4bzH3MY16IqrIq70QSCxqLJ0Ao+NYb1whc/mXYOAA== =p5Q+ -----END PGP PRIVATE KEY BLOCK-----`, }); const sourceKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: sourceKey.armor(), passphrase: null }); const updatedKeyRef = await CryptoWorker.cloneKeyAndChangeUserIDs({ privateKey: sourceKeyRef, userIDs: [{ email: 'new1@pm.me' }, { email: 'new2@pm.me' }], }); expect(updatedKeyRef.getUserIDs()).to.deep.equal(['<new1@pm.me>', '<new2@pm.me>']); expect(updatedKeyRef.getUserIDs()).to.not.deep.equal(sourceKeyRef.getUserIDs()); expect(updatedKeyRef.getFingerprint()).to.deep.equal(sourceKeyRef.getFingerprint()); const exportedSourceKey = 
await openpgp_readKey({ armoredKey: await CryptoWorker.exportPublicKey({ key: sourceKeyRef }), }); const exportedUpdatedKey = await openpgp_readKey({ armoredKey: await CryptoWorker.exportPublicKey({ key: updatedKeyRef }), }); // source key users should be unchanged const sourcePrimaryUser = await sourceKey.getPrimaryUser(); expect(sourceKey.getUserIDs()).to.deep.equal(exportedSourceKey.getUserIDs()); expect(sourcePrimaryUser.user.userID).to.deep.equal((await exportedSourceKey.getPrimaryUser()).user.userID); // target key users should have changed const updatedPrimaryUser = await exportedUpdatedKey.getPrimaryUser(); expect(exportedUpdatedKey.getUserIDs()).to.deep.equal(['<new1@pm.me>', '<new2@pm.me>']); expect(sourceKey.getUserIDs()).to.not.deep.equal(exportedUpdatedKey.getUserIDs()); expect(sourcePrimaryUser.user.userID).to.not.deep.equal(updatedPrimaryUser.user.userID); expect(updatedPrimaryUser.user.userID?.userID).to.equal('<new1@pm.me>'); }); it('cloneKeyAndChangeUserIDs - the returned key is equivalent to the original one', async () => { const originalKey = await openpgp_readPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8KzQ88YkB3b3JrZXIudGVzdD7CiQQQFgoA GgUCYkMx+QQLCQcIAxUICgQWAAIBAhsDAh4BACEJECO0b8qLQMw0FiEEYiHK mAo/cFLglZrtI7RvyotAzDSSNwD+JDTJNbf8/0u9QUS3liusBKk5qKUPXG+j ezH+Sgw1wagA/36wOxNMHxVUJXBjYiOIrZjcUKwXPR2pjke6zgntRuQOx10E YkMx+RIKKwYBBAGXVQEFAQEHQJDjVd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+ I8Yc2OQKAwEIBwAA/2Ikos/IDw3uCSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4 Dw/CeAQYFggACQUCYkMx+QIbDAAhCRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa 7SO0b8qLQMw02YoBAOwG3hB8S5NBjdam/kRWvRjS8LMZDsVICPpOrwhQXkRl 
AQDFe4bzH3MY16IqrIq70QSCxqLJ0Ao+NYb1whc/mXYOAA== =p5Q+ -----END PGP PRIVATE KEY BLOCK-----`, }); const originalKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: originalKey.armor(), passphrase: null, }); const updatedKeyRef = await CryptoWorker.cloneKeyAndChangeUserIDs({ privateKey: originalKeyRef, userIDs: { email: 'updated@worker.com' }, }); const exportedOriginalKey = await openpgp_readKey({ armoredKey: await CryptoWorker.exportPrivateKey({ privateKey: originalKeyRef, passphrase: null }), }); const exportedUpdatedKey = await openpgp_readKey({ armoredKey: await CryptoWorker.exportPrivateKey({ privateKey: updatedKeyRef, passphrase: null }), }); // original key should be unchanged expect(originalKey.write()).to.deep.equal(exportedOriginalKey.write()); // keys should be identical when ignoring the users exportedOriginalKey.users = []; exportedUpdatedKey.users = []; expect(exportedUpdatedKey.write()).to.deep.equal(exportedOriginalKey.write()); }); it('cloneKeyAndChangeUserIDs - the returned key has a separate key reference', async () => { const passphrase = 'passphrase'; const originalKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xYYEYjh/NRYJKwYBBAHaRw8BAQdAAJW2i9biFMIXiH15J6vGU1GCAqcp5utw C+y+CeZ+h4L+CQMI/K3Ebi8BpsUAzexw43SwgpD0mDGd/d4ORX77AiUoq/rp DKjS+0lpIszAa6SVWcA6xQZsz1ztdNBktEg4t/gybivH88kGTIprO/HWetM+ j80RPHRlc3RAd29ya2VyLmNvbT7CjAQQFgoAHQUCYjh/NQQLCQcIAxUICgQW AAIBAhkBAhsDAh4BACEJEFx55sPEaXlKFiEE+PdMNIqw4jCyqqnuXHnmw8Rp eUoC8QD+NdQzOAWdIJEp1eMeEa3xx9rkCpD2TXUeV7goHtixyQIBANcgmRTg gN0O2hdiL9kjN4MPhbkz3dNTpkiO/K6O8UIDx4sEYjh/NRIKKwYBBAGXVQEF AQEHQF3XUaFXbb6O9Qcas72x5nhNupZ3iIrIx8wKeUdgdkBNAwEIB/4JAwjK CPlfkyHxBABYJC70HwO36TjRBxROY480CvL40r1bJ3NSLlV4aIZXLP2723PH tsnD3fhK5ZbGqC7FCmmDKEh1ibl3Lw6rEoE0Z6Fq72x6wngEGBYIAAkFAmI4 fzUCGwwAIQkQXHnmw8RpeUoWIQT490w0irDiMLKqqe5ceebDxGl5Sl9wAQC+ 9Jb0r5pG7sMbNclmp3s1OIfWG9tJ9RoXSHU/bCFHlgEA/ggjJKzRuja0MWZ6 8IDTErKCgaYSPES5+mwT27LYvw0= =D7EW -----END PGP PRIVATE KEY 
BLOCK-----`, passphrase, }); const updatedKeyRef = await CryptoWorker.cloneKeyAndChangeUserIDs({ privateKey: originalKeyRef, userIDs: { email: 'updated@worker.com' }, }); expect(updatedKeyRef.getUserIDs()).to.have.length(1); expect(updatedKeyRef.getUserIDs().includes('<updated@worker.com>')); expect(originalKeyRef.getUserIDs()).to.have.length(1); expect(originalKeyRef.getUserIDs()).includes('<test@worker.com>'); await CryptoWorker.clearKey({ key: originalKeyRef }); // this clears the private params as well const armoredKey = await CryptoWorker.exportPrivateKey({ privateKey: updatedKeyRef, passphrase }); const decryptedKeyFromArmored = await openpgp_decryptKey({ privateKey: await openpgp_readPrivateKey({ armoredKey }), passphrase, }); expect(decryptedKeyFromArmored.isDecrypted()).to.be.true; }); it('generateE2EEForwardingMaterial - the generated key is encrypted', async () => { const bobKey = await CryptoWorker.importPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8KzQ88YkB3b3JrZXIudGVzdD7CiQQQFgoA GgUCYkMx+QQLCQcIAxUICgQWAAIBAhsDAh4BACEJECO0b8qLQMw0FiEEYiHK mAo/cFLglZrtI7RvyotAzDSSNwD+JDTJNbf8/0u9QUS3liusBKk5qKUPXG+j ezH+Sgw1wagA/36wOxNMHxVUJXBjYiOIrZjcUKwXPR2pjke6zgntRuQOx10E YkMx+RIKKwYBBAGXVQEFAQEHQJDjVd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+ I8Yc2OQKAwEIBwAA/2Ikos/IDw3uCSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4 Dw/CeAQYFggACQUCYkMx+QIbDAAhCRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa 7SO0b8qLQMw02YoBAOwG3hB8S5NBjdam/kRWvRjS8LMZDsVICPpOrwhQXkRl AQDFe4bzH3MY16IqrIq70QSCxqLJ0Ao+NYb1whc/mXYOAA== =p5Q+ -----END PGP PRIVATE KEY BLOCK-----`, passphrase: null, }); const { proxyInstances, forwardeeKey } = await CryptoWorker.generateE2EEForwardingMaterial({ 
forwarderKey: bobKey, userIDsForForwardeeKey: { email: 'bob@test.com', comment: 'Forwarding from Bob' }, passphrase: 'passphrase', }); expect(proxyInstances).to.have.length(1); expect(proxyInstances[0].proxyParameter).to.have.length(32); const charlieKey = await CryptoWorker.importPrivateKey({ armoredKey: forwardeeKey, passphrase: 'passphrase', }); expect(charlieKey.equals(bobKey)).to.be.false; // sanity check expect(charlieKey.subkeys.length).to.equal(1); expect(proxyInstances[0].keyVersion).to.equal(4); expect(arrayToHexString(proxyInstances[0].forwarderKeyFingerprint)).to.include(bobKey.subkeys[0].getKeyID()); expect(arrayToHexString(proxyInstances[0].forwardeeKeyFingerprint)).to.include( charlieKey.subkeys[0].getKeyID() ); }); it('generateE2EEForwardingMaterial - supports proxying multiple subkeys', async () => { // three subkeys, where the middle one cannot encrypt. the other 2 are compatible with forwarding. const bobKey = await CryptoWorker.importPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEZPCzChYJKwYBBAHaRw8BAQdAzNwn+VrPldqod1clidK65VV9A8Z7EP42 nBsRLC5VbkYAAQDk09zgBMPtfL+yECBFUfxbxFyTTljJWopHlJRw1mOYQxEr zRJCb2IgPGluZm9AYm9iLmNvbT7CjAQQFgoAPgWCZPCzCgQLCQcICZBGtkcg A5qpvQMVCAoEFgACAQIZAQKbAwIeARYhBFDSH63FH1VAxOw+4Ea2RyADmqm9 AABWGwD+IQWdHPmJaFf1Bez5Pw9nnHAB0GWcg4gY46I4lSIgTbQA/iYzMNjz 8hYexOvGf7hYuDBqlKxiVIuuzEEd8QEKORcAx10EZPCzChIKKwYBBAGXVQEF AQEHQBGVJmsdqGIHqvcg/yZGRVXargSkWcZudHxB2hYOwH9cAwEIBwAA/0qX 4OS46seRkKl1tRmOx8cLGCvrCCIOV4BRIFHEbyBAD9PCeAQYFggAKgWCZPCz CgmQRrZHIAOaqb0CmwwWIQRQ0h+txR9VQMTsPuBGtkcgA5qpvQAAmWcA/R/F dQiOqdLbQ7F46lgS6T18DJ8g64GaX0vQG283xunHAQDtImMFRT0j5MFVRBmf 0T/i5VJRlKxdMqV5+KIId1l2BsdYBGTwswoWCSsGAQQB2kcPAQEHQIV3i+Nv Q9kDBvM/4cuglV0EGYhLvsDT9VUOu1eV+WiZAAEAxrJslu4wUCzNf9I+c5P/ 73Q9Aoy9h8uThRLsBFvM5UgRIcLALwQYFgoAoQWCZPCzCgmQRrZHIAOaqb0C mwJ2oAQZFgoAJwWCZPCzCgmQxpgQadwIqbMWIQQQBJo9CdJeHhXAYrXGmBBp 3AipswAA+aQBAKvKgxuGRmQcwRcQ0BGYLWHHmCjXR37hboZtpVhcJ4q9AQDb 
OLXyzKjb5ZEIXcMD2IbyucPdijCC6pz6TM0XeD/7BRYhBFDSH63FH1VAxOw+ 4Ea2RyADmqm9AACmowD/QD6kaeQ8hBqI0133Q/xQ4onW1YnasvUSVgumILUP FY4BAOY4aXiZdCsiALm9FwzlQzAabwv6r5qzLnTYfo4yjnABx10EZPCzChIK KwYBBAGXVQEFAQEHQP1eT6H0RIV9HGF2QFnI86T733sHBeckitHkpF8WyIwJ AwEIBwAA/3uWKfmjoblcKAeEKmQ8dcssCOG6xiCRFNPZ2oJ1LVVYD/nCeAQY FggAKgWCZPCzCgmQRrZHIAOaqb0CmwwWIQRQ0h+txR9VQMTsPuBGtkcgA5qp vQAAn/gBAIgdrWJLDYdgCLQXXzfdpW7KUyq4YTXPa++wWlX9MAxsAP9iHWYQ RudYbmMe/pzU8NRMIy8Ldd06k4vd0sClRAeGDg== =XyY6 -----END PGP PRIVATE KEY BLOCK-----`, passphrase: null, }); const { proxyInstances, forwardeeKey } = await CryptoWorker.generateE2EEForwardingMaterial({ forwarderKey: bobKey, userIDsForForwardeeKey: { email: 'bob@test.com', comment: 'Forwarding from Bob' }, passphrase: 'passphrase', }); expect(proxyInstances).to.have.length(2); const bobForwardingSubkeys = [bobKey.subkeys[0], bobKey.subkeys[2]]; // second subkey is sign-only const charlieKey = await CryptoWorker.importPrivateKey({ armoredKey: forwardeeKey, passphrase: 'passphrase', }); expect(charlieKey.equals(bobKey)).to.be.false; // sanity check expect(charlieKey.subkeys.length).to.equal(2); proxyInstances.forEach((proxyInstance, i) => { expect(proxyInstance.proxyParameter).to.have.length(32); expect(proxyInstance.keyVersion).to.equal(4); expect(arrayToHexString(proxyInstance.forwarderKeyFingerprint)).to.include( bobForwardingSubkeys[i].getKeyID() ); expect(arrayToHexString(proxyInstance.forwardeeKeyFingerprint)).to.include( charlieKey.subkeys[i].getKeyID() ); }); }); it('generateE2EEForwardingMaterial - throws on unsuitable forwarder key (NIST P256)', async () => { const bobKey = await CryptoWorker.importPrivateKey({ armoredKey: keyWithP256AndCurve25519Subkeys, passphrase: null, }); await expect( CryptoWorker.generateE2EEForwardingMaterial({ forwarderKey: bobKey, userIDsForForwardeeKey: { email: 'bob@test.com', comment: 'Forwarding from Bob' }, passphrase: 'passphrase', }) ).to.be.rejectedWith(/unsuitable for forwarding/); }); 
it('doesKeySupportE2EEForwarding - returns true on newly generated key', async () => { // this test is a sanity check of our defaults const bobKey = await CryptoWorker.generateKey({ userIDs: { email: 'bob@test.com' } }); expect(await CryptoWorker.doesKeySupportE2EEForwarding({ forwarderKey: bobKey })).to.be.true; }); it('doesKeySupportE2EEForwarding - returns false for P256 key', async () => { const bobKey = await CryptoWorker.importPrivateKey({ armoredKey: keyWithP256AndCurve25519Subkeys, passphrase: null, }); expect(await CryptoWorker.doesKeySupportE2EEForwarding({ forwarderKey: bobKey })).to.be.false; }); it('isE2EEForwardingKey', async () => { const charlieKeyEncrypted = await CryptoWorker.importPublicKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xYYEZAdtGBYJKwYBBAHaRw8BAQdAcNgHyRGEaqGmzEqEwCobfUkyrJnY8faB vsf9R2c5Zzb+CQMI0YEeYODMnX7/8Bm7rq3beejbyFxINLDKMehud14ePBBw 0t2bzVTtdpNDh1ck070XBO5oRF8zRzFw2ziyShz5KyA0MwQxu+B0q9rbJ2pl C80bY2hhcmxlcyA8Y2hhcmxlc0Bwcm90b24ubWU+wooEExYIADwFAmQHbRgJ kBVybZgcw4XXFiEEZdoDX5cqZdV40VFfFXJtmBzDhdcCGwMCHgECGQECCwcC FQgCFgACIgEAACSmAP9qmNejs8qOXqzc+dVKylYhKFmxKcGN12hVTc68Pm+e GAEAu815H/7eYnspb45gPyyc9/6oB+RsWUWAB/TU6VsltQLHnwRkB20YEgor BgEEAZdVAQUBAQdAsZsaCbB01k4hE1oeO6LfpMDQNYQpNSGAfVsCsZF2MiAX /woJil81dTgz/5kZrYgWFVflUJOTBRD+CQMIjcTRUSYiwLP/ectAkFq9iyz9 qXjJe4T8RAwMG7UDIhE89gwTwfbSBOxKWpg5v3H/Yk4Fi7LKrg5K3pdVxvrL sAAEJmKlJMGXnZ4HOB75NsJ4BBgWCAAqBQJkB20YCZAVcm2YHMOF1xYhBGXa A1+XKmXVeNFRXxVybZgcw4XXAhtQAACX/wD+IhLNYGwFHxWeg5V8QT5VCTpg CLtxNKNwyN+wh7P0Vi0BAMqj1oIyFSiFjKq+FmOfxpLe32Yhk9z7NEm0IfNB iaEO =Szic -----END PGP PRIVATE KEY BLOCK-----`, }); const charlieKey = await CryptoWorker.importPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEZAdtGBYJKwYBBAHaRw8BAQdAcNgHyRGEaqGmzEqEwCobfUkyrJnY8faBvsf9 R2c5ZzYAAP9bFL4nPBdo04ei0C2IAh5RXOpmuejGC3GAIn/UmL5cYQ+XzRtjaGFy bGVzIDxjaGFybGVzQHByb3Rvbi5tZT7CigQTFggAPAUCZAdtGAmQFXJtmBzDhdcW IQRl2gNflypl1XjRUV8Vcm2YHMOF1wIbAwIeAQIZAQILBwIVCAIWAAIiAQAAJKYA 
/2qY16Ozyo5erNz51UrKViEoWbEpwY3XaFVNzrw+b54YAQC7zXkf/t5ieylvjmA/ LJz3/qgH5GxZRYAH9NTpWyW1AsdxBGQHbRgSCisGAQQBl1UBBQEBB0CxmxoJsHTW TiETWh47ot+kwNA1hCk1IYB9WwKxkXYyIBf/CgmKXzV1ODP/mRmtiBYVV+VQk5MF EAAA/1NW8D8nMc2ky140sPhQrwkeR7rVLKP2fe5n4BEtAnVQEB3CeAQYFggAKgUC ZAdtGAmQFXJtmBzDhdcWIQRl2gNflypl1XjRUV8Vcm2YHMOF1wIbUAAAl/8A/iIS zWBsBR8VnoOVfEE+VQk6YAi7cTSjcMjfsIez9FYtAQDKo9aCMhUohYyqvhZjn8aS 3t9mIZPc+zRJtCHzQYmhDg== =lESj -----END PGP PRIVATE KEY BLOCK-----`, passphrase: null, }); const bobKey = await CryptoWorker.importPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEZAdtGBYJKwYBBAHaRw8BAQdAGzrOpvCFCxQ6hmpP52fBtbYmqkPM+TF9oBei x9QWcnEAAQDa54PERHLvDqIMo0f03+mJXMTR3Dwq+qi5LTaflQFDGxEdzRNib2Ig PGJvYkBwcm90b24ubWU+wooEExYIADwFAmQHbRgJkCLL+xMJ+Hy4FiEEm77zV6Zb syLVIzOyIsv7Ewn4fLgCGwMCHgECGQECCwcCFQgCFgACIgEAAAnFAPwPoXgScgPr KQFzu1ltPuHodEaDTtb+/wRQ1oAbuSdDgQD7B82NJgyEZInC/4Bwuc+ysFgaxW2W gtypuW5vZm44FAzHXQRkB20YEgorBgEEAZdVAQUBAQdAeUTOhlO2RBUGH6B7127u a82Mmjv62/GKZMpbNFJgqAcDAQoJAAD/Sd14Xkjfy1l8r0vQ5Rm+jBG4EXh2G8XC PZgMz5RLa6gQ4MJ4BBgWCAAqBQJkB20YCZAiy/sTCfh8uBYhBJu+81emW7Mi1SMz siLL+xMJ+Hy4AhsMAAAKagEA4Knj6S6nG24nuXfqkkytPlFTHwzurjv3+qqXwWL6 3RgA/Rvy/NcpCizSOL3tLLznwSag7/m6JVy9g6unU2mZ5QoI =un5O -----END PGP PRIVATE KEY BLOCK-----`, passphrase: null, }); await expect(CryptoWorker.isE2EEForwardingKey({ key: charlieKeyEncrypted })).to.eventually.be.true; await expect(CryptoWorker.isE2EEForwardingKey({ key: charlieKey })).to.eventually.be.true; await expect(CryptoWorker.isE2EEForwardingKey({ key: bobKey })).to.eventually.be.false; }); describe('Key management API', () => { it('can export a generated key', async () => { const privateKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); const passphrase = 'passphrase'; const armoredKey = await CryptoWorker.exportPrivateKey({ privateKey: privateKeyRef, passphrase }); const binaryKey = await CryptoWorker.exportPrivateKey({ privateKey: privateKeyRef, passphrase, format: 
'binary', }); const decryptedKeyFromArmored = await openpgp_decryptKey({ privateKey: await openpgp_readPrivateKey({ armoredKey }), passphrase, }); expect(decryptedKeyFromArmored.isDecrypted()).to.be.true; // @ts-ignore missing `s2k` field definition expect(decryptedKeyFromArmored.keyPacket.s2k.c).to.equal(96); // setting should be lowered since passphrases are already salted const decryptedKeyFromBinary = await openpgp_decryptKey({ privateKey: await openpgp_readPrivateKey({ binaryKey }), passphrase, }); expect(decryptedKeyFromBinary.isDecrypted()).to.be.true; }); it('can export an imported key', async () => { const passphrase = 'passphrase'; const { privateKey: keyToImport } = await generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, format: 'object', passphrase, }); const importedKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: keyToImport.armor(), passphrase }); expect(importedKeyRef.getCreationTime()).to.deep.equal(keyToImport.getCreationTime()); expect(importedKeyRef.subkeys.map((subkey) => subkey.getAlgorithmInfo())).to.deep.equal( keyToImport.subkeys.map((subkey) => subkey.getAlgorithmInfo()) ); expect(importedKeyRef.getUserIDs()).to.deep.equal(['name <email@test.com>']); const armoredPublicKey = await CryptoWorker.exportPublicKey({ key: importedKeyRef }); const exportedPublicKey = await openpgp_readKey({ armoredKey: armoredPublicKey }); expect(exportedPublicKey.isPrivate()).to.be.false; expect(exportedPublicKey.getKeyID().toHex()).equals(importedKeyRef.getKeyID()); expect(exportedPublicKey.getKeyID().equals(keyToImport.getKeyID())); const exportPassphrase = 'another passphrase'; const armoredPrivateKey = await CryptoWorker.exportPrivateKey({ privateKey: importedKeyRef, passphrase: exportPassphrase, }); const exportedPrivateKey = await openpgp_readPrivateKey({ armoredKey: armoredPrivateKey }); expect(exportedPrivateKey.getKeyID().equals(keyToImport.getKeyID())); // make sure the exported key is encrypted with the new passphrase const 
decryptedExportedKey = await openpgp_decryptKey({ privateKey: exportedPrivateKey, passphrase: exportPassphrase, }); expect(decryptedExportedKey.isDecrypted()).to.be.true; }); it('exports an unencrypted key only when given a null passphrase', async () => { const keyReference = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' } }); // empty passphrase not allowed await expect( CryptoWorker.exportPrivateKey({ privateKey: keyReference, passphrase: '' }) ).to.be.rejectedWith(/passphrase is required for key encryption/); const armoredEncryptedKey = await CryptoWorker.exportPrivateKey({ privateKey: keyReference, passphrase: 'passphrase', }); const encryptedKey = await openpgp_readPrivateKey({ armoredKey: armoredEncryptedKey }); expect(encryptedKey.isDecrypted()).to.be.false; const armoredUnencryptedKey = await CryptoWorker.exportPrivateKey({ privateKey: keyReference, passphrase: null, }); const unencryptedKey = await openpgp_readPrivateKey({ armoredKey: armoredUnencryptedKey }); expect(unencryptedKey.isDecrypted()).to.be.true; }); it('cannot import or export a public key as a private key', async () => { const passphrase = 'passphrase'; const { publicKey: publicKeyToImport } = await generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, format: 'object', passphrase, }); // this give no typescript error since serialised keys are indistinguishable for TS await expect( CryptoWorker.importPrivateKey({ armoredKey: publicKeyToImport.armor(), passphrase }) ).to.be.rejectedWith(/not of type private key/); const importedKeyRef = await CryptoWorker.importPublicKey({ armoredKey: publicKeyToImport.armor() }); expect(importedKeyRef.isPrivate()).to.be.false; expect(importedKeyRef.getCreationTime()).to.deep.equal(publicKeyToImport.getCreationTime()); // @ts-expect-error for non-private key reference await expect(CryptoWorker.exportPrivateKey({ privateKey: importedKeyRef })).to.be.rejectedWith( /Private key expected/ ); const armoredPublicKey = 
await CryptoWorker.exportPublicKey({ key: importedKeyRef }); const exportedPublicKey = await openpgp_readKey({ armoredKey: armoredPublicKey }); expect(exportedPublicKey.isPrivate()).to.be.false; expect(exportedPublicKey.getKeyID().equals(publicKeyToImport.getKeyID())); }); it('rejects importing a private key encrypted using argon2', async () => { const passphrase = 'passphrase'; const argon2Key = `-----BEGIN PGP PRIVATE KEY BLOCK----- xY8EZBsDeBYJKwYBBAHaRw8BAQdA+q1zyp3azB9V6zZSf+GejE5fiY4TUXKB 3ZhHyIfGRpj+CQSSisPQuR0D6KLh+VMUC3ajAwQQJiOXsJlZd5bzJyAckMnm EcP1IJ9cbqfUiVVyftKU5XaSs75Z4VEUMg0lkufCqvhEXq6qX+K+uENG6IIc t9ziGOMPCIEQgM0YbmFtZSA8ZW1haWxAYXJnb24yLnRlc3Q+wowEEBYKAD4F gmQbA3gECwkHCAmQqdOOOdbaF0kDFQgKBBYAAgECGQECmwMCHgEWIQTJB5NG /MI1Uadr6pWp04451toXSQAAzp0BALdGS+QDK75+4nVmsfbO49XlGm8BTcoj ul76mQ0eBXwvAPwIVBkUpVZ4mZQdigm4pUubIsw745TjlvrWQCEYFElNCceU BGQbA3gSCisGAQQBl1UBBQEBB0Dc0WBjkzK/rnUPIJuFpXLfV6Tn9D3L8tHc nwx9SURjLQMBCAf+CQRMjXT++0oAAQI7CEdQ18zOAwQQWxKyMceDiPXcySM6 TR6BoEVjr5mAoy2t4cEw1WqT/mhvwx0UET7q0bJJyOpAxwTPWSSotbEoYbzT kB98NBNP3D+QNiNCtsJ4BBgWCAAqBYJkGwN4CZCp04451toXSQKbDBYhBMkH k0b8wjVRp2vqlanTjjnW2hdJAAAEcQD7B5iqgIxMvSaT5NWQJvydNABhm2rl pD1DtUiJfTUyCKgA/jQvs7QVxXk4ixfK1f3EvD02I1whktPixZy1B0iGmrAG =jg+l -----END PGP PRIVATE KEY BLOCK-----`; await expect(CryptoWorker.importPrivateKey({ armoredKey: argon2Key, passphrase })).to.be.rejectedWith( /Keys encrypted using Argon2 are not supported yet/ ); }); it('allows importing a private key as long as it can be decrypted', async () => { const passphrase = 'passphrase'; const { privateKey } = await generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, passphrase, format: 'object', }); const importedKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: privateKey.armor(), passphrase }); expect(importedKeyRef.isPrivate()).to.be.true; await expect( CryptoWorker.importPrivateKey({ armoredKey: privateKey.armor(), passphrase: 'wrong passphrase' }) ).to.be.rejectedWith(/Error decrypting private 
key: Incorrect key passphrase/); }); it('allows importing a decrypted key only when given a null passphrase', async () => { const decryptedArmoredKey = `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYgQEWRYJKwYBBAHaRw8BAQdAhR6qir63dgL1bSt19bLFQfCIhvYnrk6f OmvFwcYNf4wAAQCV4uj6Pg+08r+ztuloyzTDAV7eC/jenjm7AdYikQ0MZxFC zQDCjAQQFgoAHQUCYgQEWQQLCQcIAxUICgQWAAIBAhkBAhsDAh4BACEJENDb nirC49EHFiEEDgVXCWrFg3oEwWgN0NueKsLj0QdayAD+O1Qq4UrAn1Tz67d7 O3uWdpRWmbgfUr7XygeyWr57crYA/0/37SvtPoI6MHyrVYijXspJlVo0ZABb dueO4TQCpPkAx10EYgQEWRIKKwYBBAGXVQEFAQEHQCVlPjHtTH0KaiZmgAeQ f1tglgIeoZuT1fYWQMR5s0QkAwEIBwAA/1T9jghk9P2FAzix+Fst0go8OQ6l clnLKMx9jFlqLmqAD57CeAQYFggACQUCYgQEWQIbDAAhCRDQ254qwuPRBxYh BA4FVwlqxYN6BMFoDdDbnirC49EHobgA/R/1yGmo8/xrdipXIWTbL38sApGf XU0oD7GPQhGsaxZjAQCmjVBDdt+CgmU9NFYwtTIWNHxxJtyf7TX7DY9RH1t2 DQ== =2Lb6 -----END PGP PRIVATE KEY BLOCK-----`; const importedKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: decryptedArmoredKey, passphrase: null, }); expect(importedKeyRef.isPrivate()).to.be.true; await expect( CryptoWorker.importPrivateKey({ armoredKey: decryptedArmoredKey, passphrase: 'passphrase' }) ).to.be.rejectedWith(/Key packet is already decrypted/); }); it('reformatted key has a separate key reference', async () => { const passphrase = 'passphrase'; const originalKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xYYEYjh/NRYJKwYBBAHaRw8BAQdAAJW2i9biFMIXiH15J6vGU1GCAqcp5utw C+y+CeZ+h4L+CQMI/K3Ebi8BpsUAzexw43SwgpD0mDGd/d4ORX77AiUoq/rp DKjS+0lpIszAa6SVWcA6xQZsz1ztdNBktEg4t/gybivH88kGTIprO/HWetM+ j80RPHRlc3RAd29ya2VyLmNvbT7CjAQQFgoAHQUCYjh/NQQLCQcIAxUICgQW AAIBAhkBAhsDAh4BACEJEFx55sPEaXlKFiEE+PdMNIqw4jCyqqnuXHnmw8Rp eUoC8QD+NdQzOAWdIJEp1eMeEa3xx9rkCpD2TXUeV7goHtixyQIBANcgmRTg gN0O2hdiL9kjN4MPhbkz3dNTpkiO/K6O8UIDx4sEYjh/NRIKKwYBBAGXVQEF AQEHQF3XUaFXbb6O9Qcas72x5nhNupZ3iIrIx8wKeUdgdkBNAwEIB/4JAwjK CPlfkyHxBABYJC70HwO36TjRBxROY480CvL40r1bJ3NSLlV4aIZXLP2723PH 
tsnD3fhK5ZbGqC7FCmmDKEh1ibl3Lw6rEoE0Z6Fq72x6wngEGBYIAAkFAmI4 fzUCGwwAIQkQXHnmw8RpeUoWIQT490w0irDiMLKqqe5ceebDxGl5Sl9wAQC+ 9Jb0r5pG7sMbNclmp3s1OIfWG9tJ9RoXSHU/bCFHlgEA/ggjJKzRuja0MWZ6 8IDTErKCgaYSPES5+mwT27LYvw0= =D7EW -----END PGP PRIVATE KEY BLOCK-----`, passphrase, }); const reformattedKeyRef = await CryptoWorker.reformatKey({ privateKey: originalKeyRef, userIDs: { email: 'reformatted@worker.com' }, }); expect(reformattedKeyRef.getUserIDs()).to.have.length(1); expect(reformattedKeyRef.getUserIDs().includes('<reformatted@worker.com>')); expect(originalKeyRef.getUserIDs()).to.have.length(1); expect(originalKeyRef.getUserIDs()).includes('<test@worker.com>'); await CryptoWorker.clearKey({ key: originalKeyRef }); // this clears the private params as well const armoredKey = await CryptoWorker.exportPrivateKey({ privateKey: reformattedKeyRef, passphrase }); const decryptedKeyFromArmored = await openpgp_decryptKey({ privateKey: await openpgp_readPrivateKey({ armoredKey }), passphrase, }); expect(decryptedKeyFromArmored.isDecrypted()).to.be.true; }); it('isWeak() - it correctly marks a weak key', async () => { const weakKeyReference = await CryptoWorker.importPublicKey({ armoredKey: rsa512BitsKey }); expect(weakKeyReference.isWeak()).to.be.true; const keyReference = await CryptoWorker.importPublicKey({ armoredKey: ecc25519Key }); expect(keyReference.isWeak()).to.be.false; }); it('equals - returns true for equal public keys', async () => { const userIDs = { name: 'name', email: 'email@test.com' }; const { privateKey, publicKey } = await generateKey({ userIDs, format: 'object' }); const privateKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: privateKey.armor(), passphrase: null, }); const publicKeyRef = await CryptoWorker.importPublicKey({ armoredKey: publicKey.armor() }); expect(privateKeyRef.equals(publicKeyRef)).to.be.true; // change expiration time const { privateKey: armoredReformattedKey } = await reformatKey({ privateKey, userIDs, keyExpirationTime: 3600, 
}); const reformattedKeyRef = await CryptoWorker.importPrivateKey({ armoredKey: armoredReformattedKey, passphrase: null, }); expect(privateKeyRef.equals(reformattedKeyRef)).to.be.false; }); it('equals - can ignore third-party certifications', async () => { const publicKey = await openpgp_readKey({ armoredKey: keyWithThirdPartyCertifications }); expect(publicKey.users[0].otherCertifications).to.have.length(1); publicKey.users[0].otherCertifications = []; const publicKeyRef = await CryptoWorker.importPublicKey({ armoredKey: publicKey.armor(), }); const certifiedPublicKeyRef = await CryptoWorker.importPublicKey({ armoredKey: keyWithThirdPartyCertifications, }); expect(certifiedPublicKeyRef.equals(publicKeyRef)).to.be.false; expect(certifiedPublicKeyRef.equals(publicKeyRef, true)).to.be.true; }); it('clearKey - cannot reference a cleared key', async () => { const privateKeyRef = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); // confirm key is in the store expect(await CryptoWorker.exportPublicKey({ key: privateKeyRef })).length.above(0); await CryptoWorker.clearKey({ key: privateKeyRef }); await expect(CryptoWorker.exportPublicKey({ key: privateKeyRef })).to.be.rejectedWith(/Key not found/); }); it('clearKeyStore - cannot reference any key after clearing the store', async () => { const privateKeyRef1 = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); const privateKeyRef2 = await CryptoWorker.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); // (lazily) confirm that keys are in the store expect(await CryptoWorker.exportPublicKey({ key: privateKeyRef1 })).length.above(0); expect(await CryptoWorker.exportPublicKey({ key: privateKeyRef2 })).length.above(0); await CryptoWorker.clearKeyStore(); await expect(CryptoWorker.exportPublicKey({ key: privateKeyRef1 })).to.be.rejectedWith(/Key not found/); await expect(CryptoWorker.exportPublicKey({ key: privateKeyRef2 
})).to.be.rejectedWith(/Key not found/); }); }); });
7,259
0
petrpan-code/ProtonMail/WebClients/packages/crypto/test
petrpan-code/ProtonMail/WebClients/packages/crypto/test/worker/workerPool.spec.ts
import { use as chaiUse, expect } from 'chai'; import chaiAsPromised from 'chai-as-promised'; import { generateKey } from 'pmcrypto'; import { decryptKey as openpgp_decryptKey, readKey as openpgp_readKey, readPrivateKey as openpgp_readPrivateKey, } from 'pmcrypto/lib/openpgp'; import { VERIFICATION_STATUS } from '../../lib'; import { arrayToHexString } from '../../lib/utils'; import { CryptoWorkerPool } from '../../lib/worker/workerPool'; chaiUse(chaiAsPromised); describe('Worker Pool', () => { const poolSize = 2; before(async () => { await CryptoWorkerPool.init({ poolSize }); }); afterEach(async () => { await CryptoWorkerPool.clearKeyStore(); }); after(async () => { await CryptoWorkerPool.destroy(); }); it('should encrypt/sign and decrypt/verify text and binary data', async () => { const aliceKeyRef = await CryptoWorkerPool.generateKey({ userIDs: { name: 'alice', email: 'alice@test.com' } }); const bobKeyRef = await CryptoWorkerPool.generateKey({ userIDs: { name: 'bob', email: 'bob@test.com' } }); const { message: encryptedArmoredMessage } = await CryptoWorkerPool.encryptMessage({ textData: 'hello world', encryptionKeys: bobKeyRef, signingKeys: aliceKeyRef, }); const textDecryptionResult = await CryptoWorkerPool.decryptMessage({ armoredMessage: encryptedArmoredMessage, decryptionKeys: bobKeyRef, verificationKeys: aliceKeyRef, }); expect(textDecryptionResult.data).to.equal('hello world'); expect(textDecryptionResult.signatures).to.have.length(1); expect(textDecryptionResult.verificationErrors).to.not.exist; expect(textDecryptionResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); const { message: encryptedBinaryMessage } = await CryptoWorkerPool.encryptMessage({ binaryData: new Uint8Array([1, 2, 3]), encryptionKeys: bobKeyRef, signingKeys: aliceKeyRef, format: 'binary', }); const binaryDecryptionResult = await CryptoWorkerPool.decryptMessage({ binaryMessage: encryptedBinaryMessage, decryptionKeys: bobKeyRef, verificationKeys: aliceKeyRef, format: 
'binary', }); expect(binaryDecryptionResult.data).to.deep.equal(new Uint8Array([1, 2, 3])); expect(binaryDecryptionResult.signatures).to.have.length(1); expect(binaryDecryptionResult.verificationErrors).to.not.exist; expect(binaryDecryptionResult.verified).to.equal(VERIFICATION_STATUS.SIGNED_AND_VALID); }); it('computeHashStream - the hash instance should not be disrupted with multiple workers', async () => { const data = new Uint8Array(100).fill(1); const dataStream = new ReadableStream<Uint8Array>({ pull: (controller) => { for (let i = 0; i < 10; i++) { controller.enqueue(data.subarray(i, i + 10)); } controller.close(); }, }); const testHashSHA1Streamed = await CryptoWorkerPool.computeHashStream({ algorithm: 'unsafeSHA1', dataStream, }).then(arrayToHexString); const testHashSHA1 = await CryptoWorkerPool.computeHash({ algorithm: 'unsafeSHA1', data }).then( arrayToHexString ); expect(testHashSHA1Streamed).to.equal(testHashSHA1); }); it('replaceUserIDs - the target key should be updated in all workers', async () => { const sourceKey = await openpgp_readKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8KzQ88YkB3b3JrZXIudGVzdD7CiQQQFgoA GgUCYkMx+QQLCQcIAxUICgQWAAIBAhsDAh4BACEJECO0b8qLQMw0FiEEYiHK mAo/cFLglZrtI7RvyotAzDSSNwD+JDTJNbf8/0u9QUS3liusBKk5qKUPXG+j ezH+Sgw1wagA/36wOxNMHxVUJXBjYiOIrZjcUKwXPR2pjke6zgntRuQOx10E YkMx+RIKKwYBBAGXVQEFAQEHQJDjVd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+ I8Yc2OQKAwEIBwAA/2Ikos/IDw3uCSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4 Dw/CeAQYFggACQUCYkMx+QIbDAAhCRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa 7SO0b8qLQMw02YoBAOwG3hB8S5NBjdam/kRWvRjS8LMZDsVICPpOrwhQXkRl AQDFe4bzH3MY16IqrIq70QSCxqLJ0Ao+NYb1whc/mXYOAA== =p5Q+ -----END PGP PRIVATE KEY 
BLOCK-----`, }); const targetKey = await openpgp_readPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8Kx10EYkMx+RIKKwYBBAGXVQEFAQEHQJDj Vd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+I8Yc2OQKAwEIBwAA/2Ikos/IDw3u CSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4Dw/CeAQYFggACQUCYkMx+QIbDAAh CRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa7SO0b8qLQMw02YoBAOwG3hB8S5NB jdam/kRWvRjS8LMZDsVICPpOrwhQXkRlAQDFe4bzH3MY16IqrIq70QSCxqLJ 0Ao+NYb1whc/mXYOAA== =AjeC -----END PGP PRIVATE KEY BLOCK-----`, }); const sourceKeyRef = await CryptoWorkerPool.importPublicKey({ armoredKey: sourceKey.armor() }); const targetKeyRef = await CryptoWorkerPool.importPrivateKey({ armoredKey: targetKey.armor(), passphrase: null, }); await CryptoWorkerPool.replaceUserIDs({ sourceKey: sourceKeyRef, targetKey: targetKeyRef }); const exportedTargetKeys = await Promise.all( new Array(poolSize).fill(null).map(async () => openpgp_readKey({ armoredKey: await CryptoWorkerPool.exportPublicKey({ key: targetKeyRef }), }) ) ); exportedTargetKeys.forEach((exportedTargetKey) => { expect(sourceKey.getUserIDs()).to.deep.equal(exportedTargetKey.getUserIDs()); }); }); it('cloneKeyAndChangeUserIDs - the target key should be updated in all workers', async () => { const sourceKey = await openpgp_readKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xVgEYkMx+RYJKwYBBAHaRw8BAQdA2wiwC/FbumCQYlJAEHeRCm2GZD0S1aPt BG6ZcpuehWUAAQDpWPNfvUtTnn6AiJ/xEQ09so7ZWF+2GHlaOglSQUADwQ5J zQ88Y0B3b3JrZXIudGVzdD7CiQQQFgoAGgUCYkMx+QQLCQcIAxUICgQWAAIB AhsDAh4BACEJECO0b8qLQMw0FiEEYiHKmAo/cFLglZrtI7RvyotAzDRu6QEA mbhLi00tsTr7hmJxIPw4JLHGw8UVvztUfeyFE6ZqAIsBAJtF8P9pcZxHKb58 nNamH0U5+cC+9hN9uw2pn51NIY8KzQ88YkB3b3JrZXIudGVzdD7CiQQQFgoA 
GgUCYkMx+QQLCQcIAxUICgQWAAIBAhsDAh4BACEJECO0b8qLQMw0FiEEYiHK mAo/cFLglZrtI7RvyotAzDSSNwD+JDTJNbf8/0u9QUS3liusBKk5qKUPXG+j ezH+Sgw1wagA/36wOxNMHxVUJXBjYiOIrZjcUKwXPR2pjke6zgntRuQOx10E YkMx+RIKKwYBBAGXVQEFAQEHQJDjVd81zZuOdxAkjMe6Y+8Bj8gF9PKBkMJ+ I8Yc2OQKAwEIBwAA/2Ikos/IDw3uCSa6DGRoMDzQzZSwyzIO0XhoP9cgKSb4 Dw/CeAQYFggACQUCYkMx+QIbDAAhCRAjtG/Ki0DMNBYhBGIhypgKP3BS4JWa 7SO0b8qLQMw02YoBAOwG3hB8S5NBjdam/kRWvRjS8LMZDsVICPpOrwhQXkRl AQDFe4bzH3MY16IqrIq70QSCxqLJ0Ao+NYb1whc/mXYOAA== =p5Q+ -----END PGP PRIVATE KEY BLOCK-----`, }); const sourceKeyRef = await CryptoWorkerPool.importPrivateKey({ armoredKey: sourceKey.armor(), passphrase: null, }); const updateKeyRef = await CryptoWorkerPool.cloneKeyAndChangeUserIDs({ privateKey: sourceKeyRef, userIDs: { email: 'updated@worker.com' }, }); const exportedTargetKeys = await Promise.all( new Array(poolSize).fill(null).map(async () => openpgp_readKey({ armoredKey: await CryptoWorkerPool.exportPublicKey({ key: updateKeyRef }), }) ) ); exportedTargetKeys.forEach((exportedTargetKey) => { expect(exportedTargetKey.getUserIDs()).to.deep.equal(['<updated@worker.com>']); }); }); describe('Key management API', () => { it('can export a generated key', async () => { const privateKeyRef = await CryptoWorkerPool.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); const passphrase = 'passphrase'; const armoredKey = await CryptoWorkerPool.exportPrivateKey({ privateKey: privateKeyRef, passphrase }); const binaryKey = await CryptoWorkerPool.exportPrivateKey({ privateKey: privateKeyRef, passphrase, format: 'binary', }); const decryptedKeyFromArmored = await openpgp_decryptKey({ privateKey: await openpgp_readPrivateKey({ armoredKey }), passphrase, }); expect(decryptedKeyFromArmored.isDecrypted()).to.be.true; const decryptedKeyFromBinary = await openpgp_decryptKey({ privateKey: await openpgp_readPrivateKey({ binaryKey }), passphrase, }); expect(decryptedKeyFromBinary.isDecrypted()).to.be.true; }); it('can export an imported key', async () 
=> { const passphrase = 'passphrase'; const { privateKey: keyToImport } = await generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, format: 'object', passphrase, }); const importedKeyRef = await CryptoWorkerPool.importPrivateKey({ armoredKey: keyToImport.armor(), passphrase, }); expect(importedKeyRef.getCreationTime()).to.deep.equal(keyToImport.getCreationTime()); expect(importedKeyRef.subkeys.map((subkey) => subkey.getAlgorithmInfo())).to.deep.equal( keyToImport.subkeys.map((subkey) => subkey.getAlgorithmInfo()) ); expect(importedKeyRef.getUserIDs()).to.deep.equal(['name <email@test.com>']); const armoredPublicKey = await CryptoWorkerPool.exportPublicKey({ key: importedKeyRef }); const exportedPublicKey = await openpgp_readKey({ armoredKey: armoredPublicKey }); expect(exportedPublicKey.isPrivate()).to.be.false; expect(exportedPublicKey.getKeyID().toHex()).equals(importedKeyRef.getKeyID()); expect(exportedPublicKey.getKeyID().equals(keyToImport.getKeyID())); const exportPassphrase = 'another passphrase'; const armoredPrivateKey = await CryptoWorkerPool.exportPrivateKey({ privateKey: importedKeyRef, passphrase: exportPassphrase, }); const exportedPrivateKey = await openpgp_readPrivateKey({ armoredKey: armoredPrivateKey }); expect(exportedPrivateKey.getKeyID().equals(keyToImport.getKeyID())); // make sure the exported key is encrypted with the new passphrase const decryptedExportedKey = await openpgp_decryptKey({ privateKey: exportedPrivateKey, passphrase: exportPassphrase, }); expect(decryptedExportedKey.isDecrypted()).to.be.true; }); it('reformatted key has a separate key reference', async () => { const passphrase = 'passphrase'; const originalKeyRef = await CryptoWorkerPool.importPrivateKey({ armoredKey: `-----BEGIN PGP PRIVATE KEY BLOCK----- xYYEYjh/NRYJKwYBBAHaRw8BAQdAAJW2i9biFMIXiH15J6vGU1GCAqcp5utw C+y+CeZ+h4L+CQMI/K3Ebi8BpsUAzexw43SwgpD0mDGd/d4ORX77AiUoq/rp DKjS+0lpIszAa6SVWcA6xQZsz1ztdNBktEg4t/gybivH88kGTIprO/HWetM+ 
j80RPHRlc3RAd29ya2VyLmNvbT7CjAQQFgoAHQUCYjh/NQQLCQcIAxUICgQW AAIBAhkBAhsDAh4BACEJEFx55sPEaXlKFiEE+PdMNIqw4jCyqqnuXHnmw8Rp eUoC8QD+NdQzOAWdIJEp1eMeEa3xx9rkCpD2TXUeV7goHtixyQIBANcgmRTg gN0O2hdiL9kjN4MPhbkz3dNTpkiO/K6O8UIDx4sEYjh/NRIKKwYBBAGXVQEF AQEHQF3XUaFXbb6O9Qcas72x5nhNupZ3iIrIx8wKeUdgdkBNAwEIB/4JAwjK CPlfkyHxBABYJC70HwO36TjRBxROY480CvL40r1bJ3NSLlV4aIZXLP2723PH tsnD3fhK5ZbGqC7FCmmDKEh1ibl3Lw6rEoE0Z6Fq72x6wngEGBYIAAkFAmI4 fzUCGwwAIQkQXHnmw8RpeUoWIQT490w0irDiMLKqqe5ceebDxGl5Sl9wAQC+ 9Jb0r5pG7sMbNclmp3s1OIfWG9tJ9RoXSHU/bCFHlgEA/ggjJKzRuja0MWZ6 8IDTErKCgaYSPES5+mwT27LYvw0= =D7EW -----END PGP PRIVATE KEY BLOCK-----`, passphrase, }); const reformattedKeyRef = await CryptoWorkerPool.reformatKey({ privateKey: originalKeyRef, userIDs: { email: 'reformatted@worker.com' }, }); expect(reformattedKeyRef.getUserIDs()).to.have.length(1); expect(reformattedKeyRef.getUserIDs().includes('<reformatted@worker.com>')); expect(originalKeyRef.getUserIDs()).to.have.length(1); expect(originalKeyRef.getUserIDs()).includes('<test@worker.com>'); await CryptoWorkerPool.clearKey({ key: originalKeyRef }); // this clears the private params as well const armoredKey = await CryptoWorkerPool.exportPrivateKey({ privateKey: reformattedKeyRef, passphrase }); const decryptedKeyFromArmored = await openpgp_decryptKey({ privateKey: await openpgp_readPrivateKey({ armoredKey }), passphrase, }); expect(decryptedKeyFromArmored.isDecrypted()).to.be.true; }); it('clearKey - cannot reference a cleared key', async () => { const privateKeyRef = await CryptoWorkerPool.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); // confirm key is in the store expect(await CryptoWorkerPool.exportPublicKey({ key: privateKeyRef })).length.above(0); await CryptoWorkerPool.clearKey({ key: privateKeyRef }); await expect(CryptoWorkerPool.exportPublicKey({ key: privateKeyRef })).to.be.rejectedWith(/Key not found/); }); it('clearKeyStore - cannot reference any key after clearing the store', async () => { const 
privateKeyRef1 = await CryptoWorkerPool.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); const privateKeyRef2 = await CryptoWorkerPool.generateKey({ userIDs: { name: 'name', email: 'email@test.com' }, }); // (lazily) confirm that keys are in the store expect(await CryptoWorkerPool.exportPublicKey({ key: privateKeyRef1 })).length.above(0); expect(await CryptoWorkerPool.exportPublicKey({ key: privateKeyRef2 })).length.above(0); await CryptoWorkerPool.clearKeyStore(); await expect(CryptoWorkerPool.exportPublicKey({ key: privateKeyRef1 })).to.be.rejectedWith(/Key not found/); await expect(CryptoWorkerPool.exportPublicKey({ key: privateKeyRef2 })).to.be.rejectedWith(/Key not found/); }); }); });
7,260
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/.eslintignore
test/**/*data.js
7,261
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/.eslintrc.js
module.exports = { extends: ['@proton/eslint-config-proton'], parser: '@typescript-eslint/parser', parserOptions: { tsconfigRootDir: __dirname, project: './tsconfig.json', }, ignorePatterns: ['.eslintrc.js'], };
7,262
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/.prettierignore
test
7,263
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/LICENSE
The MIT License (MIT) Copyright (c) 2019 by Proton Technologies A.G. (Switzerland) Email: contact@protonmail.com Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
7,264
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/package.json
{ "name": "@proton/encrypted-search", "description": "ProtonMail Encrypted Search library", "license": "MIT", "author": "ProtonMail", "main": "lib/index.ts", "scripts": { "check-types": "tsc", "i18n:validate": "proton-i18n validate lint-functions", "i18n:validate:context": "proton-i18n extract && proton-i18n validate", "lint": "eslint lib test --ext .js,.ts,tsx --quiet --cache", "pretty": "prettier --write $(find lib test -type f -name '*.js' -o -name '*.ts' -o -name '*.tsx')", "test": "NODE_ENV=test karma start test/karma.conf.js" }, "dependencies": { "@proton/components": "workspace:packages/components", "@proton/crypto": "workspace:packages/crypto", "@proton/i18n": "workspace:packages/i18n", "@proton/shared": "workspace:packages/shared", "@proton/utils": "workspace:packages/utils", "idb": "^7.1.1", "react": "^17.0.2", "ttag": "^1.7.29" }, "devDependencies": { "@proton/eslint-config-proton": "workspace:packages/eslint-config-proton", "@types/jasmine": "^5.1.3", "eslint": "^8.54.0", "jasmine": "^5.1.0", "jasmine-core": "^5.1.1", "karma": "^6.4.2", "karma-chrome-launcher": "^3.2.0", "karma-jasmine": "^5.1.0", "karma-spec-reporter": "^0.0.36", "karma-webpack": "^5.0.0", "playwright": "^1.40.0", "ts-loader": "^9.5.1", "typescript": "^5.3.2", "webpack": "^5.89.0" } }
7,265
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/tsconfig.json
{ "extends": "../../tsconfig.base.json" }
7,266
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/constants.ts
import noop from '@proton/utils/noop'; import { ESIndexingState, ESProgress, EncryptedSearchFunctions, OptionalESCallbacks } from './models'; /** * Number of items to add to the search results list during * a partial search. It corresponds to one page of results in mail */ export const ES_EXTRA_RESULTS_LIMIT = 50; /** * Size of a batch of items during indexing and syncing. * It corresponds to the maximum number of items' metadata returned * by mail API */ export const ES_MAX_PARALLEL_ITEMS = 150; /** * Number of items to fetch and process concurrently during indexing. Some * browsers internally set the maximum concurrent requests to handle to 100, * therefore we impose a slightly more stringent limit to allow some room for * other requests the app might send. Note that this should not be used for backgound * indexing, in which case ES_BACKGROUND_CONCURRENT should be used instead */ export const ES_MAX_CONCURRENT = 20; /** * Number of items to fetch and process concurrently when the indexing is started in background mode */ export const ES_BACKGROUND_CONCURRENT = 1; /** * Number of characters to retain from an item's metadata when highlighting it */ export const ES_MAX_INITIAL_CHARS = 20; /** * Maximum size of cached items expressed in MB. It is heuristically determined * so to cover most users (it should be enough for ~50k emails, and more than 95% of * paid users have less than that, based on an extrapolation made in 2021) yet not * to be too heavy on their devices' memory. 
The target size is 500 MB, however the * number is larger due to our size estimation function being more conservative * than the actual memory occupation */ export const ES_MAX_CACHE = 600000000; // 600 MB /** * Maximum number of metadata "pages" per batch during metadata indexing */ export const ES_MAX_METADATA_BATCH = 20; export const ES_BACKGROUND_METADATA_BATCH = 1; /** * Upper bound of number of items queried from IndexedDB at once */ export const ES_MAX_ITEMS_PER_BATCH = 1000; /** * Current version of the most up-to-date ES IndexedDB */ export const INDEXEDDB_VERSION = 2; /** * Maximum number of times an API call to fetch an item * content will be retried before being stored locally * for a later attempt */ export const ES_MAX_RETRIES = 10; /** * Error codes that are deemed temporary and therefore will trigger a retry * during API calls that the ES library does */ export const ES_TEMPORARY_ERRORS = [408, 429, 502, 503]; /** * Regular expression used to find and/or remove diacritics for the purpose of * searching and highlighting text. 
It matches all combining characters */ export const DIACRITICS_REGEXP = /\p{Mark}/gu; /** * Regular expression used to turn all fancy quotes into normal ones */ export const QUOTES_REGEXP = /\u00ab|\u00bb|\u201e|\u201c|\u201f|\u201d|\u275d|\u275e|\u276e|\u276f|\u2e42|\u301d|\u301e|\u301f|\uff02/gu; /** * Regular expression used to turn all fancy apostrophes into normal ones */ export const APOSTROPHES_REGEXP = /\u2018|\u2019|\u02bc/gu; /** * Configuration of the Web Crypto API to symmetrically encrypt items in IndexedDB */ export const AesKeyGenParams: AesKeyGenParams = { name: 'AES-GCM', length: 128 }; export const KeyUsages: KeyUsage[] = ['encrypt', `decrypt`]; /** * ENUMS */ export enum INDEXING_STATUS { INACTIVE, INDEXING, PAUSED, ACTIVE, } export enum TIMESTAMP_TYPE { STOP, START, STEP, } export enum ES_SYNC_ACTIONS { DELETE, CREATE, UPDATE_CONTENT, UPDATE_METADATA, } export enum STORING_OUTCOME { FAILURE, SUCCESS, QUOTA, } /** * DEFAULTS */ export const defaultESStatus = { permanentResults: [], setResultsList: () => {}, lastTimePoint: undefined, previousESSearchParams: undefined, cachedIndexKey: undefined, dbExists: false, isEnablingContentSearch: false, isDBLimited: false, esEnabled: false, esSupported: true, isRefreshing: false, isSearchPartial: false, isSearching: false, isFirstSearch: true, isEnablingEncryptedSearch: false, isContentIndexingPaused: false, isMetadataIndexingPaused: false, contentIndexingDone: false, isConfigFromESDBLoaded: false, getCacheStatus: () => ({ isCacheReady: false, isCacheLimited: false }), }; export const defaultESCache = { esCache: new Map(), cacheSize: 0, isCacheLimited: false, isCacheReady: false, }; export const defaultESIndexingState: ESIndexingState = { esProgress: 0, estimatedMinutes: 0, totalIndexingItems: 0, currentProgressValue: 0, }; export const defaultESContext: EncryptedSearchFunctions<any, any, any> = { encryptedSearch: async () => false, highlightString: () => '', highlightMetadata: () => ({ numOccurrences: 0, 
resultJSX: null as any }), enableEncryptedSearch: async () => false, enableContentSearch: async () => {}, handleEvent: async () => {}, isSearchResult: () => false, esDelete: async () => {}, shouldHighlight: () => false, initializeES: async () => {}, pauseContentIndexing: async () => {}, pauseMetadataIndexing: async () => {}, cacheIndexedDB: async () => {}, toggleEncryptedSearch: async () => {}, getCache: () => new Map(), resetCache: () => {}, correctDecryptionErrors: async () => 0, esStatus: defaultESStatus, progressRecorderRef: { current: [0, 0] }, esIndexingProgressState: defaultESIndexingState, }; export const defaultESCallbacks: OptionalESCallbacks<any, any, any> = { checkIsReverse: () => true, shouldOnlySortResults: () => false, resetSort: noop, getSearchInterval: () => ({ begin: undefined, end: undefined }), applyFilters: () => true, onContentDeletion: async () => {}, correctDecryptionErrors: async () => 0, }; export const defaultESProgress: ESProgress = { totalItems: 0, numPauses: 0, isRefreshed: false, timestamps: [], originalEstimate: 0, recoveryPoint: undefined, status: INDEXING_STATUS.INACTIVE, };
7,267
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/index.ts
export * from './esIDB'; export * from './esHelpers'; export * from './models'; export * from './constants'; export { default as useEncryptedSearch } from './useEncryptedSearch'; export { default as useEncryptedSearchIndexingProgress } from './useEncryptedSearchIndexingProgress';
7,268
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/useEncryptedSearch.tsx
import { useEffect, useMemo, useRef } from 'react'; import { IDBPDatabase } from 'idb'; import { c } from 'ttag'; import useOnLogout from '@proton/components/containers/app/useOnLogout'; import useApi from '@proton/components/hooks/useApi'; import useNotifications from '@proton/components/hooks/useNotifications'; import useUser from '@proton/components/hooks/useUser'; import { useGetUserKeys } from '@proton/components/hooks/useUserKeys'; import { SECOND } from '@proton/shared/lib/constants'; import { hasBit } from '@proton/shared/lib/helpers/bitset'; import { isFirefox } from '@proton/shared/lib/helpers/browser'; import isDeepEqual from '@proton/shared/lib/helpers/isDeepEqual'; import { wait } from '@proton/shared/lib/helpers/promise'; import { ES_EXTRA_RESULTS_LIMIT, INDEXING_STATUS, STORING_OUTCOME, TIMESTAMP_TYPE, defaultESCache, defaultESCallbacks, defaultESProgress, defaultESStatus, } from './constants'; import { IndexingMetrics, buildContentDB, buildMetadataDB, cacheIDB, esSentryReport, findItemIndex, gatherIndexingMetrics, getIndexKey, highlightJSX, hybridSearch, initializeEncryptedSearch, insertMarks, refreshESCache, removeESFlags, requestPersistence, retryAPICalls, retryContentIndexing, sendIndexingMetricsForMail, sendSearchingMetrics, syncItemEvents, uncachedSearch, } from './esHelpers'; import { IndexedDBRow, checkVersionedESDB, contentIndexingProgress, deleteESDB, metadataIndexingProgress, openESDB, readAllLastEvents, readEnabled, readLimited, readNumContent, readNumMetadata, setLimited, toggleEnabled, writeAllEvents, } from './esIDB'; import { ESCache, ESCallbacks, ESEvent, ESItem, ESProgress, ESStatus, ESTimepoint, EnableContentSearch, EnableEncryptedSearch, EncryptedSearch, EncryptedSearchDB, EncryptedSearchExecution, EncryptedSearchFunctions, EventsObject, HighlightMetadata, HighlightString, InternalESCallbacks, } from './models'; import useEncryptedSearchIndexingProgress from './useEncryptedSearchIndexingProgress'; import { useEncryptedSearchStatus 
} from './useEncryptedSearchStatus'; interface Props<ESItemMetadata, ESSearchParameters, ESItemContent = void> { refreshMask: number; esCallbacks: ESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>; contentIndexingSuccessMessage?: string; onMetadataIndexed?: (metrics: IndexingMetrics) => void; sendMetricsOnSearch?: boolean; } /** * Provide the core functionalities of ES. * @param refreshMask A number representing the bit the BE sets to REFRESH_ALL on the specific * client * @param esCallbacks All the callbacks that are product-specific and therefore need to be passed * to the ES core functions to work * @param contentIndexingSuccessMessage The text that is showing in a green notification upon completing indexing * @param sendMetricsOnSearch Determines whether to send metrics on each single search. Only meant for Mail * @returns An empty instance of the ES IndexedDB */ const useEncryptedSearch = <ESItemMetadata extends Object, ESSearchParameters, ESItemContent = void>({ refreshMask, esCallbacks: inputESCallbacks, contentIndexingSuccessMessage, onMetadataIndexed, sendMetricsOnSearch, }: Props<ESItemMetadata, ESSearchParameters, ESItemContent>) => { const getUserKeys = useGetUserKeys(); const api = useApi(); const [user] = useUser(); const { ID: userID } = user; const { createNotification } = useNotifications(); const esCallbacks: InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent> = { ...defaultESCallbacks, ...inputESCallbacks, }; const { getSearchParams } = esCallbacks; const { isSearch } = getSearchParams(); // Keep a reference to cached items, such that they can be queried at any time const esCacheRef = useRef<ESCache<ESItemMetadata, ESItemContent>>(defaultESCache); // Allow to abort indexing const abortIndexingRef = useRef<AbortController>(new AbortController()); // Allow to abort searching const abortSearchingRef = useRef<AbortController>(new AbortController()); // Allow to track progress during syncing const syncingEventsRef = 
useRef<Promise<void>>(Promise.resolve()); const [esStatus, setESStatus] = useEncryptedSearchStatus<ESItemMetadata, ESSearchParameters, ESItemContent>({ esCacheRef, userID, getUserKeys, }); const { esIndexingProgressState, progressRecorderRef, recordProgress } = useEncryptedSearchIndexingProgress(); const resetProgress = (indexDBRow: IndexedDBRow) => { void recordProgress(0, indexDBRow); }; const recordMetadataProgress = async () => { const newProgress = (await readNumMetadata(userID)) || 0; void recordProgress(newProgress, 'metadata'); }; const recordContentProgress = async () => { const newProgress = (await readNumContent(userID)) || 0; void recordProgress(newProgress, 'content'); }; /** * Chain several synchronisations to account for events being fired when * previous ones are still being processed */ const addSyncing = async (callback: () => Promise<void>) => { syncingEventsRef.current = syncingEventsRef.current.then(() => callback()); }; /** * Return cache */ const getCache = () => { return esCacheRef.current.esCache; }; /** * Reset the cache to its default empty state */ const resetCache = () => { // Note that assigning values from defaultESCache doesn't work esCacheRef.current.esCache = new Map(); esCacheRef.current.cacheSize = 0; esCacheRef.current.isCacheLimited = false; esCacheRef.current.isCacheReady = false; }; useOnLogout(async () => resetCache()); /** * Wipe all local data related to ES */ const esDelete = async () => { abortIndexingRef.current.abort(); abortSearchingRef.current.abort(); resetCache(); // Note that currently no local storage blobs exist, // however in a legacy version they did removeESFlags(userID); setESStatus(() => ({ ...defaultESStatus, isConfigFromESDBLoaded: true, })); return deleteESDB(userID); }; /** * Notify the user the DB is deleted. 
Typically this is needed if the key is no * longer usable to decrypt it */ const dbCorruptError = async () => { await esDelete(); createNotification({ text: c('Error').t`Please activate your search again`, type: 'error', }); }; /** * Reset to default only the parameters of ESStatus that are related to a search */ const resetSearchStatus = ( esStatus: ESStatus<ESItemMetadata, ESItemContent, ESSearchParameters> ): ESStatus<ESItemMetadata, ESItemContent, ESSearchParameters> => { return { ...esStatus, permanentResults: defaultESStatus.permanentResults, setResultsList: defaultESStatus.setResultsList, lastTimePoint: defaultESStatus.lastTimePoint, previousESSearchParams: defaultESStatus.previousESSearchParams, isSearchPartial: defaultESStatus.isSearchPartial, isSearching: defaultESStatus.isSearching, }; }; /** * Deactivates ES. This does not remove anything, and the database keeps being synced. * It is used to switch ES temporarily off in cases when server side search is available. */ const toggleEncryptedSearch = async () => { const currentOption = esStatus.esEnabled; await toggleEnabled(userID); setESStatus((esStatus) => ({ ...esStatus, esEnabled: !currentOption, })); if (currentOption) { abortSearchingRef.current.abort(); } else { // Every time ES is enabled, we reset sorting to avoid carrying on with SIZE sorting in // case it was previously used. SIZE sorting is not supported by ES const { isSearch } = getSearchParams(); if (isSearch) { esCallbacks.resetSort(); } } }; /** * Start the caching routine, i.e. 
fetching and decrypting as many items from the ES * database as possible to be stored in memory for quick access */ const cacheIndexedDB = async () => { const { esEnabled, dbExists, cachedIndexKey } = esStatus; if (!dbExists || !esEnabled || esCacheRef.current.isCacheReady) { return; } const indexKey = cachedIndexKey || (await getIndexKey(getUserKeys, userID)); if (!indexKey) { await dbCorruptError(); return; } setESStatus((esStatus) => ({ ...esStatus, cachedIndexKey: indexKey, })); return cacheIDB<ESItemMetadata, ESItemContent>(indexKey, userID, esCacheRef); }; const correctDecryptionErrors = async () => { const { cachedIndexKey } = esStatus; const indexKey = cachedIndexKey || (await getIndexKey(getUserKeys, userID)); if (!indexKey) { await dbCorruptError(); return 0; } abortIndexingRef.current = new AbortController(); const result = await esCallbacks.correctDecryptionErrors( userID, indexKey, abortIndexingRef, esStatus, recordProgress ); if (result > 0) { resetCache(); } return result; }; /** * Keep IndexedDB in sync with new events */ const syncIndexedDB = async (event: ESEvent<ESItemMetadata>, indexKey: CryptoKey | undefined) => { const { Items, attemptReDecryption } = event; const { permanentResults, setResultsList } = esStatus; // In case a key is reactivated, try to fix any decryption error that might // have happened during indexing, but only if content indexing is done, otherwise // there might not be all content in IDB if (attemptReDecryption) { setESStatus((esStatus) => ({ ...esStatus, isRefreshing: true, })); if (indexKey) { abortIndexingRef.current = new AbortController(); const newItemsFound = await correctDecryptionErrors(); // In case new items were added to ESDB this way, cache should be reset. 
// Next time the user interacts with the searchbar or searches, it will be rebuilt if (newItemsFound) { resetCache(); } } } if (!Items || !Items.length) { return; } const { esSearchParams } = getSearchParams(); const searchChanged = await syncItemEvents<ESItemContent, ESItemMetadata, ESSearchParameters>( Items, userID, esCacheRef, permanentResults, indexKey, esSearchParams, esCallbacks ); if (searchChanged) { setResultsList(permanentResults); setESStatus((esStatus) => ({ ...esStatus, permanentResults, })); } }; /** * Conclude any type of syncing routine */ const finaliseSyncing = async (eventsToStore: EventsObject, indexKey: CryptoKey | undefined) => { // In case everything goes through, save the last event(s) from which to // catch up the next time, but only in case either content is not indexed at all // of if it already finished. If content indexing is ongoing, we don't overwrite the // last event IDs from IDB because we'll need to catch up from them to update content. // In other words, metadata will be re-synced once the latter happens const { isEnablingContentSearch, isContentIndexingPaused } = esStatus; if (!isEnablingContentSearch && !isContentIndexingPaused) { await writeAllEvents(userID, eventsToStore); } // In case many items were removed from cache, or from IDB, fill the remaining space let isDBLimited: boolean | undefined; if (!!indexKey) { const contentProgress = await contentIndexingProgress.read(userID); if (!!contentProgress && contentProgress.status === INDEXING_STATUS.ACTIVE) { abortIndexingRef.current = new AbortController(); await retryContentIndexing(userID, indexKey, esCallbacks, abortIndexingRef); } await refreshESCache<ESItemMetadata, ESItemContent>(indexKey, userID, esCacheRef, esCallbacks.getItemInfo); await retryAPICalls<ESItemContent>(userID, indexKey, esCallbacks.fetchESItemContent); // Check if DB became limited or not after the update isDBLimited = await readLimited(userID); } setESStatus((esStatus) => ({ ...esStatus, isRefreshing: 
false, isDBLimited: isDBLimited ?? esStatus.isDBLimited, })); }; /** * Catch up with all changes contained in the given event */ const catchUpFromEvent = async (indexKey: CryptoKey, currentEvent: ESEvent<ESItemMetadata>): Promise<void> => { try { await syncIndexedDB(currentEvent, indexKey); return await finaliseSyncing(currentEvent.eventsToStore, indexKey); } catch (error: any) { esSentryReport('catchUpFromEvent: syncIndexedDB', { error }); setESStatus((esStatus) => ({ ...esStatus, isRefreshing: false, })); return catchUpFromEvent(indexKey, currentEvent); } }; /** * Fetch all events since a previously stored one */ const catchUpFromLastEvents = async ( indexKey: CryptoKey | undefined, newEvents: ESEvent<ESItemMetadata>[], eventsToStore: EventsObject ): Promise<void> => { setESStatus((esStatus) => ({ ...esStatus, isRefreshing: true, })); // Resetting is necessary to show appropriate UI when syncing immediately after refreshing void resetProgress('content'); try { // It's important that these events are synced sequentially for (const eventToCheck of newEvents) { await syncIndexedDB(eventToCheck, indexKey); } return await finaliseSyncing(eventsToStore, indexKey); } catch (error: any) { esSentryReport('catchUpFromLastEvents: syncIndexedDB', { error }); setESStatus((esStatus) => ({ ...esStatus, isRefreshing: false, })); return catchUpFromLastEvents(indexKey, newEvents, eventsToStore); } }; /** * Pause the currently ongoing indexing process, if any */ const pauseMetadataIndexing = async () => { abortIndexingRef.current.abort(); setESStatus((esStatus) => ({ ...esStatus, isEnablingEncryptedSearch: false, isMetadataIndexingPaused: true, })); await metadataIndexingProgress.setStatus(userID, INDEXING_STATUS.PAUSED); await metadataIndexingProgress.incrementNumPauses(userID); await metadataIndexingProgress.addTimestamp(userID, TIMESTAMP_TYPE.STOP); }; /** * Set up the ES IndexedDB and populate it with items metadata. It optionally accepts * an object with one property. 
* @param isRefreshed is only used to be forward to the metrics route for statistical purposes. * Whenever the user manually starts indexing, the latter shouldn't be specified (and defaults to false). */ const enableEncryptedSearch: EnableEncryptedSearch = async ({ isRefreshed = false, isBackgroundIndexing = false, showErrorNotification = true, } = {}) => { // If indexing instance is already in progress, don't start a new one const { isEnablingEncryptedSearch } = esStatus; if (isEnablingEncryptedSearch) { return false; } setESStatus((esStatus) => ({ ...esStatus, isEnablingEncryptedSearch: true, isMetadataIndexingPaused: false, })); const handleError = async (esSupported = true) => { if (showErrorNotification) { createNotification({ text: esSupported ? c('Error').t`A problem occurred, please try again.` : c('Error') .t`Content search cannot be enabled in this browser. Please quit private browsing mode or use another browser.`, type: 'error', }); } await esDelete(); return false; }; const esdbExists = await checkVersionedESDB(userID); /* There are several cases for this variable: - if ESDB exists, it means that a metadata indexing had already been started, therefore the "previous event ID", i.e. prior to starting it, was already stored in ESDB. In other words, the event ID we're querying now is just a random later one and should therefore be overwritten with the one from ESDB. - if ESDB doesn't exist and can be created, i.e. IndexedDB is supported by the browser, then the event ID we're querying is the one immediately prior to metadata indexing, therefore it should be stored in ESDB. - if ESDB doesn't exist but cannot be created, e.g. Firefox in incognito mode, then this also represents the one immediately prior to metadata indexing. We should keep track of it to sync metadata, but we cannot store it to ESDB because the latter can't exist. 
*/ let previousEventID = await esCallbacks.getPreviousEventID(); const expectedTotalIndexed = await esCallbacks.getTotalItems(); void recordProgress([esIndexingProgressState.esProgress, expectedTotalIndexed], 'metadata'); let indexKey: CryptoKey | undefined; let esSupported = true; if (esdbExists) { const esProgress = await metadataIndexingProgress.read(userID); // If indexing was already completed, don't start a new one if (esProgress && esProgress.status === INDEXING_STATUS.ACTIVE) { return true; } // By virtue of the first point above about previousEventID, we overwrite it // with the one stored in ESDB const storedPreviousEventID = await readAllLastEvents(userID); if (!storedPreviousEventID) { return handleError(); } previousEventID = storedPreviousEventID; // Otherwise indexing should resume const esDB = await openESDB(userID); indexKey = await getIndexKey(getUserKeys, userID); esSupported = !!indexKey && !!esDB; esDB?.close(); // Note that at this point esDB nor indexKey can be undefined, and there // is a problem if either of them is if (!esSupported) { return handleError(); } } else { try { let esDB: IDBPDatabase<EncryptedSearchDB> | undefined; ({ indexKey, esDB } = await initializeEncryptedSearch( userID, getUserKeys, previousEventID, isRefreshed, expectedTotalIndexed )); esSupported = !!indexKey && !!esDB; esDB?.close(); } catch (error: any) { esSentryReport('initializeEncryptedSearch', { error }); esSupported = false; } } // In case IDB cannot be instantiated, we temporarily don't allow to continue // with metadata indexing. However by removing this check alone, memory-only // metadata indexing can be supported if (!esSupported) { return handleError(false); } // esSupported is false when we can't initialise IndexedDB and therefore ES // will be cache-only. 
setESStatus((esStatus) => ({ ...esStatus, esSupported, cachedIndexKey: indexKey, dbExists: true, })); // Even though this procedure cannot be paused, this is still useful // in case of clearing data and logout abortIndexingRef.current = new AbortController(); const previousProgress = await metadataIndexingProgress.read(userID); if (previousProgress) { await metadataIndexingProgress.setStatus(userID, INDEXING_STATUS.INDEXING); } let success = false; let isInitialIndexing = true; while (!success) { success = await buildMetadataDB<ESItemMetadata>({ userID, esSupported, indexKey, esCacheRef, queryItemsMetadata: esCallbacks.queryItemsMetadata, getItemInfo: esCallbacks.getItemInfo, abortIndexingRef, recordProgress: recordMetadataProgress, isInitialIndexing, isBackgroundIndexing, }); // Kill switch in case user logs out or deletes data if (abortIndexingRef.current.signal.aborted) { return false; } // In case the procedure failed, wait some time before re-starting if (!success) { isInitialIndexing = false; await wait(2 * SECOND); } } // Catch up with events since the last one before indexing, which was set in // the Event blob in localStorage during initialization. Note that we finalise // indexing even it this step fails, because it will be retried at every new // event and refresh let newEvents: ESEvent<ESItemMetadata>[]; let shouldRefresh: boolean; let eventsToStore: EventsObject; try { ({ newEvents, shouldRefresh, eventsToStore } = await esCallbacks.getEventFromIDB(previousEventID)); } catch (error: any) { return handleError(); } if (shouldRefresh) { return handleError(); } const catchUpPromise = catchUpFromLastEvents(indexKey, newEvents, eventsToStore); void addSyncing(() => catchUpPromise); await catchUpPromise; const wasESDBCreated = await checkVersionedESDB(userID); // Paginated indexing has the problem that if the user deletes completely some items // in pages that have already been queried, all subsequent items are shifted. 
Therefore, // later pages will contain different items than if the deletion had never occurred. The // end result is that some items are not indexed. Since it's tricky and slow to figure // out which ones, we instead just delete everything and notify users let metrics; if (wasESDBCreated) { const totalIndexed = await readNumMetadata(userID); if (typeof totalIndexed === 'undefined' || totalIndexed < expectedTotalIndexed) { return handleError(); } await metadataIndexingProgress.addTimestamp(userID, TIMESTAMP_TYPE.STOP); metrics = await gatherIndexingMetrics(userID, 'metadata'); await metadataIndexingProgress.setActiveStatus(userID); await toggleEnabled(userID); } setESStatus((esStatus) => ({ ...esStatus, isEnablingEncryptedSearch: false, esEnabled: true, })); if (metrics) { onMetadataIndexed?.(metrics); } // In case this process did not create an IDB, e.g. because it's a memory only // metadata index, it cannot be considered successful as otherwise content // indexing would be tried return wasESDBCreated; }; /** * Pause the currently ongoing indexing process, if any */ const pauseContentIndexing = async () => { abortIndexingRef.current.abort(); setESStatus((esStatus) => ({ ...esStatus, isEnablingContentSearch: false, isContentIndexingPaused: true, })); await contentIndexingProgress.setStatus(userID, INDEXING_STATUS.PAUSED); await contentIndexingProgress.incrementNumPauses(userID); await contentIndexingProgress.addTimestamp(userID, TIMESTAMP_TYPE.STOP); }; /** * Start indexing for the first time or resume it after the user paused it. It optionally accepts * an object with two properties. * @param notify specifies whether any pop-up banner will be displayed to the user indicating success * or failure of the indexing process * @param isRefreshed is only used to be forward to the metrics route for statistical purposes. * Whenever the user manually starts indexing, the latter shouldn't be specified (and defaults to false). 
*/ const enableContentSearch: EnableContentSearch = async ({ notify = true, isRefreshed = false, isBackgroundIndexing = false, } = {}) => { // If there is no fetch content callback, content search cannot be activated at all const { fetchESItemContent } = esCallbacks; if (!fetchESItemContent) { return; } // If an indexing instance is already in progress don't start a new one const { isEnablingContentSearch, esSupported } = esStatus; if (isEnablingContentSearch) { return; } if (!esSupported) { createNotification({ text: c('Error') .t`Content search cannot be enabled in this browser. Please quit private browsing mode or use another browser.`, type: 'error', }); return; } const indexKey = await getIndexKey(getUserKeys, userID); if (!indexKey) { return dbCorruptError(); } setESStatus((esStatus) => ({ ...esStatus, isEnablingContentSearch: true, isContentIndexingPaused: false, esEnabled: false, })); const previousProgress = await contentIndexingProgress.read(userID); const expectedTotalIndexed = await esCallbacks.getTotalItems(); void recordProgress([esIndexingProgressState.esProgress, expectedTotalIndexed], 'content'); let totalItems = 0; let recoveryPoint: ESTimepoint | undefined; if (!previousProgress) { // Save the event before starting building IndexedDB. The number of items // before indexing aims to show progress, as new items will be synced only // after indexing has completed await writeAllEvents(userID, await esCallbacks.getPreviousEventID()); totalItems = expectedTotalIndexed; const initialProgress: ESProgress = { ...defaultESProgress, totalItems, isRefreshed, status: INDEXING_STATUS.INDEXING, }; await contentIndexingProgress.write(userID, initialProgress); } else { await contentIndexingProgress.setStatus(userID, INDEXING_STATUS.INDEXING); ({ totalItems, recoveryPoint } = previousProgress); } abortIndexingRef.current = new AbortController(); // We request storage persistence to prevent IDB from being evicted. 
In Firefox this // operation will trigger a popup asking the user to grant storage permission. If // such a popup appears after the user has explicitly activated ES, then its request // should come at no surprise. However, there are cases (e.g. at refresh or during // welcome flow for new users) in which indexing starts without a manual input from // the user, therefore such a popup will seem unrelated to any actions from the user's // perspective. For this reason, only when the browser is Firefox, we don't request // permission in cases indexing was not manually triggered. if (!isFirefox() || notify) { await requestPersistence(); } // We default to having the limited flag to true and // only revert if by the end IDB is not limited await setLimited(userID, true); let indexingOutcome = STORING_OUTCOME.SUCCESS; let success = totalItems === 0; while (!success) { try { indexingOutcome = await buildContentDB<ESItemContent>( userID, indexKey, abortIndexingRef, recordContentProgress, fetchESItemContent, recoveryPoint, true, isBackgroundIndexing ); } catch (error: any) { if (abortIndexingRef.current.signal.aborted) { return; } esSentryReport('buildContentDB', { error }); return dbCorruptError(); } // Kill switch in case user logs out or pauses if (abortIndexingRef.current.signal.aborted) { return; } success = indexingOutcome === STORING_OUTCOME.SUCCESS || indexingOutcome === STORING_OUTCOME.QUOTA; // In case the procedure failed, wait some time before re-starting if (!success) { await wait(2 * SECOND); } } // Since we default to having the limited flag to true in IDB, // in case it is not limited we overwrite it if (indexingOutcome === STORING_OUTCOME.SUCCESS) { await setLimited(userID, false); } // Catch up with events since the last one before indexing, which was set in // the Event blob in localStorage during initialization. 
Note that we finalise // indexing even it this step fails, because it will be retried at every new // event and refresh let newEvents: ESEvent<ESItemMetadata>[]; let shouldRefresh: boolean; let eventsToStore: EventsObject; try { ({ newEvents, shouldRefresh, eventsToStore } = await esCallbacks.getEventFromIDB()); } catch (error: any) { return dbCorruptError(); } if (shouldRefresh) { return dbCorruptError(); } await contentIndexingProgress.setActiveStatus(userID); setESStatus((esStatus) => ({ ...esStatus, isEnablingContentSearch: false, contentIndexingDone: true, esEnabled: true, })); const catchUpPromise = catchUpFromLastEvents(indexKey, newEvents, eventsToStore); void addSyncing(() => catchUpPromise); await catchUpPromise; await contentIndexingProgress.addTimestamp(userID, TIMESTAMP_TYPE.STOP); void sendIndexingMetricsForMail(api, userID); if (notify && contentIndexingSuccessMessage) { createNotification({ text: contentIndexingSuccessMessage, }); } }; /** * Execute an encrypted search */ const newEncryptedSearch: EncryptedSearchExecution<ESItemMetadata, ESItemContent, ESSearchParameters> = async ( setResultsList, esSearchParams, minimumItems, sendMetricsOnSearch ) => { const t1 = performance.now(); const { previousESSearchParams, permanentResults, isSearchPartial: wasSearchPartial, cachedIndexKey, isFirstSearch, } = esStatus; abortSearchingRef.current = new AbortController(); // In case only sorting changed, for complete searches it doesn't make sense to perform a new search if (!wasSearchPartial && previousESSearchParams) { const shouldSortOnly = esCallbacks.shouldOnlySortResults(esSearchParams, previousESSearchParams); if (shouldSortOnly) { setResultsList(permanentResults); return true; } } setESStatus((esStatus) => ({ ...esStatus, isSearching: true, isSearchPartial: true, isFirstSearch: false, })); const controlledSetResultsList = (items: ESItem<ESItemMetadata, ESItemContent>[]) => { if (!abortSearchingRef.current.signal.aborted) { setResultsList(items); } }; 
let searchResults: ESItem<ESItemMetadata, ESItemContent>[] = []; let isSearchPartial = false; let lastTimePoint: ESTimepoint | undefined; try { ({ searchResults, isSearchPartial, lastTimePoint } = await hybridSearch< ESItemMetadata, ESItemContent, ESSearchParameters >( esCacheRef, esSearchParams, cachedIndexKey, getUserKeys, userID, controlledSetResultsList, abortSearchingRef, esCallbacks, minimumItems )); } catch (error: any) { esSentryReport('encryptedSearch: hybridSearch', { error }); // If the key is the problem, then we want to wipe the DB and fall back to // server-side search, otherwise we want to show a generic error and still // fall back to server-side search if (error.message === 'Key not found') { return dbCorruptError().then(() => false); } throw error; } if (!abortSearchingRef.current.signal.aborted) { setESStatus((esStatus) => ({ ...esStatus, permanentResults: searchResults, setResultsList: setResultsList, lastTimePoint, previousESSearchParams: esSearchParams, isSearchPartial, isSearching: false, })); setResultsList(searchResults); if (sendMetricsOnSearch) { const t2 = performance.now(); void sendSearchingMetrics( api, userID, esCacheRef.current.cacheSize, Math.ceil(t2 - t1), isFirstSearch, esCacheRef.current.isCacheLimited ); } } return true; }; /** * Increase the number of results in order to reach at least the next multiple of ES_EXTRA_RESULTS_LIMIT, * in case the cache is limited and the user wishes more */ const incrementSearch: EncryptedSearchExecution<ESItemMetadata, ESItemContent, ESSearchParameters> = async ( setResultsList, esSearchParams, minimumItems ) => { const { permanentResults, lastTimePoint, cachedIndexKey } = esStatus; const extraItems = Math.max( ES_EXTRA_RESULTS_LIMIT * Math.ceil(permanentResults.length / ES_EXTRA_RESULTS_LIMIT) - permanentResults.length, minimumItems || 0 ); setESStatus((esStatus) => ({ ...esStatus, isSearching: true, })); const indexKey = cachedIndexKey || (await getIndexKey(getUserKeys, userID)); if 
(!indexKey) { await dbCorruptError(); return false; } const hasApostrophe = (esCallbacks.getKeywords(esSearchParams) || []).some((keyword) => keyword.includes(`'`)); const { resultsArray, newLastTimePoint } = await uncachedSearch< ESItemMetadata, ESItemContent, ESSearchParameters >( userID, indexKey, esSearchParams, esCallbacks, lastTimePoint, extraItems, hasApostrophe, undefined, abortSearchingRef ); if (!abortSearchingRef.current.signal.aborted) { permanentResults.push(...resultsArray); setESStatus((esStatus) => ({ ...esStatus, permanentResults, isSearchPartial: !!newLastTimePoint, lastTimePoint: newLastTimePoint, isSearching: false, })); setResultsList(permanentResults); } return true; }; /** * Perform a new encrypted search or increment an existing one. * @param setResultsList a callback that will be given the items to show, i.e. those found as search * results, and that should handle the UI part of displaying them to the users * @param minimumItems is optional and refers to the smallest number of items that the search is * expected to produce. 
If specified this parameter instructs the search to try finding at least * this number of items from disk both when performing a new search with limited cache and when * incrementing an existing partial search * @returns a boolean indicating the success of the search */ const encryptedSearch: EncryptedSearch<ESItemMetadata, ESItemContent> = async (setResultsList, minimumItems) => { const { dbExists, esEnabled, isSearchPartial, previousESSearchParams } = esStatus; // In these cases no ES should be performed if (!dbExists || !esEnabled) { return false; } const { isSearch, esSearchParams } = getSearchParams(); if (!isSearch || !esSearchParams) { return false; } const { isCacheLimited } = esCacheRef.current; if ( isSearchPartial && isCacheLimited && previousESSearchParams && isDeepEqual(esSearchParams, previousESSearchParams) && !abortSearchingRef.current.signal.aborted ) { return incrementSearch(setResultsList, previousESSearchParams, minimumItems); } // Prevent old searches from interfering with newer ones abortSearchingRef.current.abort(); setESStatus((esStatus) => resetSearchStatus(esStatus)); return newEncryptedSearch(setResultsList, esSearchParams, minimumItems, sendMetricsOnSearch); }; /** * @returns whether some conditions to apply highlighting are met, i.e. whether a search is * on and there are keywords. For example in cases where the user only specifies filters * and not keywords, this function returns false */ const shouldHighlight = () => { const { isSearch, esSearchParams } = getSearchParams(); if (!isSearch || !esSearchParams) { return false; } const keywords = esCallbacks.getKeywords(esSearchParams); return typeof keywords !== 'undefined' && !!keywords.length; }; /** * Insert the <mark></mark> highlighting markdown in a string and returns a string containing it, * which then needs to be displayed in the UI. 
Note that the keywords to highlight are extracted * directly with the parseSearchParams callback * @param content the string where to insert the markdown * @param setAutoScroll whether to insert the data-auto-scroll attribute to the first instance of * the inserted mark tags. The UI should automatically scroll, if possible, to said first tag * @returns the string containing the markdown */ const highlightString: HighlightString = (content, setAutoScroll) => { const { esSearchParams } = getSearchParams(); if (!esSearchParams) { return content; } const keywords = esCallbacks.getKeywords(esSearchParams); if (!keywords) { return content; } return insertMarks(content, keywords, setAutoScroll); }; /** * Inserts the <mark></mark> highlighting markdown in a string and returns directly the JSX node * to be used in React * @param metadata the string where to insert the markdown * @param isBold specifies whether the text should also be bolded (e.g. in some headers) * @param trim specifies whether to substitute the initial portion of the string by an ellipsis * if it's too long * @returns an object containing two properties: numOccurrences is the total number of times the * markdown tag has been added to the given string, while resultJSX is the actual React node to be * displayed */ const highlightMetadata: HighlightMetadata = (metadata, isBold, trim) => { const noData = { numOccurrences: 0, resultJSX: <span>{metadata}</span>, }; const { esSearchParams } = getSearchParams(); if (!esSearchParams) { return noData; } const keywords = esCallbacks.getKeywords(esSearchParams); if (!keywords) { return noData; } return highlightJSX(metadata, keywords, isBold, trim); }; /** * @returns whether a given item, specified by its ID, is part of the currently shown search results or not. 
* It returns false if a search is not happening on going */ const isSearchResult = (ID: string) => { const { dbExists, esEnabled, permanentResults } = esStatus; const { isSearch } = getSearchParams(); if (!(dbExists && esEnabled && isSearch)) { return false; } return findItemIndex(ID, permanentResults, esCallbacks.getItemInfo) !== -1; }; /** * Remove the index and restart ES by creating a new one from scratch */ const restartIndexing = async () => { // Retrieve whether content was already being indexed and reindex it too const contentProgress = await contentIndexingProgress.read(userID); const wasContentIndexed = contentProgress && contentProgress.status !== INDEXING_STATUS.INACTIVE; await esDelete(); return enableEncryptedSearch({ isRefreshed: true }).then(() => { if (wasContentIndexed) { return enableContentSearch({ isRefreshed: true }); } }); }; /** * Process events (according to the provided callbacks). It should be used in whatever event handling * system the product uses to correctly sync the ES database. * @param event a single event containing a change to the items stored in the ES database */ const handleEvent = async (event: ESEvent<ESItemMetadata> | undefined) => { // An event can be undefined in case of network instability, but since the app doesn't receive // the update inside the event it's ok to ignore it if (!event) { return; } const { dbExists, cachedIndexKey, isEnablingEncryptedSearch } = esStatus; // We want to sync new events while content indexing is ongoing so that metadata search // can still be used, therefore we don't check whether content is being indexed if (!dbExists || isEnablingEncryptedSearch) { return; } const indexKey = cachedIndexKey || (await getIndexKey(getUserKeys, userID)); if (!indexKey) { return dbCorruptError(); } // Every time a new event happens, we simply catch up everything since the last // processed event. 
In case any failure occurs, the event ID stored will not be // overwritten if (hasBit(event.Refresh, refreshMask)) { return restartIndexing(); } void addSyncing(() => catchUpFromEvent(indexKey, event)); }; /** * Run some initial checks on the status of ES. This must be the first function that * the EncryptedSearchProvider runs, as it checks for new events, continues indexing in * case a previous one was started, checks whether the index key is still accessible */ const initializeES = async () => { // Check whether the ES IDB exists for the current user. Nothing else is // needed in case it doesn't if (!(await checkVersionedESDB(userID))) { return; } // At this point the indexKey must exist const indexKey = await getIndexKey(getUserKeys, userID); if (!indexKey) { return dbCorruptError(); } // If metadata indexing was ongoing, continue it. // Note that if IDB exists and metadata progress doesn't, // something is wrong const metadataProgress = await metadataIndexingProgress.read(userID); if (!metadataProgress) { return dbCorruptError(); } if (metadataProgress.status === INDEXING_STATUS.PAUSED) { return; } if (metadataProgress.status === INDEXING_STATUS.INDEXING) { void enableEncryptedSearch(); return; } const esEnabled = await readEnabled(userID); const isDBLimited = await readLimited(userID); if (typeof esEnabled === 'undefined' || typeof isDBLimited === 'undefined') { return dbCorruptError(); } setESStatus((esStatus) => ({ ...esStatus, dbExists: true, esEnabled, isDBLimited, })); // Check whether content indexing was ongoing const contentProgress = await contentIndexingProgress.read(userID); const isIndexingContent = contentProgress?.status === INDEXING_STATUS.INDEXING; const isContentIndexingPaused = contentProgress?.status === INDEXING_STATUS.PAUSED; const contentIndexingDone = contentProgress?.status === INDEXING_STATUS.ACTIVE; if (isIndexingContent) { return enableContentSearch(); } setESStatus((esStatus) => ({ ...esStatus, cachedIndexKey: indexKey, 
isContentIndexingPaused, contentIndexingDone, })); // Compare the last event "seen" by the DB (saved in localStorage) and // the present one to check whether any event has happened while offline, // but only if indexing was successful let newEvents: ESEvent<ESItemMetadata>[]; let shouldRefresh: boolean; let eventsToStore: EventsObject; try { ({ newEvents, shouldRefresh, eventsToStore } = await esCallbacks.getEventFromIDB()); } catch (error: any) { return await dbCorruptError(); } if (shouldRefresh) { return restartIndexing(); } void addSyncing(() => catchUpFromLastEvents(indexKey, newEvents, eventsToStore)); }; /** * Remove previous search data from the status when no longer in search mode */ useEffect(() => { if (!isSearch) { abortSearchingRef.current.abort(); setESStatus((esStatus) => resetSearchStatus(esStatus)); } }, [isSearch]); const esFunctions: EncryptedSearchFunctions<ESItemMetadata, ESSearchParameters, ESItemContent> = useMemo(() => { return { encryptedSearch, cacheIndexedDB, toggleEncryptedSearch, enableEncryptedSearch, enableContentSearch, pauseContentIndexing, pauseMetadataIndexing, correctDecryptionErrors, highlightString, highlightMetadata, shouldHighlight, isSearchResult, esDelete, handleEvent, initializeES, getCache, resetCache, esStatus, esIndexingProgressState, progressRecorderRef, }; }, [userID, esStatus, esIndexingProgressState, inputESCallbacks]); return esFunctions; }; export default useEncryptedSearch;
7,269
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/useEncryptedSearchIndexingProgress.ts
import { useRef, useState } from 'react';

import { useUser } from '@proton/components/hooks';

import { defaultESIndexingState as defaultESIndexingProgressState } from './constants';
import { estimateIndexingProgress } from './esHelpers';
import { ESIndexingState, RecordProgress } from './models';

/**
 * This hook provides helpers related to the progress of the ES indexing
 */
const useEncryptedSearchIndexingProgress = () => {
    const [user] = useUser();

    // Progress indicator state consumed by the UI
    const [esIndexingProgressState, setESIndexingProgressState] =
        useState<ESIndexingState>(defaultESIndexingProgressState);

    // Most recent [progress, total] pair, kept to compare against the next report
    const progressRecorderRef = useRef<[number, number]>([0, 0]);

    // Timestamp of the most recent report, kept to compare against the next one
    const recordTimestampRef = useRef<number | null>(null);

    const recordProgress: RecordProgress = async (newProgress, indexedDbRow) => {
        const [previousCount, total] = progressRecorderRef.current;
        const previousTimestamp = recordTimestampRef.current;
        const now = performance.now();

        // Record the new progress (a bare number keeps the previous total) and its timestamp
        progressRecorderRef.current = Array.isArray(newProgress) ? newProgress : [newProgress, total];
        recordTimestampRef.current = now;

        const [currentCount] = progressRecorderRef.current;

        // Without a previous non-zero measurement there is no baseline to estimate from
        if (!previousTimestamp || !previousCount) {
            return;
        }

        const estimationResult = await estimateIndexingProgress(
            user.ID,
            total,
            previousCount,
            previousTimestamp,
            currentCount,
            now,
            indexedDbRow
        );
        if (!estimationResult) {
            return;
        }

        const { estimatedMinutes, currentProgressValue } = estimationResult;
        setESIndexingProgressState((previousState) => {
            return {
                ...previousState,
                esProgress: currentCount,
                estimatedMinutes,
                currentProgressValue,
                totalIndexingItems: total,
            };
        });
    };

    return { esIndexingProgressState, progressRecorderRef, recordProgress };
};

export default useEncryptedSearchIndexingProgress;
7,270
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/useEncryptedSearchStatus.ts
import { useEffect, useState } from 'react';

import { DecryptedKey } from '@proton/shared/lib/interfaces';

import { INDEXING_STATUS, defaultESStatus } from './constants';
import { getIndexKey } from './esHelpers';
import {
    checkVersionedESDB,
    contentIndexingProgress,
    metadataIndexingProgress,
    readEnabled,
    readLimited,
} from './esIDB';
import { ESCache, ESStatus } from './models';

/**
 * Initialise and expose the encrypted-search status.
 *
 * On mount, if a versioned ES IndexedDB exists for the user, the status is
 * hydrated from it (index key, enabled/limited flags, paused states of both
 * metadata and content indexing). Whether or not IDB exists (or hydration
 * fails), `isConfigFromESDBLoaded` and `getCacheStatus` are always set so
 * consumers can tell initialisation has completed.
 *
 * @returns a tuple composed of both the _esStatus_ and a setter for it
 */
export const useEncryptedSearchStatus = <ESItemMetadata extends Object, ESSearchParameters, ESItemContent = void>({
    esCacheRef,
    getUserKeys,
    userID,
}: {
    esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>;
    getUserKeys: () => Promise<DecryptedKey[]>;
    userID: string;
}): [
    ESStatus<ESItemMetadata, ESItemContent, ESSearchParameters>,
    React.Dispatch<React.SetStateAction<ESStatus<ESItemMetadata, ESItemContent, ESSearchParameters>>>,
] => {
    const [esStatus, setESStatus] =
        useState<ESStatus<ESItemMetadata, ESItemContent, ESSearchParameters>>(defaultESStatus);

    useEffect(() => {
        const initEsStatus = async () => {
            try {
                const esdbExists = await checkVersionedESDB(userID);

                if (esdbExists) {
                    const indexKey = await getIndexKey(getUserKeys, userID);
                    const esEnabled = await readEnabled(userID);
                    const isDBLimited = await readLimited(userID);
                    const metadataIndexingProgressState = await metadataIndexingProgress.read(userID);
                    const contentIndexingProgressState = await contentIndexingProgress.read(userID);

                    setESStatus((esStatus) => ({
                        ...esStatus,
                        cachedIndexKey: indexKey,
                        esEnabled: esEnabled ?? false,
                        isDBLimited: isDBLimited ?? false,
                        isMetadataIndexingPaused: metadataIndexingProgressState?.status === INDEXING_STATUS.PAUSED,
                        isContentIndexingPaused: contentIndexingProgressState?.status === INDEXING_STATUS.PAUSED,
                    }));
                }
            } catch (error) {
                // Best-effort init: log and fall through so the flags below
                // are still set and consumers are not blocked forever
                console.warn('an error occurred on init es status', error);
            }

            /**
             * We need to set those variables whether we have already existing esdb or not
             */
            setESStatus((esStatus) => ({
                ...esStatus,
                isConfigFromESDBLoaded: true,
                getCacheStatus: () => ({
                    isCacheReady: esCacheRef.current.isCacheReady,
                    isCacheLimited: esCacheRef.current.isCacheLimited,
                }),
            }));
        };

        void initEsStatus();
    }, []);

    return [esStatus, setESStatus];
};
7,271
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esAPI.ts
import { getIsOfflineError, getIsTimeoutError, isNotExistError } from '@proton/shared/lib/api/helpers/apiErrorHelper';
import { METRICS_LOG, SECOND } from '@proton/shared/lib/constants';
import { randomDelay, sendMetricsReport } from '@proton/shared/lib/helpers/metrics';
import { wait } from '@proton/shared/lib/helpers/promise';
import { captureMessage } from '@proton/shared/lib/helpers/sentry';
import { Api } from '@proton/shared/lib/interfaces';

import { ES_MAX_RETRIES, ES_TEMPORARY_ERRORS } from '../constants';
import { contentIndexingProgress, readNumMetadata, readSize } from '../esIDB';
import { ESIndexMetrics, ESSearchMetrics } from '../models';
import { estimateIndexingDuration } from './esBuild';

/**
 * Helper to send ES-related sentry reports
 * @param errorMessage the error message that will appear in the title of the log
 * @param extra any other contextual information that will be attached to the log
 */
export const esSentryReport = (errorMessage: string, extra?: any) => {
    captureMessage(`[EncryptedSearch] ${errorMessage}`, { extra });
};

/**
 * Helper to run api calls for ES. They have the following properties:
 * - they are transparent to users, i.e. silence is set to true;
 * - they have low priority to avoid jailing, i.e. the Priority header is set to u=7;
 * - in case of temporary failuers (i.e. codes 408, 429, 502, 503, NetworkError or TimeoutError)
 *   they are retried (using the retry-after header if present);
 * - in case of permanent failures (i.e. none of the above), a sentry report is sent with
 *   the EncryptedSearch tag;
 * - in case a user ID is provided, which should be only during indexing, a blob is stored in
 *   local storage to store the timestamp of a correctly indexed batch of items to estimate indexing time.
 * @param api callback to send api requests
 * @param signal abort signal to interrupt requests
 * @param options the payload and route of the api request
 * @param callingContext contextual information on the caller of this helper. It is used only to
 *   include in sentry reports in case of permanent errors
 * @param retries the number of times the same call has already been retried
 * @returns the parsed api response, or undefined when aborted or when retries are exhausted
 */
export const apiHelper = async <T>(
    api: Api,
    signal: AbortSignal | undefined,
    options: Object,
    callingContext: string,
    retries: number = 1
): Promise<T | undefined> => {
    if (signal?.aborted) {
        return;
    }

    let apiResponse: T;
    try {
        apiResponse = await api<T>({
            ...options,
            silence: true,
            headers: { Priority: 'u=7' },
            signal,
        });
    } catch (error: any) {
        // Anything that is not a known transient condition (offline, timeout,
        // network hiccup, abort, or a status in ES_TEMPORARY_ERRORS) counts as
        // unknown and is reported to sentry
        const isUnknownError =
            !getIsOfflineError(error) &&
            !getIsTimeoutError(error) &&
            error.name !== 'NetworkError' &&
            error.message !== 'Failed to fetch' &&
            error.message !== 'Load failed' &&
            !ES_TEMPORARY_ERRORS.includes(error.status) &&
            error.message !== 'Operation aborted' &&
            error.name !== 'AbortError';

        if (isUnknownError) {
            esSentryReport(`apiHelper: ${callingContext}`, { error });
        }

        // Give up after ES_MAX_RETRIES attempts, or immediately if the
        // resource no longer exists (retrying cannot succeed)
        if (retries >= ES_MAX_RETRIES || isNotExistError(error)) {
            return;
        }

        // Honour the server's retry-after header, defaulting to 5 seconds
        const retryAfterSeconds = parseInt(error.response?.headers?.get('retry-after') || '5', 10);
        await wait(retryAfterSeconds * SECOND);

        return apiHelper<T>(api, signal, options, callingContext, retries + 1);
    }

    return apiResponse;
};

/**
 * Send metrics about encrypted search
 */
type SendESMetrics = {
    (api: Api, Title: 'index', Data: ESIndexMetrics): Promise<void>;
    (api: Api, Title: 'search', Data: ESSearchMetrics): Promise<void>;
};
const sendESMetrics: SendESMetrics = async (api, Title, Data) =>
    sendMetricsReport(api, METRICS_LOG.ENCRYPTED_SEARCH, Title, Data);

/**
 * Send metrics about the indexing process (only for Mail)
 * TODO: move to Mail and convert into generic callback for useEncryptedSearch
 */
export const sendIndexingMetricsForMail = async (api: Api, userID: string) => {
    const progressBlob = await contentIndexingProgress.read(userID);
    if (!progressBlob) {
        return;
    }
    const { totalItems, isRefreshed, numPauses, timestamps, originalEstimate } = progressBlob;
    const { indexTime, totalInterruptions } = estimateIndexingDuration(timestamps);

    const indexSize = (await readSize(userID)) || 0;

    return sendESMetrics(api, 'index', {
        // Interruptions caused by user-initiated pauses are not counted
        numInterruptions: totalInterruptions - numPauses,
        indexSize,
        originalEstimate,
        indexTime,
        // Note: the metrics dashboard expects a variable called "numMessagesIndexed" but
        // it doesn't make too much sense in general to talk about "messages"
        numMessagesIndexed: totalItems,
        isRefreshed,
        numPauses,
    });
};

/**
 * Send metrics about a single encrypted search
 */
export const sendSearchingMetrics = async (
    api: Api,
    userID: string,
    cacheSize: number,
    searchTime: number,
    isFirstSearch: boolean,
    isCacheLimited: boolean
) => {
    // Note: the metrics dashboard expects a variable called "numMessagesIndexed" but
    // it doesn't make too much sense in general to talk about "messages"
    const numMessagesIndexed = await readNumMetadata(userID);
    if (typeof numMessagesIndexed === 'undefined') {
        // If this is undefined, something went wrong when accessing IDB,
        // therefore it makes little sense to send metrics
        return;
    }

    const indexSize = (await readSize(userID)) || 0;

    return sendESMetrics(api, 'search', {
        indexSize,
        numMessagesIndexed,
        cacheSize,
        searchTime,
        isFirstSearch,
        isCacheLimited,
    });
};

/**
 * Send a sentry report for when ES is too slow
 * @param userID the user ID
 */
export const sendSlowSearchReport = async (userID: string) => {
    const numItemsIndexed = await readNumMetadata(userID);
    // Random delay to avoid de-anonymising the user via report timing
    await randomDelay();
    esSentryReport('Search is taking too long', { numItemsIndexed });
};
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esBuild.ts
import { IDBPDatabase } from 'idb';

import { CryptoProxy } from '@proton/crypto';
import runInQueue from '@proton/shared/lib/helpers/runInQueue';
import isTruthy from '@proton/utils/isTruthy';

import {
    AesKeyGenParams,
    ES_BACKGROUND_CONCURRENT,
    ES_MAX_CONCURRENT,
    ES_MAX_PARALLEL_ITEMS,
    INDEXING_STATUS,
    KeyUsages,
    STORING_OUTCOME,
    TIMESTAMP_TYPE,
    defaultESProgress,
} from '../constants';
import {
    contentIndexingProgress,
    createESDB,
    executeContentOperations,
    executeMetadataOperations,
    initializeConfig,
    metadataIndexingProgress,
    openESDB,
    readIndexKey,
    readLimited,
    readMetadataBatch,
    readSortedIDs,
    setLimited,
    wrappedGetOldestInfo,
    writeAllEvents,
} from '../esIDB';
import {
    AesGcmCiphertext,
    ESCache,
    ESItemInfo,
    ESProgress,
    ESTimepoint,
    EncryptedItemWithInfo,
    EncryptedSearchDB,
    EventsObject,
    GetItemInfo,
    GetUserKeys,
    InternalESCallbacks,
} from '../models';
import { esSentryReport } from './esAPI';
import { sizeOfESItem } from './esCache';
import { isObjectEmpty } from './esUtils';

/**
 * Execute the initial steps of a new metadata indexing, i.e. generating an index key
 * and the DB itself. The fresh AES index key is wrapped with the user's primary key
 * before being persisted, and the initial progress blob is written to IDB.
 * @returns the index key and the open DB handle, or both undefined if IDB or key
 * generation is unavailable (indexing then proceeds in memory only)
 */
export const initializeEncryptedSearch = async (
    userID: string,
    getUserKeys: GetUserKeys,
    previousEventIDs: EventsObject,
    isRefreshed: boolean,
    totalItems: number
) => {
    let esDB: IDBPDatabase<EncryptedSearchDB>;
    let indexKey: CryptoKey;
    try {
        esDB = await createESDB(userID);
        indexKey = await crypto.subtle.generateKey(AesKeyGenParams, true, KeyUsages);
    } catch (error: any) {
        // In case IndexedDB cannot be initialised, or something is wrong with the index key,
        // we still want to continue the indexing process without any permanent
        // storage but only keeping the cache in memory
        return { indexKey: undefined, esDB: undefined };
    }

    // The index key is encrypted using the primary user key, the resulting ciphertext
    // is kept in binary form since IndexedDB allows this format
    const userKeysList = await getUserKeys();
    const primaryUserKey = userKeysList[0];
    const keyToEncrypt = await crypto.subtle.exportKey('jwk', indexKey);
    const { message: encryptedKey } = await CryptoProxy.encryptMessage({
        textData: JSON.stringify(keyToEncrypt),
        encryptionKeys: [primaryUserKey.publicKey],
        signingKeys: [primaryUserKey.privateKey],
    });

    const initialProgress: ESProgress = {
        ...defaultESProgress,
        totalItems,
        isRefreshed,
        status: INDEXING_STATUS.INDEXING,
    };

    await initializeConfig(userID, encryptedKey);
    await writeAllEvents(userID, previousEventIDs);
    await metadataIndexingProgress.write(userID, initialProgress);

    return { indexKey, esDB };
};

/**
 * Decrypt the given encrypted index key. The armored blob is decrypted with any of
 * the user's private keys, then the contained JWK is re-imported as a WebCrypto key.
 * @throws if the decrypted payload cannot be imported as a valid CryptoKey
 */
export const decryptIndexKey = async (getUserKeys: GetUserKeys, encryptedKey: string) => {
    const userKeysList = await getUserKeys();
    const decryptionResult = await CryptoProxy.decryptMessage({
        armoredMessage: encryptedKey,
        verificationKeys: userKeysList.map(({ publicKey }) => publicKey),
        decryptionKeys: userKeysList.map(({ privateKey }) => privateKey),
    });

    const { data: decryptedKey } = decryptionResult;

    const importedKey = await crypto.subtle.importKey(
        'jwk',
        JSON.parse(decryptedKey),
        { name: AesKeyGenParams.name },
        true,
        KeyUsages
    );

    // A successfully imported key exposes an `algorithm` property
    if ((importedKey as CryptoKey).algorithm) {
        return importedKey;
    }

    throw new Error('Importing key failed');
};

/**
 * Retrieve and decrypt the index key. Return undefined if something goes wrong
 * or if there is no key.
 */
export const getIndexKey = async (getUserKeys: GetUserKeys, userID: string) => {
    try {
        const encrypted = await readIndexKey(userID);
        if (!encrypted) {
            throw new Error('Reading index key error');
        }

        return await decryptIndexKey(getUserKeys, encrypted);
    } catch (error: any) {
        esSentryReport('getIndexKey', { error });
    }
};

/**
 * Create the encrypted object to store in IndexedDB: the item is JSON-serialised
 * and AES-GCM-encrypted with a fresh random 12-byte IV.
 */
export const encryptItem = async (itemToStore: Object, indexKey: CryptoKey): Promise<AesGcmCiphertext> => {
    const itemToEncrypt = JSON.stringify(itemToStore);
    const textEncoder = new TextEncoder();

    const iv = new Uint8Array(12);
    crypto.getRandomValues(iv);

    const ciphertext = await crypto.subtle.encrypt(
        { iv, name: AesKeyGenParams.name },
        indexKey,
        textEncoder.encode(itemToEncrypt)
    );

    return { ciphertext, iv };
};

/**
 * Store one batch of items metadata to IndexedDB (encrypted), or — when IDB or the
 * index key are unavailable — directly into the in-memory cache in plaintext.
 */
export const storeItemsMetadata = async <ESItemMetadata extends Object>(
    userID: string,
    resultMetadata: ESItemMetadata[],
    esSupported: boolean,
    indexKey: CryptoKey | undefined,
    getItemInfo: GetItemInfo<ESItemMetadata>,
    esCacheRef?: React.MutableRefObject<ESCache<ESItemMetadata, unknown>>
) => {
    const batchSize = resultMetadata.reduce((sum, item) => sum + sizeOfESItem(item), 0);

    // If either indexKey or esDB are undefined, we still want to index all metadata
    // and store them in cache only
    if (esSupported && indexKey) {
        const itemsToAdd: EncryptedItemWithInfo[] = await Promise.all(
            resultMetadata.map(async (itemToStore) => ({
                ID: getItemInfo(itemToStore).ID,
                timepoint: getItemInfo(itemToStore).timepoint,
                aesGcmCiphertext: await encryptItem(itemToStore, indexKey),
            }))
        );

        await executeMetadataOperations(userID, [], itemsToAdd);
    } else if (esCacheRef) {
        resultMetadata.forEach((metadataItem) => {
            esCacheRef.current.esCache.set(getItemInfo(metadataItem).ID, { metadata: metadataItem });
        });
        esCacheRef.current.cacheSize += batchSize;
    }

    return true;
};

/**
 * Start metadata indexing: repeatedly fetch batches of metadata via
 * `queryItemsMetadata` and persist them until the source is exhausted or
 * an error/abort occurs.
 * @returns true when process is gracefully stopped (or paused), false on error
 */
export const buildMetadataDB = async <ESItemMetadata extends Object>({
    userID,
    esSupported,
    indexKey,
    esCacheRef,
    queryItemsMetadata,
    getItemInfo,
    abortIndexingRef,
    recordProgress,
    isInitialIndexing = true,
    isBackgroundIndexing,
}: {
    userID: string;
    esSupported: boolean;
    indexKey: CryptoKey | undefined;
    esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, unknown>>;
    queryItemsMetadata: InternalESCallbacks<ESItemMetadata, unknown>['queryItemsMetadata'];
    getItemInfo: GetItemInfo<ESItemMetadata>;
    abortIndexingRef: React.MutableRefObject<AbortController>;
    recordProgress: () => Promise<void>;
    isInitialIndexing?: boolean;
    isBackgroundIndexing?: boolean;
}) => {
    if (isInitialIndexing) {
        await metadataIndexingProgress.addTimestamp(userID, TIMESTAMP_TYPE.START);
    }

    let { resultMetadata, setRecoveryPoint } = await queryItemsMetadata(
        abortIndexingRef.current.signal,
        isBackgroundIndexing
    );

    // If it's undefined, it means an error occurred
    if (!resultMetadata) {
        return false;
    }

    while (resultMetadata.length) {
        const success = await storeItemsMetadata<ESItemMetadata>(
            userID,
            resultMetadata,
            esSupported,
            indexKey,
            getItemInfo,
            esCacheRef
        ).catch((error: any) => {
            // Aborts are expected; anything else is reported to sentry
            if (
                !(error.message && error.message === 'Operation aborted') &&
                !(error.name && error.name === 'AbortError')
            ) {
                esSentryReport('storeItemsBatches: storeItems', { error });
            }
            return false;
        });

        if (!success) {
            return false;
        }

        await recordProgress();

        if (setRecoveryPoint) {
            await setRecoveryPoint();
        }

        /**
         * If process gets aborted, we shut it down right before next fetch batch
         */
        if (abortIndexingRef.current.signal.aborted) {
            return true;
        }

        ({ resultMetadata, setRecoveryPoint } = await queryItemsMetadata(
            abortIndexingRef.current.signal,
            isBackgroundIndexing
        ));

        if (!resultMetadata) {
            return false;
        }
    }

    // Without IDB support the in-memory cache is the only store, and it is now complete
    if (!esSupported) {
        esCacheRef.current.isCacheReady = true;
    }

    return true;
};

/**
 * Add content to an existing metadata DB: fetch, encrypt and store item content
 * in batches of ES_MAX_PARALLEL_ITEMS, maintaining a recovery point so an
 * interrupted run can resume.
 * @returns a STORING_OUTCOME: SUCCESS, FAILURE (aborted) or QUOTA (IDB full)
 */
export const buildContentDB = async <ESItemContent>(
    userID: string,
    indexKey: CryptoKey,
    abortIndexingRef: React.MutableRefObject<AbortController>,
    recordProgress: (progress: number) => void,
    fetchESItemContent: Required<InternalESCallbacks<unknown, unknown, ESItemContent>>['fetchESItemContent'],
    inputrecoveryPoint: ESTimepoint | undefined,
    isInitialIndexing: boolean = true,
    isBackgroundIndexing?: boolean
): Promise<STORING_OUTCOME> => {
    let counter = 0;

    if (isInitialIndexing) {
        await contentIndexingProgress.addTimestamp(userID, TIMESTAMP_TYPE.START);
    }

    let abortFetching = new AbortController();

    // Fetch and encrypt a single item's content; counts the item in the
    // progress even when its content is empty (e.g. decryption failed upstream)
    const esIteratee = async (
        ID: string,
        timepoint: ESTimepoint
    ): Promise<{
        ID: string;
        timepoint: ESTimepoint;
        aesGcmCiphertext?: AesGcmCiphertext;
    }> => {
        // In case any other parallel executions of this function fails, abortFetching
        // is triggered such that all others stop as well
        if (abortIndexingRef.current.signal.aborted || abortFetching.signal.aborted) {
            throw new Error('Operation aborted');
        }

        const itemToStore = await fetchESItemContent(ID, abortIndexingRef.current.signal);
        if (!itemToStore) {
            throw new Error('Item fetching failed');
        }

        if (isObjectEmpty(itemToStore)) {
            // If decryption fails, we want to anyway count the item
            recordProgress(++counter);
            return { ID, timepoint };
        }

        const aesGcmCiphertext = await encryptItem(itemToStore, indexKey);
        recordProgress(++counter);
        return { ID, timepoint, aesGcmCiphertext };
    };

    let indexingOutcome = STORING_OUTCOME.SUCCESS;

    let sortedIDs = await readSortedIDs(userID, true, inputrecoveryPoint);
    if (!sortedIDs) {
        throw new Error('IDB caching cannot read sorted IDs');
    }

    /**
     * If we are recovering, e.g. after a password reset, we don't
     * want to re-index content items that are already present in IDB
     *
     * TODO: improve decryption error correction to handle cases like:
     *  - user refreshes browser during correction (remaining undecrypted item won't be processed)
     */
    if (!isInitialIndexing) {
        const esDB = await openESDB(userID);
        if (!esDB) {
            throw new Error('ESDB not available during content indexing');
        }
        const maybeSortedIDsWithoutContent = await Promise.all(
            sortedIDs.map(async (ID) => ((await esDB.count('content', ID)) === 1 ? undefined : ID))
        );
        sortedIDs = maybeSortedIDsWithoutContent.filter(isTruthy);
        esDB.close();
    }

    let recoveryPoint: ESItemInfo | undefined;

    let IDs = sortedIDs.slice(0, ES_MAX_PARALLEL_ITEMS);
    while (IDs.length) {
        if (abortIndexingRef.current.signal.aborted) {
            return STORING_OUTCOME.FAILURE;
        }

        // Map each ID of the batch to its timepoint, read from the metadata store
        const infoMap = new Map(
            await readMetadataBatch(userID, IDs).then((encryptedMetadata) => {
                if (!encryptedMetadata) {
                    return;
                }
                return encryptedMetadata
                    .map((metadata): [string, ESTimepoint] | undefined =>
                        !!metadata ? [metadata.ID, metadata.timepoint] : undefined
                    )
                    .filter(isTruthy);
            })
        );
        if (infoMap.size !== IDs.length) {
            throw new Error('Metadata not available to index content');
        }

        // In case any of the parallel execution fails, we want all other to stop but still
        // retain the outcome of those which had succeeded in order to index at least those and
        // to set the recovery point accordingly
        let fetchingFailure = false;
        const maxProcessing = isBackgroundIndexing ? ES_BACKGROUND_CONCURRENT : ES_MAX_CONCURRENT;
        const encryptedContent = await runInQueue(
            IDs.map(
                // eslint-disable-next-line @typescript-eslint/no-loop-func
                (ID) => () =>
                    esIteratee(ID, infoMap.get(ID)!).catch(() => {
                        fetchingFailure = true;
                        abortFetching.abort();
                    })
            ),
            maxProcessing
        );

        // Later fetches can finish later than earlier ones. To be on the safe side we consider as
        // valid anything before the first undefined is found, but only in case of a failure (because
        // legitimate undefined can exist)
        const firstUndefined = fetchingFailure ? encryptedContent.indexOf(undefined) : -1;
        const itemsToAdd = (
            firstUndefined === -1 ? encryptedContent : encryptedContent.slice(0, firstUndefined)
        ).filter(isTruthy);

        if (itemsToAdd.length) {
            const last = itemsToAdd[itemsToAdd.length - 1];
            recoveryPoint = { ID: last.ID, timepoint: last.timepoint };

            const storingOutcome = await executeContentOperations(
                userID,
                [],
                itemsToAdd.filter((item): item is EncryptedItemWithInfo => !!item.aesGcmCiphertext)
            );

            if (storingOutcome === STORING_OUTCOME.SUCCESS) {
                // In case the batch was successfully stored, we keep on with the following batch
                if (isInitialIndexing) {
                    await contentIndexingProgress.setRecoveryPoint(userID, recoveryPoint.timepoint);
                }
            } else if (storingOutcome === STORING_OUTCOME.QUOTA) {
                // If we have reached the quota, we need to stop indexing
                indexingOutcome = STORING_OUTCOME.QUOTA;
                break;
            }
        }

        if (abortIndexingRef.current.signal.aborted) {
            return STORING_OUTCOME.FAILURE;
        }

        abortFetching = new AbortController();
        if (isInitialIndexing) {
            await contentIndexingProgress.addTimestamp(userID);
        }

        // Advance past the last successfully stored item
        const index = !!recoveryPoint ? sortedIDs.indexOf(recoveryPoint.ID) + 1 : 0;
        IDs = sortedIDs.slice(index, index + ES_MAX_PARALLEL_ITEMS);
    }

    return indexingOutcome;
};

/**
 * In case IDB is limited, trigger a new content indexing by using
 * the oldest indexed content as recovery point. This is done in case
 * items are removed after a syncing operation, therefore we might have
 * some space left to index more content
 */
export const retryContentIndexing = async <ESItemMetadata, ESSearchParameters, ESItemContent>(
    userID: string,
    indexKey: CryptoKey,
    esCallbacks: InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>,
    abortIndexingRef: React.MutableRefObject<AbortController>
) => {
    const isDBLimited = await readLimited(userID);
    if (!isDBLimited) {
        return;
    }

    const itemInfo = await wrappedGetOldestInfo(userID);
    if (!itemInfo) {
        return;
    }

    const { fetchESItemContent } = esCallbacks;
    if (!fetchESItemContent) {
        return;
    }

    const storingOutcome = await buildContentDB<ESItemContent>(
        userID,
        indexKey,
        abortIndexingRef,
        () => {},
        fetchESItemContent,
        itemInfo.timepoint,
        false
    );

    // In case we have recovered, we set the flag accordingly
    if (storingOutcome === STORING_OUTCOME.SUCCESS) {
        await setLimited(userID, false);
    }
};

/**
 * Compute the total indexing time based on locally cached timestamps.
 * Consecutive timestamp pairs contribute to the running time unless the
 * interval represents a stop; STOP→START gaps count as interruptions.
 */
export const estimateIndexingDuration = (
    timestamps: {
        type: TIMESTAMP_TYPE;
        time: number;
    }[]
) => {
    let indexTime = 0;
    let totalInterruptions = 0;

    for (let index = 0; index < timestamps.length - 1; index++) {
        const [timestamp1, timestamp2] = timestamps.slice(index, index + 2);

        if (timestamp1.type !== TIMESTAMP_TYPE.STOP && timestamp2.type !== TIMESTAMP_TYPE.START) {
            indexTime += timestamp2.time - timestamp1.time;
        } else if (timestamp1.type !== TIMESTAMP_TYPE.STOP || timestamp2.type !== TIMESTAMP_TYPE.STOP) {
            totalInterruptions++;
        }
    }

    return { indexTime, totalInterruptions };
};
7,273
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esCache.ts
import { ES_MAX_CACHE, ES_MAX_ITEMS_PER_BATCH } from '../constants'; import { readContentBatch, readMetadataBatch, readSortedIDs } from '../esIDB'; import { CachedItem, ESCache, ESTimepoint, GetItemInfo } from '../models'; import { decryptFromDB } from './esSearch'; import { isTimepointSmaller } from './esUtils'; /** * Estimate the size of a ESItem object in memory */ export const sizeOfESItem = (value: any): number => { if (typeof value === 'boolean') { return 4; } else if (typeof value === 'string') { return value.length * 2; } else if (typeof value === 'number') { return 8; } else if (Array.isArray(value)) { return value.map(sizeOfESItem).reduce((p, c) => p + c, 0); } else if (value === null) { // This is to avoid the "typeof null === 'object'" bug return 0; } else if (typeof value === 'object') { // Note that object keys are ignored as this function is already an // over-estimate of the actual memory footprint return sizeOfESItem(Object.values(value)); } // Only 'undefined' type should reach this point return 0; }; /** * Cache both content and metadata at once */ export const cacheIDB = async <ESItemMetadata, ESItemContent>( indexKey: CryptoKey, userID: string, esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>, checkpoint?: ESTimepoint ) => { esCacheRef.current.isCacheReady = false; const sortedIDs = await readSortedIDs(userID, true, checkpoint); if (!sortedIDs) { throw new Error('IDB caching cannot read sorted IDs'); } // In case IDB is empty, there is nothing to cache if (!sortedIDs.length) { esCacheRef.current.isCacheReady = true; return; } for (let i = 0; i < sortedIDs.length; i += ES_MAX_ITEMS_PER_BATCH) { const IDs = sortedIDs.slice(i, i + ES_MAX_ITEMS_PER_BATCH); const [metadata, content] = await Promise.all([readMetadataBatch(userID, IDs), readContentBatch(userID, IDs)]); if (!metadata || !content) { throw new Error('IDB caching failed to get data'); } const data = await Promise.all( metadata.map(async (encryptedMetadata, index) 
=> { if (!encryptedMetadata) { return; } const encryptedContent = content[index]; const [plaintextMetadata, plaintextContent] = await Promise.all([ decryptFromDB<ESItemMetadata>(encryptedMetadata.aesGcmCiphertext, indexKey), !!encryptedContent ? decryptFromDB<ESItemContent>(encryptedContent, indexKey) : undefined, ]); return { ID: encryptedMetadata.ID, metadata: plaintextMetadata, content: plaintextContent }; }) ); data.forEach((dataPoint) => { if (!dataPoint || esCacheRef.current.cacheSize >= ES_MAX_CACHE) { return; } const { ID, metadata, content } = dataPoint; esCacheRef.current.esCache.set(ID, { metadata, content, }); esCacheRef.current.cacheSize += sizeOfESItem(metadata) + sizeOfESItem(content); if (esCacheRef.current.cacheSize >= ES_MAX_CACHE) { esCacheRef.current.isCacheLimited = true; } }); } esCacheRef.current.isCacheReady = true; }; /** * Remove a single item from cache. If contentOnly is true, * the item's metadata is kept, otherwise it is completely * removed from cache. It returns the size of the removed item * (or portion thereof) */ export const removeFromESCache = <ESItemMetadata, ESItemContent>( itemID: string, esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>, contentOnly: boolean ) => { let size = 0; const item = esCacheRef.current.esCache.get(itemID); if (!item) { return size; } if (contentOnly) { esCacheRef.current.esCache.set(itemID, { metadata: item.metadata }); size = sizeOfESItem(item.content); } else { esCacheRef.current.esCache.delete(itemID); size = sizeOfESItem(item); } esCacheRef.current.cacheSize -= size; }; /** * Return the oldest cached item, which is the last one since * the cache is in reverse chronological order */ const getOldestCachedItem = <ESItemMetadata, ESItemContent>( esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>> ): CachedItem<ESItemMetadata, ESItemContent> | undefined => { if (!esCacheRef.current.isCacheReady) { return; } const values = 
[...esCacheRef.current.esCache.values()]; return values.pop(); }; /** * Return the oldest cached item's timepoint, which is the last one since * the cache is in reverse chronological order */ export const getOldestCachedTimepoint = <ESItemMetadata>( esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, unknown>>, getItemInfo: GetItemInfo<ESItemMetadata> ) => { const oldestItem = getOldestCachedItem(esCacheRef); if (!oldestItem) { return; } return getItemInfo(oldestItem.metadata).timepoint; }; /** * Remove items to make room for the content of the * given one */ const freeCacheSpace = <ESItemMetadata, ESItemContent>( esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>, getItemInfo: GetItemInfo<ESItemMetadata>, oldestItem: CachedItem<ESItemMetadata, ESItemContent> | undefined, itemSize: number ) => { while (oldestItem && esCacheRef.current.esCache.size + itemSize >= ES_MAX_CACHE) { removeFromESCache(getItemInfo(oldestItem.metadata).ID, esCacheRef, true); oldestItem = getOldestCachedItem(esCacheRef); } }; /** * Return the most recent cached item */ const getMostRecentCachedItem = <ESItemMetadata, ESItemContent>( esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>> ): CachedItem<ESItemMetadata, ESItemContent> | undefined => { if (!esCacheRef.current.isCacheReady) { return; } const values = [...esCacheRef.current.esCache.values()]; return values.shift(); }; /** * Restructure the cache in such a way that the order of insertion * correspond to the reverse chronological order of items */ const reorderCache = <ESItemMetadata, ESItemContent>( esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>, getItemInfo: GetItemInfo<ESItemMetadata> ) => { const entries = [...esCacheRef.current.esCache.entries()]; entries.sort(([, i1], [, i2]) => isTimepointSmaller(getItemInfo(i1.metadata).timepoint, getItemInfo(i2.metadata).timepoint) ? 
1 : -1 ); esCacheRef.current.esCache.clear(); entries.forEach(([ID, value]) => { esCacheRef.current.esCache.set(ID, value); }); }; /** * Add a single item to cache, depending on whether the size limit has been reached or not */ export const addToESCache = <ESItemMetadata, ESItemContent>( inputItem: CachedItem<ESItemMetadata, ESItemContent>, esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>, getItemInfo: GetItemInfo<ESItemMetadata> ) => { if (!esCacheRef.current.esCache.size && !esCacheRef.current.isCacheReady) { return; } const itemSize = sizeOfESItem(inputItem); let wereItemsRemoved = false; if (esCacheRef.current.isCacheLimited || esCacheRef.current.cacheSize + itemSize >= ES_MAX_CACHE) { // The oldest item is needed as a reference to decide whether to include the given one or not const oldestItem = getOldestCachedItem(esCacheRef); if ( oldestItem && isTimepointSmaller(getItemInfo(inputItem.metadata).timepoint, getItemInfo(oldestItem.metadata).timepoint) ) { return; } freeCacheSpace<ESItemMetadata, ESItemContent>(esCacheRef, getItemInfo, oldestItem, itemSize); wereItemsRemoved = true; } // In case the item already existed and this is just updating it, we want to first remove the size // of the existing version. 
In case it doesn't exist this variable will simply be 0 const previousItemSize = sizeOfESItem(esCacheRef.current.esCache.get(getItemInfo(inputItem.metadata).ID)); esCacheRef.current.esCache.set(getItemInfo(inputItem.metadata).ID, inputItem); esCacheRef.current.cacheSize += itemSize - previousItemSize; esCacheRef.current.isCacheLimited ||= wereItemsRemoved; // If the item to be added is not newer than the most recent item in // cache, the whole cache needs to be rebuilt to keep the reverse // chronological order of the cache (which is a map, therefore insertion // order matters) const mostRecentMetadata = getMostRecentCachedItem(esCacheRef)?.metadata; if ( mostRecentMetadata && isTimepointSmaller(getItemInfo(inputItem.metadata).timepoint, getItemInfo(mostRecentMetadata).timepoint) ) { return reorderCache(esCacheRef, getItemInfo); } }; /** * Add more content to a limited cache in case many were removed */ export const refreshESCache = async <ESItemMetadata, ESItemContent>( indexKey: CryptoKey, userID: string, esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>, getItemInfo: GetItemInfo<ESItemMetadata> ) => { const { cacheSize, isCacheReady, isCacheLimited } = esCacheRef.current; // Perform this operation only if there is space left in cache but not all items are cached, and if the initial // caching operation had succeeded if (cacheSize < ES_MAX_CACHE && isCacheLimited && isCacheReady) { const oldestItem = getOldestCachedItem(esCacheRef); return cacheIDB<ESItemMetadata, ESItemContent>( indexKey, userID, esCacheRef, !!oldestItem ? getItemInfo(oldestItem.metadata).timepoint : undefined ); } };
7,274
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esHighlight.tsx
import { ReactNode } from 'react'; import { parseStringToDOM } from '@proton/shared/lib/helpers/dom'; import { DIACRITICS_REGEXP, ES_MAX_INITIAL_CHARS } from '../constants'; import { HighlightMetadata } from '../models'; import { esSentryReport } from './esAPI'; import { normalizeString } from './esUtils'; /** * Removes overlapping intervals to highlight */ const sanitisePositions = (positions: [number, number][]) => { if (positions.length < 2) { return positions; } positions.sort((position1, position2) => position1[0] - position2[0]); const result = []; let previousValue = positions[0]; for (let i = 1; i < positions.length; i += 1) { if (previousValue[1] >= positions[i][0]) { previousValue = [previousValue[0], Math.max(previousValue[1], positions[i][1])]; } else { result.push(previousValue); previousValue = positions[i]; } } result.push(previousValue); return result; }; /** * List the indexes of all ligatures, as well as to how many * characters they split to after a NFKD transformation */ const findLigatures = (text: string, normalNFKD: string): [number, number][] => { const normalNFD = normalizeString(text, 'NFD'); if (normalNFD.length === normalNFKD.length) { return []; } const result: [number, number][] = []; for (let i = 0; i < normalNFD.length; i++) { if (!normalNFKD.includes(normalNFD[i], i)) { result.push([i, normalizeString(normalNFD[i], 'NFKD').length]); } } return result; }; /** * List the indexes of all diacritics */ const findDiacritics = (text: string) => Array.from(text.matchAll(DIACRITICS_REGEXP), (match) => match.index!); /** * Convert an index from the diacritics-free NFD string to the original one */ const nfdToText = (diacritics: number[], index: number) => { for (let d = 0; d < diacritics.length; d++) { if (index >= diacritics[d]) { index++; } else { break; } } return index; }; /** * Convert an index from the decomposed NFKD string to the potentially * more compact NFD representation, both diacritics-free */ const nfkdToNFD = (ligatures: 
[number, number][], index: number, isEnd: boolean) => { for (let l = 0; l < ligatures.length; l++) { if (index > ligatures[l][0]) { if (index >= ligatures[l][0] + ligatures[l][1]) { index -= ligatures[l][1] - 1; } else { return ligatures[l][0] + (isEnd ? 1 : 0); } } else { break; } } return index; }; /** * Convert a start index from the diacritics-free NFKD string to the original one */ const updateStartIndex = (diacritics: number[], ligatures: [number, number][], index: number) => nfdToText(diacritics, nfkdToNFD(ligatures, index, false)); /** * Convert an end index from the diacritics-free NFKD string to the original one */ const updateEndIndex = (diacritics: number[], ligatures: [number, number][], index: number) => nfdToText(diacritics, nfkdToNFD(ligatures, index, true)); /** * Find occurrences of the given keywords in the raw text. It returns a list * of couples where the first element is the index where the match starts, while * the second is the index where the match ends (inclusive). Indexes refer to the * input raw text. Matching intervals are sanitised in such a way that they are * non-overlapping */ const findOccurrences = (rawText: string, normalizedKeywords: string[]) => { const searchText = normalizeString(rawText); const diacritics = findDiacritics(rawText); const ligatures = findLigatures(rawText, searchText); const positions: [number, number][] = []; for (const keyword of normalizedKeywords) { let finder = 0; let startFrom = 0; while (finder !== -1) { finder = searchText.indexOf(keyword, startFrom); if (finder !== -1) { // Each occurrence is represented as a [start, end) couple // of indexes, i.e. 
the start is inclusive while the end is not positions.push([ updateStartIndex(diacritics, ligatures, finder), updateEndIndex(diacritics, ligatures, finder + keyword.length), ]); startFrom = finder + keyword.length; } } } // These represent the start indexes and end indexes (inclusive) // of all text to highlight, with reference to the index numbering // of the raw text return sanitisePositions(positions); }; /** * Check whether the current node is not visibile, either because it's * a script or style node, or because of its CSS properties */ const isInvisibileNode = (node: Node) => { const { nodeName, nodeType } = node; const isHidden = nodeType === Node.ELEMENT_NODE ? (node as HTMLElement).style.display === 'none' || (node as HTMLElement).style.visibility === 'hidden' : false; return nodeName === 'STYLE' || nodeName === 'SCRIPT' || isHidden; }; /** * Insert the highlighting HTML tags inside a parsed HTML document */ const insertMarksKeywords = (html: Document, normalizedKeywords: string[]) => { const rawText = (html.body.textContent || '').toLocaleLowerCase(); const sanitisedPositions = findOccurrences(rawText, normalizedKeywords); // If no occurrences are found, simply don't insert anything if (!sanitisedPositions.length) { return; } // Stack to perform a DFS of the DOM tree const tempStack: Node[] = [html.body]; // Stack to keep track of nodes in reverse order const nodeStack: Node[] = []; // Map to keep track of invisibile nodes' positions in the // node stack and their text content's length const tempInvisibileNodes: Map<number, number> = new Map(); // Post-order tree traversal while (tempStack.length) { const node = tempStack.shift(); if (!node) { continue; } nodeStack.unshift(node); // Invisible nodes are added to the stack such that // their text content length can be accounted for while // traversing the tree, but their children are not added // to the node stack, otherwise they will be visited if (isInvisibileNode(node)) { 
tempInvisibileNodes.set(nodeStack.length, (node.textContent || '').length); } else if (node.childNodes.length) { tempStack.unshift(...node.childNodes); } } // Re-adjust invisibile nodes' indexes based on the number of unshifts const invisibileNodes: Map<number, number> = new Map(); tempInvisibileNodes.forEach((value, key) => { invisibileNodes.set(nodeStack.length - key, value); }); // Initialisation of the range to highlight the keyword // even if it spans multiple tags. It has to be reset at // every instance of the keyword const range = html.createRange(); // Counter to keep track of how many characters have been // "visisted", i.e. how far along the textContet we are, // in reverse order let charCount = rawText.length; const getNumContainedIndexes = (nodeLength: number, charCount: number) => sanitisedPositions.filter( ([startIndex, endIndex]) => // Whether the match starts in the node (startIndex >= charCount && startIndex <= charCount + nodeLength) || // Whether the match ends in the node (endIndex >= charCount && endIndex <= charCount + nodeLength) || // Edge case: whether the node is fully contained within the match (charCount >= startIndex && charCount <= endIndex && charCount + nodeLength >= startIndex && charCount + nodeLength <= endIndex) ); const insertMarkTag = (range: Range) => { const markNode = document.createElement('mark'); markNode.setAttribute('class', 'proton-search-highlight'); markNode.appendChild(range.extractContents()); range.insertNode(markNode); }; for (let n = 0; n < nodeStack.length; n++) { const node = nodeStack[n]; const { nodeName, textContent } = node; // We don't want to accidentally insert mark tags inside invisible nodes, // yet the char count needs to be updated for consistency const invisibleLength = invisibileNodes.get(n); if (invisibleLength) { charCount -= invisibleLength; continue; } if (nodeName === '#text' && !!textContent) { // We set charCount to be the index of the beginning of the current // textual node const 
nodeLength = textContent.length; charCount -= nodeLength; // We check how many instances of the keyword start in the current node // and then we process them in reverse order const containedIndexes = getNumContainedIndexes(nodeLength, charCount).reverse(); for (let i = 0; i < containedIndexes.length; i++) { const [startIndex, endIndex] = containedIndexes[i]; // If the node is fully contained in the mark, we simply highlight it all if ( containedIndexes.length == 1 && charCount >= startIndex && charCount <= endIndex && charCount + nodeLength >= startIndex && charCount + nodeLength <= endIndex ) { range.setStart(node, 0); range.setEnd(node, nodeLength); insertMarkTag(range); continue; } // Since we traverse the node in reverse order, we set the end // of the range first if (charCount + nodeLength > endIndex) { range.setEnd(node, endIndex - charCount); // If this is the last position in the array of indexes, // i.e. it's the first instance in the node, and the instance // crosses the tag, we close the range at the start of the node. // Even though ranges can handle crossing element boundaries, // extracting such ranges can cause unexpected results // (e.g. when it splits a paragraph in two). if (i === containedIndexes.length - 1 && startIndex < charCount) { range.setStart(node, 0); insertMarkTag(range); } } // Since we traverse the node in reverse order, we set the start // of the range second if (startIndex >= charCount) { range.setStart(node, startIndex - charCount); // If this is the first position in the array of indexes, // i.e. it's the last instance in the node, and the instance // crosses the tag, we open the range at this instane and close it at the end of the node. // Even though ranges can handle crossing element boundaries, // extracting such ranges can cause unexpected results // (e.g. when it splits a paragraph in two). 
if (i === 0 && endIndex >= charCount + nodeLength) { range.setEnd(node, nodeLength); } insertMarkTag(range); } } } } }; /** * Insert the highlighting HTML tags inside an HTML document given as a string */ export const insertMarks = (htmlContent: string, normalizedKeywords: string[], setAutoScroll: boolean) => { const html = parseStringToDOM(htmlContent); try { insertMarksKeywords(html, normalizedKeywords); } catch (error: any) { // There could be potential edge cases in the highlighting. In the // worst case we want to fail gracefully and only highlight partially void esSentryReport('insertMarksKeywords', { error }); } if (setAutoScroll) { const marks = html.body.getElementsByClassName('proton-search-highlight'); marks.item(0)?.setAttribute('data-auto-scroll', 'true'); } return html.documentElement.outerHTML; }; /** * Creates an element containing the highlighted email metadata */ export const highlightJSX = (metadata: string, keywords: string[], isBold: boolean = false, trim: boolean = false) => { const sanitisedPositions = findOccurrences(metadata, keywords); let previousIndex = 0; return { numOccurrences: sanitisedPositions.length, resultJSX: !sanitisedPositions.length ? ( <span>{metadata}</span> ) : ( <span> {sanitisedPositions.map((position, index) => { const oldPreviousIndex = previousIndex; previousIndex = position[1]; // Find where to trim and avoid breaking words const estimatedStartIndex = Math.max(0, position[0] - ES_MAX_INITIAL_CHARS); let exactStartIndex = estimatedStartIndex; if (estimatedStartIndex !== 0) { const firstSpaceIndex = metadata.slice(estimatedStartIndex).indexOf(' '); if (firstSpaceIndex !== -1) { // We add 1 to account for the position of the space, // which we won't show at the beginning of the trimmed // sentence exactStartIndex += firstSpaceIndex + 1; } } const startingSlice = index === 0 && trim ? `${exactStartIndex !== 0 ? 
'…' : ''}${metadata.slice(exactStartIndex, position[0])}` : metadata.slice(oldPreviousIndex, position[0]); return ( <span key={index} // eslint-disable-line react/no-array-index-key > {startingSlice} <mark className={`${isBold ? 'text-bold' : ''}`}> {metadata.slice(position[0], position[1])} </mark> {index === sanitisedPositions.length - 1 ? metadata.slice(sanitisedPositions[sanitisedPositions.length - 1][1]) : null} </span> ); })} </span> ), }; }; /** * Insert highlighting markers only if a ReactNode is a string or can be parsed as such * @param node the react node in which highlight has to be inserted * @param highlightMetadata the callback to the highlightMetadata function returned by the * ES library * @returns the highlighted node */ export const highlightNode = (node: ReactNode, highlightMetadata: HighlightMetadata) => { const nodeValue = node?.valueOf(); if (typeof nodeValue === 'string') { return highlightMetadata(nodeValue).resultJSX; } if ( !!nodeValue && Object.prototype.isPrototypeOf.call(Object.prototype, nodeValue) && Object.prototype.hasOwnProperty.call(nodeValue, 'props') ) { const { props } = nodeValue as { props: any }; if ( Object.prototype.isPrototypeOf.call(Object.prototype, props) && Object.prototype.hasOwnProperty.call(props, 'children') ) { const { children } = props; if (Array.isArray(props.children) && children.every((child: any) => typeof child === 'string')) { return highlightMetadata(children.join('')).resultJSX; } } } return node; };
7,275
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esProgress.ts
import { ESProgress, estimateIndexingDuration, readSize } from '@proton/encrypted-search/lib';
import { MINUTE, SECOND } from '@proton/shared/lib/constants';
import { Unwrap } from '@proton/shared/lib/interfaces';

import { IndexedDBRow, getIndexingProgressQueryHelpers } from '../esIDB';

/**
 * Compute the estimated time remaining of indexing
 * @param userID the user ID
 * @param esTotal the total number of items to be indexed
 * @param prevProgress the number of indexed items before last iteration
 * @param prevRecordTimestamp the timestamp of the last iteration
 * @param currentProgress the number of indexed items before current iteration
 * @param currentRecordTimestamp the timestamp of the current iteration
 * @param indexedDBRow the IDB row in which to store the original estimate (optional)
 * @returns the estimated time to completion (in minutes) and the current progress
 * expressed as a number between 0 and 100, or undefined when no progress was made
 * since the previous iteration (so no estimate can be derived)
 */
export const estimateIndexingProgress = async (
    userID: string,
    esTotal: number,
    prevProgress: number,
    prevRecordTimestamp: number,
    currentProgress: number,
    currentRecordTimestamp: number,
    indexedDBRow?: IndexedDBRow
) => {
    // Both deltas must be non-zero, otherwise the rate (and thus the estimate)
    // would be undefined or divide by zero
    if (esTotal !== 0 && currentProgress !== prevProgress && currentRecordTimestamp !== prevRecordTimestamp) {
        const remainingItems = esTotal - currentProgress;
        const processDuration = currentRecordTimestamp - prevRecordTimestamp;
        const progressDelta = currentProgress - prevProgress;

        // Linear extrapolation: (time per item) * (items left)
        const estimatedMs = Math.ceil((processDuration / progressDelta) * remainingItems);
        const estimatedMinutes = Math.ceil(estimatedMs / MINUTE);

        if (indexedDBRow) {
            // Persist the first estimate (in seconds) for later metrics reporting
            await getIndexingProgressQueryHelpers(indexedDBRow).setOriginalEstimate(
                userID,
                Math.ceil(estimatedMs / SECOND)
            );
        }

        const ratioDone = currentProgress / esTotal;
        const currentProgressValue = Math.ceil(ratioDone * 100);

        return { estimatedMinutes, currentProgressValue };
    }
};

/**
 * Assemble the indexing metrics payload from a stored progress blob:
 * duration, interruptions (excluding user-initiated pauses), index size on
 * disk, the original time estimate and item counts
 */
const produceIndexingMetrics = async (userID: string, progressBlob: ESProgress) => {
    const { totalItems, isRefreshed, numPauses, timestamps, originalEstimate } = progressBlob;

    const { indexTime, totalInterruptions } = estimateIndexingDuration(timestamps);

    const indexSize = (await readSize(userID)) || 0;

    return {
        // Pauses are user-initiated, so they are not counted as interruptions
        numInterruptions: totalInterruptions - numPauses,
        numPauses,
        indexSize,
        originalEstimate,
        indexTime,
        totalItems,
        isRefreshed,
    };
};

/**
 * Read the indexing progress for the given IDB row and, if present,
 * turn it into the metrics payload. Returns undefined when no progress
 * blob is stored
 */
export const gatherIndexingMetrics = async (userID: string, row: IndexedDBRow) => {
    const indexingProgress = getIndexingProgressQueryHelpers(row);
    const progressBlob = await indexingProgress.read(userID);
    if (!progressBlob) {
        return;
    }
    return produceIndexingMetrics(userID, progressBlob);
};

export type IndexingMetrics = Unwrap<ReturnType<typeof produceIndexingMetrics>>;
7,276
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esRetries.ts
import { add } from 'date-fns';

import { serverTime } from '@proton/crypto';
import { MINUTE } from '@proton/shared/lib/constants';
import isTruthy from '@proton/utils/isTruthy';

import { executeContentOperations, openESDB, readMetadataBatch, readRetries, setRetries } from '../esIDB';
import { EncryptedItemWithInfo, InternalESCallbacks, RetryObject } from '../models';
import { encryptItem } from './esBuild';
import { isObjectEmpty } from './esUtils';

/**
 * Increase the number of retries by one and set a new retryTime accordingly.
 * The backoff is exponential: 2^(n+1) minutes after the current server time
 */
export const updateRetryObject = (retry: RetryObject): RetryObject => ({
    retryTime: +serverTime() + 2 ** (retry.numberRetries + 1) * MINUTE,
    numberRetries: retry.numberRetries + 1,
});

/**
 * Add a new item ID to the list of retries. If the ID is already tracked
 * and its retry time has passed, its backoff is bumped; a new ID starts
 * with zero retries and an immediate retry time
 */
export const addRetry = async (userID: string, retryID: string) => {
    const retryMap = await readRetries(userID);
    const retryObject = retryMap.get(retryID);
    const now = +serverTime();

    if (!!retryObject) {
        const { retryTime } = retryObject;
        // Only escalate the backoff once the previous retry window has elapsed
        if (retryTime < now) {
            retryMap.set(retryID, updateRetryObject(retryObject));
        }
    } else {
        const defaultRetryObject: RetryObject = { retryTime: now, numberRetries: 0 };
        retryMap.set(retryID, defaultRetryObject);
    }

    return setRetries(userID, retryMap);
};

/**
 * Get all items to be retried. If an item has reached a
 * retry time of one year, we remove it from the list
 */
export const getRetries = async (userID: string) => {
    const retryMap = await readRetries(userID);

    for (const [retryID, { retryTime }] of retryMap) {
        // A retry scheduled more than one year in the future means the
        // exponential backoff has escalated beyond usefulness: give up on it
        if (retryTime > +add(serverTime(), { years: 1 })) {
            retryMap.delete(retryID);
        }
    }

    await setRetries(userID, retryMap);

    return retryMap;
};

/**
 * Retry previously failed API calls: for every tracked ID whose retry time
 * has passed, re-fetch the content, encrypt it and store it in IDB; IDs that
 * fail again have their backoff escalated and stay in the retry list
 */
export const retryAPICalls = async <ESItemContent>(
    userID: string,
    indexKey: CryptoKey,
    fetchESItemContent?: InternalESCallbacks<unknown, unknown, ESItemContent>['fetchESItemContent']
) => {
    const retryMap = await getRetries(userID);
    if (!retryMap.size || !fetchESItemContent) {
        return;
    }

    const now = +serverTime();
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    const contentToAdd: EncryptedItemWithInfo[] = [];

    const IDs = [...retryMap.keys()];
    // Map each retry ID to its timepoint (or undefined when the metadata
    // is missing from IDB)
    const metadataMap = new Map(
        await readMetadataBatch(userID, IDs).then((metadata) => {
            if (!metadata) {
                return;
            }
            return metadata.map((encryptedMetadata, index) => {
                if (!encryptedMetadata) {
                    return [IDs[index], undefined];
                }
                return [encryptedMetadata.ID, encryptedMetadata.timepoint];
            });
        })
    );
    if (!metadataMap.size) {
        return;
    }

    const arrayMap = Array.from(retryMap);
    const newArrayMap = (
        await Promise.all(
            arrayMap.map(async ([ID, retryObject]): Promise<[string, RetryObject] | undefined> => {
                // Not due yet: keep the retry entry unchanged
                if (retryObject.retryTime > now) {
                    return [ID, retryObject];
                }

                const item = await fetchESItemContent(ID);
                const timepoint = metadataMap.get(ID);
                if (item && !isObjectEmpty(item) && timepoint) {
                    try {
                        const aesGcmCiphertext = await encryptItem(item, indexKey);
                        contentToAdd.push({
                            ID,
                            timepoint,
                            aesGcmCiphertext,
                        });
                        // Success: returning undefined drops the ID from the retry list
                        return;
                    } catch (error: any) {
                        // We store it back as if it failed fetching
                    }
                }

                return [ID, updateRetryObject(retryObject)];
            })
        )
    ).filter(isTruthy);

    await executeContentOperations(userID, [], contentToAdd);

    return setRetries(userID, newArrayMap);
};
7,277
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esSearch.ts
import { wait } from '@proton/shared/lib/helpers/promise';
import isTruthy from '@proton/utils/isTruthy';

import { AesKeyGenParams, ES_EXTRA_RESULTS_LIMIT, ES_MAX_ITEMS_PER_BATCH } from '../constants';
import { readContentBatch, readMetadataBatch, readSortedIDs } from '../esIDB';
import {
    AesGcmCiphertext,
    CachedItem,
    ESCache,
    ESItem,
    ESTimepoint,
    GetItemInfo,
    GetUserKeys,
    InternalESCallbacks,
} from '../models';
import { getIndexKey } from './esBuild';
import { cacheIDB, getOldestCachedTimepoint } from './esCache';
import { normalizeString, replaceApostrophes, replaceQuotes } from './esUtils';

/**
 * Process the string input by the user in the searchbar by performing the following
 * transformations:
 * - trims whitespaces from the input string;
 * - removes diacritics;
 * - turn unusual quotes into normal ones, that can then be searched to split sentences;
 * - turn unusual apostrophes into normal ones;
 * - casts to locale lower case;
 * - splits the input string in multiple keywords if separated by whitespace, unless
 *   it's within quotes
 * @param keyword the string as input by users in the searchbar
 * @returns the array of normalised keywords to be searched
 */
export const normalizeKeyword = (keyword: string) => {
    const trimmedKeyword = replaceApostrophes(replaceQuotes(normalizeString(keyword)));

    // Collect the index of every double quote in the normalised input
    const quotesIndexes: number[] = [];
    let index = 0;
    while (index !== -1) {
        index = trimmedKeyword.indexOf(`"`, index);
        if (index !== -1) {
            quotesIndexes.push(index);
            index++;
        }
    }

    const normalizedKeywords: string[] = [];
    let previousIndex = -1;
    for (let index = 0; index < quotesIndexes.length; index++) {
        const keyword = trimmedKeyword.slice(previousIndex + 1, quotesIndexes[index]);

        if (index % 2 === 1) {
            // If the user placed quotes, we want to keep everything inside as a single block
            normalizedKeywords.push(keyword);
        } else {
            // Otherwise we split by whitespace
            normalizedKeywords.push(...keyword.split(' '));
        }

        previousIndex = quotesIndexes[index];
    }

    // Whatever follows the last quote (or the whole string when there are no
    // quotes, since slice(undefined + 1) degenerates to slice(NaN) === slice(0))
    // is split by whitespace as well
    normalizedKeywords.push(...trimmedKeyword.slice(quotesIndexes[quotesIndexes.length - 1] + 1).split(' '));

    return normalizedKeywords.filter(isTruthy);
};

/**
 * Check if all given keywords are in any of the given strings. In other words, all given
 * keywords should be included in at least one of the searched strings
 * @param normalizedKeywords keywords to search
 * @param stringsToSearch string to be searched
 * @param hasApostrophe whether apostrophe normalisation should be applied to the
 * searched strings before matching
 * @returns whether all keywords can be found in at least one given string
 */
export const testKeywords = (normalizedKeywords: string[], stringsToSearch: string[], hasApostrophe: boolean) => {
    const normalizedStrings = stringsToSearch.map((str) =>
        normalizeString(hasApostrophe ? replaceApostrophes(str) : str)
    );
    let result = true;
    let index = 0;
    // Stop as soon as a keyword is missing from every string
    while (result && index !== normalizedKeywords.length) {
        const keyword = normalizedKeywords[index];
        result = result && normalizedStrings.some((string) => string.includes(keyword));
        index++;
    }

    return result;
};

/**
 * Combine both metadata and content search, the latter only if available.
 * Filters are applied first; keyword matching only happens when filters
 * pass and there are keywords to match
 */
export const applySearch = <ESItemMetadata, ESItemContent, ESSearchParameters>(
    esSearchParams: ESSearchParameters,
    item: CachedItem<ESItemMetadata, ESItemContent>,
    hasApostrophe: boolean,
    esCallbacks: InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>
) => {
    const { applyFilters, searchKeywords, getKeywords } = esCallbacks;

    const filters = applyFilters(esSearchParams, item.metadata);
    const keywords = getKeywords(esSearchParams);
    if (!filters || !keywords) {
        return filters;
    }

    return searchKeywords(keywords, item, hasApostrophe);
};

/**
 * Decrypt encrypted object from IndexedDB (AES-GCM, then JSON-parse the plaintext)
 */
export const decryptFromDB = async <Plaintext>(
    aesGcmCiphertext: AesGcmCiphertext,
    indexKey: CryptoKey
): Promise<Plaintext> => {
    const textDecoder = new TextDecoder();

    const decryptedMessage: ArrayBuffer = await crypto.subtle.decrypt(
        { iv: aesGcmCiphertext.iv, name: AesKeyGenParams.name },
        indexKey,
        aesGcmCiphertext.ciphertext
    );

    return JSON.parse(textDecoder.decode(new Uint8Array(decryptedMessage)));
};

/**
 * Perform an uncached search, i.e. with data being retrieved directly from IDB,
 * in batches of ES_MAX_ITEMS_PER_BATCH items, optionally reporting incremental
 * results and honouring an abort signal
 */
export const uncachedSearch = async <ESItemMetadata, ESItemContent, ESSearchParameters>(
    userID: string,
    indexKey: CryptoKey,
    esSearchParams: ESSearchParameters,
    esCallbacks: InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>,
    lastTimePoint: ESTimepoint | undefined,
    itemLimit: number,
    hasApostrophe: boolean,
    setIncrementalResults?: (newResults: ESItem<ESItemMetadata, ESItemContent>[]) => void,
    abortSearchingRef?: React.MutableRefObject<AbortController>
): Promise<{ resultsArray: ESItem<ESItemMetadata, ESItemContent>[]; newLastTimePoint: ESTimepoint | undefined }> => {
    const { getItemInfo, checkIsReverse } = esCallbacks;
    const resultsArray: ESItem<ESItemMetadata, ESItemContent>[] = [];
    let newLastTimePoint = lastTimePoint;
    let remainingItems = itemLimit;
    let previousLenght = 0;

    const isReverse = checkIsReverse(esSearchParams);
    // IDs are sorted by timepoint; resume from the given last time point if any
    const remainingIDs = await readSortedIDs(userID, isReverse, newLastTimePoint);
    if (!remainingIDs) {
        return { resultsArray, newLastTimePoint };
    }

    for (let i = 0; i < remainingIDs.length; i += ES_MAX_ITEMS_PER_BATCH) {
        const IDs = remainingIDs.slice(i, i + ES_MAX_ITEMS_PER_BATCH);
        const [metadata, content] = await Promise.all([readMetadataBatch(userID, IDs), readContentBatch(userID, IDs)]);
        if (!metadata || !content || (abortSearchingRef && abortSearchingRef.current.signal.aborted)) {
            return { resultsArray, newLastTimePoint };
        }

        // Decrypt the whole batch in parallel; content may legitimately be
        // absent (metadata-only items)
        const data = await Promise.all(
            metadata.map(async (encryptedMetadata, index) => {
                if (abortSearchingRef && abortSearchingRef.current.signal.aborted) {
                    return;
                }
                if (!encryptedMetadata) {
                    return;
                }
                const encryptedContent = content[index];
                const [plaintextMetadata, plaintextContent] = await Promise.all([
                    decryptFromDB<ESItemMetadata>(encryptedMetadata.aesGcmCiphertext, indexKey),
                    !!encryptedContent ? decryptFromDB<ESItemContent>(encryptedContent, indexKey) : undefined,
                ]);
                return { metadata: plaintextMetadata, content: plaintextContent };
            })
        );

        if (abortSearchingRef && abortSearchingRef.current.signal.aborted) {
            return { resultsArray, newLastTimePoint };
        }

        // eslint-disable-next-line @typescript-eslint/no-loop-func
        data.forEach((item) => {
            if (!item || remainingItems === 0 || (abortSearchingRef && abortSearchingRef.current.signal.aborted)) {
                return;
            }

            if (
                applySearch<ESItemMetadata, ESItemContent, ESSearchParameters>(
                    esSearchParams,
                    item,
                    hasApostrophe,
                    esCallbacks
                )
            ) {
                // Track the timepoint of the last match so a follow-up search can resume
                newLastTimePoint = getItemInfo(item.metadata).timepoint;
                resultsArray.push({ ...item.metadata, ...item.content });
                remainingItems--;
            }
        });

        // In case the callback to show new search results while searching was given
        // and there are new search results in the current batch, show them
        if (setIncrementalResults && resultsArray.length > previousLenght) {
            previousLenght = resultsArray.length;
            setIncrementalResults(resultsArray);
        }
    }

    return { resultsArray, newLastTimePoint };
};

/**
 * Perform a cached search, i.e. over cached items only, potentially over a partial
 * cache, i.e. still being built, therefore we need to keep track of how many
 * items were searched
 */
const cachedSearch = <ESItemMetadata, ESItemContent, ESSearchParameters>(
    iterator: IterableIterator<CachedItem<ESItemMetadata, ESItemContent>>,
    esSearchParams: ESSearchParameters,
    abortSearchingRef: React.MutableRefObject<AbortController>,
    hasApostrophe: boolean,
    esCallbacks: InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>
) => {
    const searchResults: ESItem<ESItemMetadata, ESItemContent>[] = [];
    let iteration = iterator.next();
    let iterationCount = 0;

    while (!iteration.done) {
        if (abortSearchingRef.current.signal.aborted) {
            break;
        }

        if (applySearch(esSearchParams, iteration.value, hasApostrophe, esCallbacks)) {
            searchResults.push({ ...iteration.value.metadata, ...iteration.value.content });
        }

        iterationCount += 1;
        iteration = iterator.next();
    }

    return { searchResults, iterationCount };
};

/**
 * Based on the time boundaries of a search and the time span of a cache, check whether any more
 * items from IDB are needed. This is done because even if the cache is limited, i.e. does not fully
 * contain the whole IDB, its time span might already cover any user selected time window. It should
 * also return the first time point from where to start the uncached search and potentially adjusted
 * search parameters that take into account the new time boundaries
 */
const checkCacheTimespan = <ESItemMetadata, ESItemContent>(
    esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>,
    getItemInfo: GetItemInfo<ESItemMetadata>,
    searchTimeInterval: {
        begin: number | undefined;
        end: number | undefined;
    }
): { shouldKeepSearching: boolean; lastTimePoint?: ESTimepoint } => {
    const oldestCachedTimepoint = getOldestCachedTimepoint<ESItemMetadata>(esCacheRef, getItemInfo);
    if (!oldestCachedTimepoint) {
        // Nothing cached: the uncached search must run from the beginning
        return {
            shouldKeepSearching: true,
        };
    }

    const [startCache, Order] = oldestCachedTimepoint;
    const { begin, end } = searchTimeInterval;

    const beginOrder = Order;
    // Resume no later than the oldest cached item (already searched in cache)
    const intervalEnd = Math.min(startCache, end || Number.MAX_SAFE_INTEGER);
    const intervalStart = begin || 0;
    // Only keep searching IDB if the requested window extends past the cache's span
    const shouldKeepSearching = intervalStart < startCache;

    return {
        shouldKeepSearching,
        lastTimePoint: [intervalEnd, beginOrder],
    };
};

/**
 * Perform a search by switching between cached and uncached search when necessary
 */
export const hybridSearch = async <ESItemMetadata, ESItemContent, ESSearchParameters>(
    esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>,
    esSearchParams: ESSearchParameters,
    cachedIndexKey: CryptoKey | undefined,
    getUserKeys: GetUserKeys,
    userID: string,
    setResultsList: (Elements: ESItem<ESItemMetadata, ESItemContent>[]) => void,
    abortSearchingRef: React.MutableRefObject<AbortController>,
    esCallbacks: InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>,
    minimumItems: number | undefined
) => {
    const { checkIsReverse, getItemInfo, getSearchInterval, getKeywords } = esCallbacks;

    let searchResults: ESItem<ESItemMetadata, ESItemContent>[] = [];
    let isSearchPartial = false;
    const isReverse = checkIsReverse(esSearchParams);
    const hasApostrophe = (getKeywords(esSearchParams) || []).some((keyword) => keyword.includes(`'`));

    // Caching needs to be triggered here for when a refresh happens on a search URL
    if (!esCacheRef.current.isCacheReady && esCacheRef.current.esCache.size === 0) {
        const indexKey = cachedIndexKey || (await getIndexKey(getUserKeys, userID));
        if (!indexKey) {
            throw new Error('Key not found');
        }
        // Fire and forget: the loops below poll the cache as it fills up
        void cacheIDB<ESItemMetadata, ESItemContent>(indexKey, userID, esCacheRef);
    }

    // Items in cache are the most recent ones, therefore if the cache is not ready and full and the search
    // is in descending order, we cannot use cached items
    if (isReverse || (esCacheRef.current.isCacheReady && !esCacheRef.current.isCacheLimited)) {
        // We have to wait for the cache to contain at least one message, because if it is empty the iterator
        // will be exhausted immediately and will not loop over newly inserted messages when they'll come in
        while (!esCacheRef.current.isCacheReady && esCacheRef.current.esCache.size === 0) {
            await wait(200);
        }

        /** Number of cached items searched so far, used to resume the iterator */
        let searchedItemsCount = 0;

        /** Perform search on cache */
        const { searchResults: cachedSearchResults, iterationCount } = cachedSearch<
            ESItemMetadata,
            ESItemContent,
            ESSearchParameters
        >(esCacheRef.current.esCache.values(), esSearchParams, abortSearchingRef, hasApostrophe, esCallbacks);
        searchResults = cachedSearchResults;
        searchedItemsCount += iterationCount;

        // The first batch of results (if any) are shown only if the cache is still being built, or if it has finished
        // but it's limited. Otherwise we want to show all results at the end
        if (searchResults.length !== 0 && (!esCacheRef.current.isCacheReady || esCacheRef.current.isCacheLimited)) {
            setResultsList(searchResults);
        }

        /**
         * Incremental search
         * Start incremental search if cache is not ready
         */
        if (!esCacheRef.current.isCacheReady) {
            while (true) {
                if (abortSearchingRef.current.signal.aborted) {
                    return {
                        searchResults,
                        isSearchPartial,
                    };
                }

                // Read the readiness flag BEFORE searching, so one last pass is
                // guaranteed after caching completes
                const cacheIsReadyBeforeSearch = esCacheRef.current.isCacheReady;

                const searchCacheValues = esCacheRef.current.esCache.values();

                // Go where we were at last iteration
                for (let i = 0; i < searchedItemsCount; i++) {
                    searchCacheValues.next();
                }

                // Search over newly cached items
                const { searchResults: cachedSearchResults, iterationCount } = cachedSearch<
                    ESItemMetadata,
                    ESItemContent,
                    ESSearchParameters
                >(searchCacheValues, esSearchParams, abortSearchingRef, hasApostrophe, esCallbacks);

                searchedItemsCount += iterationCount;

                // Increment search result and execute callback
                if (cachedSearchResults.length) {
                    searchResults.push(...cachedSearchResults);
                    setResultsList(searchResults);
                }

                // If cache was ready before starting search, we did the last search iteration needed.
                if (cacheIsReadyBeforeSearch) {
                    break;
                }

                // Or wait until it becomes ready
                await wait(200);
            }
        }

        // Once caching has terminated, if the cache turns out to be not limited, we stop searching
        if (!esCacheRef.current.isCacheLimited || abortSearchingRef.current.signal.aborted) {
            return {
                searchResults,
                isSearchPartial,
            };
        }

        // If enough items to fill two pages were already found, we don't continue the search
        if (searchResults.length >= 2 * ES_EXTRA_RESULTS_LIMIT || abortSearchingRef.current.signal.aborted) {
            // The last item in cache is assumed to be the oldest
            const lastTimePoint = getOldestCachedTimepoint<ESItemMetadata>(esCacheRef, getItemInfo);
            return {
                searchResults,
                isSearchPartial: true,
                lastTimePoint,
            };
        }
    }

    let shouldKeepSearching = !abortSearchingRef.current.signal.aborted;
    let lastTimePoint: ESTimepoint | undefined;
    isSearchPartial = true;

    // If the cache hasn't been searched because the order is ascending, the search
    // parameters shouldn't be influenced by the cache timespan
    if (isReverse) {
        // The remaining items are searched from DB, but only if the indicated timespan
        // hasn't been already covered by cache. The cache is ordered such that the last item is the oldest
        ({ shouldKeepSearching, lastTimePoint } = checkCacheTimespan<ESItemMetadata, ESItemContent>(
            esCacheRef,
            getItemInfo,
            getSearchInterval(esSearchParams)
        ));
    }

    const remainingItems = Math.max(2 * ES_EXTRA_RESULTS_LIMIT - searchResults.length, minimumItems || 0);
    if (shouldKeepSearching && remainingItems > 0) {
        const setIncrementalResults = (newResults: ESItem<ESItemMetadata, ESItemContent>[]) => {
            setResultsList(searchResults.concat(newResults));
        };

        const indexKey = cachedIndexKey || (await getIndexKey(getUserKeys, userID));
        if (!indexKey) {
            throw new Error('Key not found');
        }

        const { resultsArray, newLastTimePoint } = await uncachedSearch<
            ESItemMetadata,
            ESItemContent,
            ESSearchParameters
        >(
            userID,
            indexKey,
            esSearchParams,
            esCallbacks,
            lastTimePoint,
            remainingItems,
            hasApostrophe,
            setIncrementalResults,
            abortSearchingRef
        );
        searchResults.push(...resultsArray);
        // A defined last time point means IDB was not exhausted
        isSearchPartial = !!newLastTimePoint;
        lastTimePoint = newLastTimePoint;
    }

    return { searchResults, isSearchPartial, lastTimePoint };
};
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esSync.ts
import { IDBPDatabase } from 'idb';

import chunk from '@proton/utils/chunk';

import { ES_MAX_PARALLEL_ITEMS, ES_SYNC_ACTIONS, STORING_OUTCOME } from '../constants';
import { executeContentOperations, executeMetadataOperations, openESDB, readLimited, setLimited } from '../esIDB';
import {
    CachedItem,
    ESCache,
    ESItem,
    ESItemEvent,
    EncryptedItemWithInfo,
    EncryptedSearchDB,
    InternalESCallbacks,
} from '../models';
import { encryptItem } from './esBuild';
import { addToESCache, removeFromESCache } from './esCache';
import { addRetry } from './esRetries';
import { applySearch } from './esSearch';
import { findItemIndex, isObjectEmpty } from './esUtils';

/**
 * Pre-fetch in parallel the content of all items in a batch of events that will
 * need it (CREATE and UPDATE_CONTENT actions), keyed by item ID.
 */
const prefetchContentToSync = async <ESItemMetadata extends object, ESItemContent = void>(
    itemEventsBatch: ESItemEvent<ESItemMetadata>[],
    fetchESItemContent?: (itemID: string, signal?: AbortSignal | undefined) => Promise<ESItemContent | undefined>
) => {
    /**
     * We speed up item syncing by first fetching in parallel all items that are
     * required and then syncing them all. In case fetchESItemContent is not defined or
     * no content is required, then the metadata contained in the itemEvents suffice
     * to sync the items
     */
    const prefetchedContent: Map<string, ESItemContent | undefined> = new Map();
    if (fetchESItemContent) {
        await Promise.all(
            itemEventsBatch.map(async (itemEvent) => {
                const { ID, Action } = itemEvent;
                if (Action === ES_SYNC_ACTIONS.CREATE || Action === ES_SYNC_ACTIONS.UPDATE_CONTENT) {
                    prefetchedContent.set(ID, await fetchESItemContent(ID, undefined));
                }
            })
        );
    }
    return prefetchedContent;
};

/**
 * Synchronise IDB (and optionally cache and search results) with new ES events.
 * Returns whether the currently displayed search results were changed by the sync.
 */
export const syncItemEvents = async <ESItemContent, ESItemMetadata extends Object, ESSearchParameters>(
    Items: ESItemEvent<ESItemMetadata>[],
    userID: string,
    esCacheRef: React.MutableRefObject<ESCache<ESItemMetadata, ESItemContent>>,
    permanentResults: ESItem<ESItemMetadata, ESItemContent>[],
    indexKey: CryptoKey | undefined,
    esSearchParams: ESSearchParameters | undefined,
    esCallbacks: InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>
) => {
    const { getItemInfo, fetchESItemContent, onContentDeletion, getKeywords } = esCallbacks;

    // IDB is opened here only to probe that it exists (permanentStorage flag);
    // the connection is closed immediately and later operations reopen their own
    let esDB: IDBPDatabase<EncryptedSearchDB> | undefined;
    if (!!indexKey) {
        esDB = await openESDB(userID);
    }
    const permanentStorage = !!esDB && !!indexKey;
    const useContent = !!fetchESItemContent;
    esDB?.close();

    /**
     * In case something happens while displaying search results, this function keeps
     * the results in sync live (e.g. by creating or removing items from the results)
     */
    let searchChanged = false;
    const updatePermanentResults = ({
        resultIndex = -1,
        itemToCache,
    }: {
        resultIndex?: number;
        itemToCache?: ESItem<ESItemMetadata, ESItemContent>;
    }) => {
        if (itemToCache) {
            if (resultIndex !== -1) {
                // Replace an existing result in place
                permanentResults.splice(resultIndex, 1, itemToCache);
            } else {
                // Append a new result
                permanentResults.push(itemToCache);
            }
        } else {
            // No replacement item: remove the result at resultIndex
            permanentResults.splice(resultIndex as number, 1);
        }
        searchChanged = true;
    };

    /**
     * Any interaction with IDB is postponed: removals and additions are queued
     * here and executed in one go after all events have been processed
     */
    const metadataToRemove: string[] = [];
    const contentToRemove: string[] = [];
    const metadataToAdd: EncryptedItemWithInfo[] = [];
    const contentToAdd: EncryptedItemWithInfo[] = [];

    const chunks = chunk(Items, ES_MAX_PARALLEL_ITEMS);
    for (const chunk of chunks) {
        const prefetchedContent = await prefetchContentToSync(chunk, fetchESItemContent);

        for (const itemEvent of chunk) {
            const { ID, Action, ItemMetadata } = itemEvent;

            /**
             * If an item is deleted:
             *   - queue it to remove it from IDB
             *   - delete it from cache
             *   - if results are being shown, delete it from there too
             */
            if (Action === ES_SYNC_ACTIONS.DELETE) {
                metadataToRemove.push(ID);
                if (useContent) {
                    if (permanentStorage) {
                        await onContentDeletion(ID, indexKey);
                    }
                    contentToRemove.push(ID);
                }
                removeFromESCache<ESItemMetadata, ESItemContent>(ID, esCacheRef, false);
                const resultIndex = findItemIndex<ESItemMetadata>(ID, permanentResults, getItemInfo);
                if (!!esSearchParams && resultIndex !== -1) {
                    updatePermanentResults({ resultIndex });
                }
            }

            /**
             * For any other type of action, the metadata of the modified item should exist
             */
            if (!ItemMetadata) {
                continue;
            }

            /**
             * If an item is created:
             *   - queue it to add it to IDB
             *   - add it to cache
             *   - if results are being shown and the new item fulfills the search, add it there too
             */
            if (Action === ES_SYNC_ACTIONS.CREATE) {
                const content = prefetchedContent.get(ID);
                if (useContent && permanentStorage && !content) {
                    /**
                     * If an error occurred while fetching, we store the item's ID for
                     * later fetching of the content
                     */
                    await addRetry(userID, ID);
                }

                const itemToCache: CachedItem<ESItemMetadata, ESItemContent> = {
                    metadata: ItemMetadata,
                    content,
                };

                if (permanentStorage) {
                    metadataToAdd.push({
                        ID,
                        timepoint: getItemInfo(ItemMetadata).timepoint,
                        aesGcmCiphertext: await encryptItem(itemToCache.metadata, indexKey),
                    });
                    if (itemToCache.content && !isObjectEmpty(itemToCache.content)) {
                        contentToAdd.push({
                            ID,
                            timepoint: getItemInfo(ItemMetadata).timepoint,
                            aesGcmCiphertext: await encryptItem(itemToCache.content, indexKey),
                        });
                    }
                }

                addToESCache<ESItemMetadata, ESItemContent>(itemToCache, esCacheRef, getItemInfo);

                if (!!esSearchParams) {
                    const hasApostrophe = (getKeywords(esSearchParams) || []).some((keyword) => keyword.includes(`'`));
                    if (applySearch(esSearchParams, itemToCache, hasApostrophe, esCallbacks)) {
                        updatePermanentResults({ itemToCache: { ...itemToCache.metadata, ...itemToCache.content } });
                    }
                }
            }

            if (Action === ES_SYNC_ACTIONS.UPDATE_CONTENT || Action === ES_SYNC_ACTIONS.UPDATE_METADATA) {
                let newContent: ESItemContent | undefined;
                if (Action === ES_SYNC_ACTIONS.UPDATE_CONTENT) {
                    newContent = prefetchedContent.get(ID);
                    if (useContent && permanentStorage && !newContent) {
                        /**
                         * If an error occurred while fetching, we store the item's ID for later fetching of the content
                         */
                        await addRetry(userID, ID);
                    }
                }

                const itemToCache: CachedItem<ESItemMetadata, ESItemContent> = {
                    metadata: ItemMetadata,
                    content: newContent,
                };

                if (permanentStorage) {
                    /**
                     * In case the action is only updating the metadata, we ignore updating the
                     * size estimate since it's likely very similar or exactly the same as the old one
                     */
                    metadataToAdd.push({
                        ID,
                        timepoint: getItemInfo(ItemMetadata).timepoint,
                        aesGcmCiphertext: await encryptItem(itemToCache.metadata, indexKey),
                        keepSize: Action === ES_SYNC_ACTIONS.UPDATE_METADATA,
                    });
                    if (itemToCache.content && !isObjectEmpty(itemToCache.content)) {
                        contentToAdd.push({
                            ID,
                            timepoint: getItemInfo(ItemMetadata).timepoint,
                            aesGcmCiphertext: await encryptItem(itemToCache.content, indexKey),
                        });
                    }
                }

                /**
                 * If I only want to update metadata
                 * AND item has no content
                 * AND cached item has content
                 * THEN I reassign cached content
                 */
                if (Action === ES_SYNC_ACTIONS.UPDATE_METADATA && !itemToCache.content) {
                    const previousCachedItem = esCacheRef.current.esCache.get(getItemInfo(itemToCache.metadata).ID);
                    if (!!previousCachedItem?.content) {
                        itemToCache.content = previousCachedItem?.content;
                    }
                }

                addToESCache<ESItemMetadata, ESItemContent>(itemToCache, esCacheRef, getItemInfo);

                /**
                 * If results are being shown:
                 *   - if the old item was part of the search and the new one still is, update it;
                 *   - if the old item was part of the search and the new one shouldn't be, delete it;
                 *   - if the old item wasn't part of the search and the new one should be, add it;
                 */
                if (!!esSearchParams) {
                    const hasApostrophe = (getKeywords(esSearchParams) || []).some((keyword) => keyword.includes(`'`));
                    const resultIndex = findItemIndex(ID, permanentResults, getItemInfo);

                    if (resultIndex !== -1) {
                        if (
                            applySearch<ESItemMetadata, ESItemContent, ESSearchParameters>(
                                esSearchParams,
                                itemToCache,
                                hasApostrophe,
                                esCallbacks
                            )
                        ) {
                            updatePermanentResults({
                                resultIndex,
                                itemToCache: { ...itemToCache.metadata, ...itemToCache.content },
                            });
                        } else {
                            updatePermanentResults({ resultIndex });
                        }
                    } else if (
                        applySearch<ESItemMetadata, ESItemContent, ESSearchParameters>(
                            esSearchParams,
                            itemToCache,
                            hasApostrophe,
                            esCallbacks
                        )
                    ) {
                        updatePermanentResults({ itemToCache: { ...itemToCache.metadata, ...itemToCache.content } });
                    }
                }
            }
        }
    }

    if (permanentStorage) {
        const wasLimited = await readLimited(userID);
        // We assume IDB is limited and revert only if it's not, so that an
        // interrupted write leaves the safer "limited" flag behind
        if (!wasLimited) {
            await setLimited(userID, true);
        }

        const metadataOutcome = await executeMetadataOperations(userID, metadataToRemove, metadataToAdd);
        const contentOutcome = await executeContentOperations(userID, contentToRemove, contentToAdd);

        if (!wasLimited && metadataOutcome === STORING_OUTCOME.SUCCESS && contentOutcome === STORING_OUTCOME.SUCCESS) {
            await setLimited(userID, false);
        }
    }

    return searchChanged;
};
7,279
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/esUtils.ts
import { EVENT_ACTIONS } from '@proton/shared/lib/constants';
import { removeItem } from '@proton/shared/lib/helpers/storage';
import { AddressEvent } from '@proton/shared/lib/interfaces';

import { APOSTROPHES_REGEXP, DIACRITICS_REGEXP, QUOTES_REGEXP } from '../constants';
import { AesGcmCiphertext, ESTimepoint, GetItemInfo } from '../models';

/**
 * Remove all ES blobs in local storage related to a user.
 * Keys have the form `ES:<userID>:<...>`.
 */
export const removeESFlags = (userID: string) => {
    Object.keys(window.localStorage).forEach((key) => {
        const chunks = key.split(':');
        if (chunks[0] === 'ES' && chunks[1] === userID) {
            removeItem(key);
        }
    });
};

/**
 * Remove milliseconds from numeric value of a date (i.e. convert ms to whole seconds)
 */
export const roundMilliseconds = (time: number) => Math.floor(time / 1000);

/**
 * Request storage persistence to prevent the ES database from being evicted
 */
export const requestPersistence = async () => {
    if (window.navigator.storage && window.navigator.storage.persist) {
        await window.navigator.storage.persist();
    }
};

/**
 * Remove diacritics and apply other transforms to the NFKD decomposed string
 */
export const normalizeString = (str: string, format: 'NFD' | 'NFKD' = 'NFKD') =>
    str.toLocaleLowerCase().normalize(format).replace(DIACRITICS_REGEXP, '');

/**
 * Find the index of an item in an item array. Should return -1 if the index is not found
 */
export const findItemIndex = <ESItemMetadata>(
    itemID: string,
    itemArray: ESItemMetadata[],
    getItemInfo: GetItemInfo<ESItemMetadata>
) => itemArray.findIndex((item) => getItemInfo(item).ID === itemID);

/**
 * Compare two timestamps and return whether the first one is smaller (i.e. older)
 * than the second one. The second tuple element acts as a tie-breaker.
 */
export const isTimepointSmaller = (t1: ESTimepoint, t2: ESTimepoint) =>
    t1[0] < t2[0] || (t1[0] === t2[0] && t1[1] < t2[1]);

/**
 * Verify that a given value is of type [number, number]
 */
export const isESTimepoint = (value: any): value is ESTimepoint =>
    Array.isArray(value) && value.length === 2 && typeof value[0] === 'number' && typeof value[1] === 'number';

/**
 * Check whether an object contains no properties. This can happen if products
 * return e.g. a content object with undefined properties only
 */
export const isObjectEmpty = (object: Object) => JSON.stringify(object) === '{}';

/**
 * Size in bytes of a ciphertext (IV plus encrypted payload)
 */
export const ciphertextSize = (ciphertext: AesGcmCiphertext | undefined) =>
    !ciphertext ? 0 : ciphertext.iv.length + ciphertext.ciphertext.byteLength;

/**
 * Size in bytes of a batch of ciphertexts
 */
export const ciphertextBatchSize = (ciphertextBatch: (AesGcmCiphertext | undefined)[]) =>
    ciphertextBatch.reduce((p, c) => p + ciphertextSize(c), 0);

/**
 * Turn unusual quotes into normal ones, that can then be searched to split sentences
 */
export const replaceQuotes = (str: string) => str.replace(QUOTES_REGEXP, `"`);

/**
 * Turn unusual apostrophes into normal ones
 */
export const replaceApostrophes = (str: string) => str.replace(APOSTROPHES_REGEXP, `'`);

/**
 * Returns true if one or more keys have been reactivated
 */
export const hasReactivatedKey = ({
    AddressEvents,
    numAddresses,
}: {
    AddressEvents?: AddressEvent[];
    numAddresses: number;
}) => {
    /**
     * `EVENT_ACTIONS.UPDATE` on AddressEvent can have several meanings: address key reactivation, address set as default
     *
     * However, only key reactivation affects all the addresses at once, that's why we check if the number of addresses
     * with this action matches the total nbr of addresses
     *
     * 3 (very) edge cases:
     *   - when we have strictly 2 addresses and change the one set as default, both will have `EVENT_ACTIONS.UPDATE`
     *   - when we reactivate a key for only a single address, this condition won't be matched
     *   - if a key gets reactivated during the indexation, this condition will be matched, but the event will be
     *     consumed and we won't correct undecrypted ones
     */
    return (
        !!AddressEvents && AddressEvents.filter(({ Action }) => Action === EVENT_ACTIONS.UPDATE).length === numAddresses
    );
};
7,280
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esHelpers/index.ts
// Barrel file re-exporting all encrypted-search helper modules
export * from './esAPI';
export * from './esBuild';
export * from './esCache';
export * from './esHighlight';
export * from './esSearch';
export * from './esSync';
export * from './esUtils';
export * from './esRetries';
export * from './esProgress';
7,281
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esIDB/configObjectStore.ts
import { IDBPDatabase } from 'idb';

import noop from '@proton/utils/noop';

import { ConfigKeys, ConfigValues, EncryptedSearchDB, RetryObject } from '../models';
import { openESDB, safelyWriteToIDBAbsolutely } from './indexedDB';

/**
 * Initialize the config object store in IDB with the encrypted index key
 * and default values (size 0, disabled, not limited)
 */
export const initializeConfig = async (userID: string, indexKey: string) => {
    const esDB = await openESDB(userID);
    if (!esDB || !esDB.objectStoreNames.contains('config')) {
        return;
    }

    const tx = esDB.transaction('config', 'readwrite');
    void tx.store.put(indexKey, 'indexKey');
    void tx.store.put(0, 'size');
    void tx.store.put(false, 'enabled');
    void tx.store.put(false, 'limited');
    await tx.done;

    esDB.close();
};

/**
 * Read a row in the config object store
 */
const readConfigProperty = async (userID: string, configID: ConfigKeys) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }
    const result = await esDB.get('config', configID);
    esDB.close();
    return result;
};

/**
 * Read from the config table whether IDB was migrated after the latest
 * version upgrade, in case it had been created before. If so, the migrated
 * row contains some product-specific information to perform the migration.
 * Note that if no such row exists, it means IDB was created after the
 * migration and is therefore considered already migrated
 */
export const readMigrated = async (userID: string) => readConfigProperty(userID, 'migrated');

/**
 * Read the index key from the config table
 */
export const readIndexKey = async (userID: string): Promise<string | undefined> =>
    readConfigProperty(userID, 'indexKey');

/**
 * Read the estimated size from the config table
 */
export const readSize = async (userID: string): Promise<number | undefined> => readConfigProperty(userID, 'size');

/**
 * Read whether ES is enabled from the config table
 */
export const readEnabled = async (userID: string): Promise<boolean | undefined> =>
    readConfigProperty(userID, 'enabled');

/**
 * Read from the config table whether there wasn't enough disk space
 */
export const readLimited = async (userID: string): Promise<boolean | undefined> =>
    readConfigProperty(userID, 'limited');

/**
 * Read from the config table which IDs to retry.
 * Retries are stored as a JSON-serialised array of [ID, RetryObject] pairs.
 */
export const readRetries = async (userID: string): Promise<Map<string, RetryObject>> => {
    const retries = await readConfigProperty(userID, 'retries');
    if (typeof retries === 'string') {
        return new Map(JSON.parse(retries));
    }
    return new Map();
};

/**
 * Overwrites a row in the config object store
 */
const writeConfigProperty = async (userID: string, configID: ConfigKeys, value: ConfigValues[ConfigKeys]) => {
    const esDB = await openESDB(userID);
    if (!esDB || !esDB.objectStoreNames.contains('config')) {
        return;
    }
    await safelyWriteToIDBAbsolutely(value, configID, 'config', esDB);
    esDB.close();
};

/**
 * Update the estimated size by a given amount in the config object store,
 * but without opening a new instance of ESDB
 */
export const updateSize = async (esDB: IDBPDatabase<EncryptedSearchDB>, sizeDelta: number) => {
    if (sizeDelta === 0) {
        return;
    }
    const oldSize: number | undefined = await esDB.get('config', 'size');
    if (typeof oldSize === 'undefined') {
        return;
    }
    return safelyWriteToIDBAbsolutely(oldSize + sizeDelta, 'size', 'config', esDB);
};

/**
 * Store whether IDB is limited in terms of number of content indexed
 */
export const setLimited = async (userID: string, isLimited: boolean) =>
    writeConfigProperty(userID, 'limited', isLimited).catch(noop);

/**
 * Switch value of the enabled property in the config object store
 */
export const toggleEnabled = async (userID: string) => {
    const oldEnabled = await readEnabled(userID);
    const newEnabled = !oldEnabled;
    return writeConfigProperty(userID, 'enabled', newEnabled).catch(noop);
};

/**
 * Remove the migrated row once migration is done
 */
export const setMigrated = async (userID: string) => {
    const esDB = await openESDB(userID);
    if (!esDB || !esDB.objectStoreNames.contains('config')) {
        return;
    }
    await esDB.delete('config', 'migrated');
    esDB.close();
};

/**
 * Store IDs of items that failed to be fetched for later retry.
 * An empty collection removes the row altogether.
 */
export const setRetries = async (userID: string, retries: Map<string, RetryObject> | [string, RetryObject][]) => {
    const arrayRetries = Array.from(retries);
    if (arrayRetries.length) {
        return writeConfigProperty(userID, 'retries', JSON.stringify(arrayRetries)).catch(noop);
    }

    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }
    await esDB.delete('config', 'retries');
    esDB.close();
};
7,282
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esIDB/content.ts
import { STORING_OUTCOME } from '../constants';
import { ciphertextSize, decryptFromDB } from '../esHelpers';
import { EncryptedItemWithInfo } from '../models';
import { updateSize } from './configObjectStore';
import { openESDB, safelyWriteToIDBConditionally } from './indexedDB';

/**
 * Get a decrypted content item from IndexedDB
 */
export const readContentItem = async <ESItemContent>(userID: string, itemID: string, indexKey: CryptoKey) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }
    const aesGcmCiphertext = await esDB.get('content', itemID);
    esDB.close();
    if (!aesGcmCiphertext) {
        return;
    }
    return decryptFromDB<ESItemContent>(aesGcmCiphertext, indexKey);
};

/**
 * Fetch the number of items from the content table
 */
export const readNumContent = async (userID: string) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }
    const count = await esDB.count('content');
    esDB.close();
    return count;
};

/**
 * Read a batch of content items specified by their IDs.
 * Returns an empty array when the content table is empty; otherwise the
 * result array is positionally aligned with the input IDs (missing items
 * are undefined).
 */
export const readContentBatch = async (userID: string, IDs: string[]) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    const count = await esDB.count('content');
    if (count === 0) {
        esDB.close();
        return [];
    }

    const tx = esDB.transaction('content', 'readonly');
    const content = await Promise.all(IDs.map((ID) => tx.store.get(ID)));
    await tx.done;

    esDB.close();
    return content;
};

/**
 * Remove items from and write items to the content table of IDB. Note
 * that this function will throw if the IDB quota is exceeded, therefore
 * a check needs to happen in advance to verify all items to add do fit
 */
export const executeContentOperations = async (
    userID: string,
    itemsToRemove: string[],
    itemsToAdd: EncryptedItemWithInfo[]
) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    // Remove first, collecting the freed sizes so the size estimate can be decremented
    const tx = esDB.transaction('content', 'readwrite');
    const removeSizes = await Promise.all(
        itemsToRemove.map((ID) =>
            tx.store.get(ID).then((aesGcmCiphertext) => {
                void tx.store.delete(ID);
                return ciphertextSize(aesGcmCiphertext);
            })
        )
    );
    await tx.done;
    await updateSize(esDB, -1 * removeSizes.reduce((p, c) => p + c, 0));

    const storingOutcomes: STORING_OUTCOME[] = [];

    // Then all items to add are inserted
    for (const itemToAdd of itemsToAdd) {
        storingOutcomes.push(await safelyWriteToIDBConditionally(itemToAdd, 'content', esDB));
    }

    esDB.close();

    // A single quota hit makes the whole batch report QUOTA
    if (storingOutcomes.some((storingOutcome) => storingOutcome === STORING_OUTCOME.QUOTA)) {
        return STORING_OUTCOME.QUOTA;
    }
    return STORING_OUTCOME.SUCCESS;
};
7,283
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esIDB/events.ts
import noop from '@proton/utils/noop';

import { EventsObject } from '../models';
import { openESDB, safelyWriteToIDBAbsolutely } from './indexedDB';

/**
 * Read all event IDs in the events table, keyed by component ID.
 * Returns undefined when IDB cannot be opened.
 */
export const readAllLastEvents = async (userID: string) => {
    const result: EventsObject = {};

    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    let cursor = await esDB.transaction('events').store.openCursor();
    while (cursor) {
        const { key, value } = cursor;
        if (value) {
            result[key] = value;
        }
        cursor = await cursor.continue();
    }

    esDB.close();
    return result;
};

/**
 * Read the last event ID in the events table for the given component
 */
export const readLastEvent = async (userID: string, componentID: string) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }
    // Fix: await the read before closing the connection. Previously the
    // promise was returned unawaited while close() had already been called,
    // relying on IDB's implicit close-waits-for-pending-transactions behavior;
    // this also matches the pattern used by readAllLastEvents above.
    const result = await esDB.get('events', componentID);
    esDB.close();
    return result;
};

/**
 * Write all event IDs for all the given components to the events table.
 * Individual write failures are swallowed (best-effort).
 */
export const writeAllEvents = async (userID: string, eventsObject: EventsObject) => {
    const esDB = await openESDB(userID);
    if (!esDB || !esDB.objectStoreNames.contains('events')) {
        return;
    }
    for (const componentID in eventsObject) {
        await safelyWriteToIDBAbsolutely(eventsObject[componentID], componentID, 'events', esDB).catch(noop);
    }
    esDB.close();
};

/**
 * Add an event loop's last event ID to IDB
 */
export const addLastEvent = async (userID: string, componentID: string, eventID: string) => {
    const eventsObject: EventsObject = {};
    eventsObject[componentID] = eventID;
    return writeAllEvents(userID, eventsObject);
};

/**
 * Remove an event loop's last event ID from IDB
 */
export const removeLastEvent = async (userID: string, componentID: string) => {
    const esDB = await openESDB(userID);
    if (!esDB || !esDB.objectStoreNames.contains('events')) {
        return;
    }
    await esDB.delete('events', componentID);
    esDB.close();
};
7,284
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esIDB/index.ts
// Barrel file re-exporting all encrypted-search IndexedDB modules
export * from './configObjectStore';
export * from './events';
export * from './indexedDB';
export * from './indexingProgress';
export * from './content';
export * from './metadata';
7,285
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esIDB/indexedDB.ts
import { IDBPDatabase, deleteDB, openDB } from 'idb';

import noop from '@proton/utils/noop';

import { INDEXEDDB_VERSION, STORING_OUTCOME } from '../constants';
import { ciphertextSize, esSentryReport, isTimepointSmaller, removeESFlags } from '../esHelpers';
import { AesGcmCiphertext, EncryptedItemWithInfo, EncryptedMetadataItem, EncryptedSearchDB } from '../models';
import { updateSize } from './configObjectStore';
import { getOldestID, getOldestInfo } from './metadata';

/**
 * Format the name of the ES database for the given user ID
 */
const getDBName = (userID: string) => `ES:${userID}:DB`;

/**
 * Delete the given user's IDB (best-effort, failures ignored)
 */
export const deleteESDB = async (userID: string) => deleteDB(getDBName(userID)).catch(noop);

/**
 * Open an existing IDB for the given user. If the DB hadn't already existed,
 * undefined is returned instead. WARNING: this function will delete an old
 * version of IDB if it exists
 */
export const openESDB = async (userID: string) => {
    let esDB: IDBPDatabase<EncryptedSearchDB> | undefined;
    try {
        let dbExisted = true;
        esDB = await openDB<EncryptedSearchDB>(getDBName(userID), INDEXEDDB_VERSION, {
            upgrade() {
                // The upgrade callback only fires when the DB didn't exist at
                // this version, which is how we detect a fresh/outdated DB
                dbExisted = false;
            },
        });
        if (!dbExisted) {
            throw new Error('Attempting to opening a non-existent DB');
        }
    } catch (error: any) {
        esDB?.close();
        // Flags are removed from local storage in case this code
        // is called due to an update from an outdated version of IDB
        removeESFlags(userID);
        await deleteESDB(userID);
        return;
    }
    return esDB;
};

/**
 * Check whether the current version of IDB exists. WARNING: this function
 * will delete an old version of IDB if it exists
 */
export const checkVersionedESDB = async (userID: string) => {
    const esDB = await openESDB(userID);
    const check = !!esDB;
    esDB?.close();
    return check;
};

/**
 * Create an up-to-date IDB for the given user
 */
export const createESDB = async (userID: string) => {
    // Remove the database first, in case there is an old stale version that
    // might arise when removing it and creating a new one immediately after
    await deleteESDB(userID);
    return openDB<EncryptedSearchDB>(getDBName(userID), INDEXEDDB_VERSION, {
        upgrade: (esDB) => {
            // The object store containing the content of items, indexed by their ID.
            // Out-of-line keys are used
            esDB.createObjectStore('content');

            // The object store containing all metadata of items, indexed by their ID,
            // with a temporal index on the timepoint property.
            // NOTE(review): the original comment claimed in-line keys via an ID
            // property, but no keyPath is passed here — confirm against the
            // writers in safelyWriteToIDBConditionally, which supply keys explicitly
            const metadataOS = esDB.createObjectStore('metadata');
            metadataOS.createIndex('temporal', 'timepoint', { unique: true, multiEntry: false });

            // The config object store contains ES-wide values (e.g. the encrypted index key),
            // configuration (e.g. whether ES is enabled) and information (e.g. an estimate
            // of the size)
            esDB.createObjectStore('config');

            // The events object store contains the last event ID according to which the index has
            // been updated for every component of the product
            esDB.createObjectStore('events');

            // The indexingProgress object store contains metadata information on indexing. It always
            // will contain a 'metadata' row, for items metadata to either search those exclusively or
            // to enable ES for free users, as well as a row for content in case a product decides to
            // have any
            esDB.createObjectStore('indexingProgress');
        },
    });
};

/**
 * Delete the oldest item from ESDB, both from the metadata table and the content table,
 * and decrement the size estimate accordingly
 */
const deleteOldestItem = async (ID: string, esDB: IDBPDatabase<EncryptedSearchDB>) => {
    let removeSize = 0;
    await Promise.all([
        esDB.get('metadata', ID).then((item) => {
            removeSize += ciphertextSize(item?.aesGcmCiphertext);
            return esDB.delete('metadata', ID);
        }),
        esDB.get('content', ID).then((aesGcmCiphertext) => {
            removeSize += ciphertextSize(aesGcmCiphertext);
            return esDB.delete('content', ID);
        }),
    ]);
    return updateSize(esDB, -removeSize);
};

/**
 * Return whether an item fetched from either the metadata table or the content table
 * is of type AesGcmCiphertext (content rows are bare ciphertexts, hence carry an 'iv')
 */
const discriminateItem = (item: EncryptedMetadataItem | AesGcmCiphertext): item is AesGcmCiphertext =>
    Object.hasOwn(item, 'iv');

/**
 * Compute the size of an item from either the metadata or content table
 */
const getItemSize = async (ID: string, storeName: 'metadata' | 'content', esDB: IDBPDatabase<EncryptedSearchDB>) => {
    const item = await esDB.get(storeName, ID);
    if (!item) {
        return 0;
    }
    if (discriminateItem(item)) {
        return ciphertextSize(item);
    }
    return ciphertextSize(item.aesGcmCiphertext);
};

/**
 * Write to the ES IDB and manage the case of running out of disk space.
 * If we do run out of space we must remove the oldest item to make space
 */
export const safelyWriteToIDBConditionally = async (
    value: EncryptedItemWithInfo,
    storeName: 'metadata' | 'content',
    esDB: IDBPDatabase<EncryptedSearchDB>,
    inputStoringOutcome?: STORING_OUTCOME
): Promise<STORING_OUTCOME> => {
    const valueToStore: EncryptedMetadataItem | AesGcmCiphertext =
        storeName === 'metadata'
            ? { aesGcmCiphertext: value.aesGcmCiphertext, timepoint: value.timepoint }
            : value.aesGcmCiphertext;
    try {
        await esDB.put(storeName, valueToStore, value.ID);
        // We always update the size if we are storing to the content table.
        // If we are storing to the metadata table, we do so only if the item
        // was flagged to update the size
        if (storeName === 'content' || !value.keepSize) {
            const oldSize = await getItemSize(value.ID, storeName, esDB);
            await updateSize(esDB, ciphertextSize(value.aesGcmCiphertext) - oldSize);
        }
        // inputStoringOutcome propagates QUOTA through recursive retries so the
        // caller learns that eviction happened even though the write succeeded
        return inputStoringOutcome ?? STORING_OUTCOME.SUCCESS;
    } catch (error: any) {
        if (error.name === 'QuotaExceededError') {
            // We check whether the present item is newer than the oldest one,
            // in which case we remove the latter to make space for the former
            const oldestItemInfo = await getOldestInfo(esDB);
            if (!oldestItemInfo) {
                // If there is no such oldest item, it means IDB is empty,
                // which is a rather peculiar state to throw a quota error
                esSentryReport('safelyWriteToIDBConditionally: quota reached with empty IDB', { error });
                return STORING_OUTCOME.FAILURE;
            }

            if (isTimepointSmaller(value.timepoint, oldestItemInfo.timepoint)) {
                // The incoming item is older than everything stored: don't evict for it
                return STORING_OUTCOME.QUOTA;
            }

            await deleteOldestItem(oldestItemInfo.ID, esDB);
            return safelyWriteToIDBConditionally(value, storeName, esDB, STORING_OUTCOME.QUOTA);
        } else {
            // Any other error should be interpreted as a failure
            esSentryReport('safelyWriteToIDBConditionally: put failed', { error });
            return STORING_OUTCOME.FAILURE;
        }
    }
};

/**
 * Write to the ES IDB and always remove the oldest item to make space for this write
 * in case we run out of it
 */
export const safelyWriteToIDBAbsolutely = async (
    value: any,
    key: string,
    storeName: 'config' | 'events' | 'indexingProgress',
    esDB: IDBPDatabase<EncryptedSearchDB>
): Promise<void> => {
    try {
        await esDB.put(storeName, value, key);
    } catch (error: any) {
        if (error.name === 'QuotaExceededError') {
            // Since the data we're trying to store to IDB has absolute
            // precedence over content, we simply remove the oldest item
            // and retry
            const oldestItemID = await getOldestID(esDB);
            if (!oldestItemID) {
                // If there is no such oldest item, it means IDB is empty,
                // which is a rather peculiar state to throw a quota error
                esSentryReport('safelyWriteToIDBAbsolutely: quota reached with empty IDB', { error });
                throw error;
            }

            await deleteOldestItem(oldestItemID, esDB);
            return safelyWriteToIDBAbsolutely(value, key, storeName, esDB);
        } else {
            // Any other error should be interpreted as a failure
            esSentryReport('safelyWriteToIDBAbsolutely: put failed', { error });
            throw error;
        }
    }
};
7,286
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esIDB/indexingProgress.ts
import { INDEXING_STATUS, TIMESTAMP_TYPE, defaultESProgress } from '../constants';
import { roundMilliseconds } from '../esHelpers';
import { ESProgress } from '../models';
import { openESDB, safelyWriteToIDBAbsolutely } from './indexedDB';

export type IndexedDBRow = 'metadata' | 'content';

/**
 * Open the ES database of the given user and return it only if the
 * indexingProgress object store exists. In every failure case undefined
 * is returned and, crucially, an already-open handle is closed first so
 * that no IDB connection is leaked on early-return paths
 */
const openIndexingProgressDB = async (userID: string) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }
    if (!esDB.objectStoreNames.contains('indexingProgress')) {
        esDB.close();
        return;
    }
    return esDB;
};

/**
 * Read the indexing progress of the given type from the indexingProgress table.
 * Returns undefined if the DB cannot be opened or the store is missing
 * (consistent with the other helpers in this module)
 */
const read = async (userID: string, row: IndexedDBRow) => {
    const esDB = await openIndexingProgressDB(userID);
    if (!esDB) {
        return;
    }
    const result = await esDB.get('indexingProgress', row);
    esDB.close();
    return result;
};

/**
 * Read the recovery point of the given type from the indexingProgress table
 */
const readRecoveryPoint = async (userID: string, row: IndexedDBRow) => {
    const progress = await read(userID, row);
    if (!progress) {
        return;
    }
    return progress.recoveryPoint;
};

/**
 * Write the given indexing progress object to the indexingProgress table
 */
const write = async (userID: string, progress: ESProgress, row: IndexedDBRow) => {
    const esDB = await openIndexingProgressDB(userID);
    if (!esDB) {
        return;
    }
    await safelyWriteToIDBAbsolutely(progress, row, 'indexingProgress', esDB);
    esDB.close();
};

/**
 * Increment by one the number of times the user has paused indexing
 */
const incrementNumPauses = async (userID: string, row: IndexedDBRow) => {
    const esDB = await openIndexingProgressDB(userID);
    if (!esDB) {
        return;
    }
    const progress = await esDB.get('indexingProgress', row);
    if (!progress) {
        // Close on this early-return path too, otherwise the handle leaks
        esDB.close();
        return;
    }
    progress.numPauses += 1;
    await safelyWriteToIDBAbsolutely(progress, row, 'indexingProgress', esDB);
    esDB.close();
};

/**
 * Add a timestamp to the set of indexing timestamps for indexing
 * @param type the kind of timestamp to record; defaults to a regular STEP
 */
const addTimestamp = async (userID: string, type: TIMESTAMP_TYPE = TIMESTAMP_TYPE.STEP, row: IndexedDBRow) => {
    const esDB = await openIndexingProgressDB(userID);
    if (!esDB) {
        return;
    }
    const progress = await esDB.get('indexingProgress', row);
    if (!progress) {
        esDB.close();
        return;
    }
    const { timestamps } = progress;
    timestamps.push({ type, time: roundMilliseconds(Date.now()) });
    await safelyWriteToIDBAbsolutely({ ...progress, timestamps }, row, 'indexingProgress', esDB);
    esDB.close();
};

/**
 * Set the initial estimate in seconds, but only if it's the first of such predictions,
 * for the indexing process
 */
const setOriginalEstimate = async (userID: string, inputEstimate: number, row: IndexedDBRow) => {
    const esDB = await openIndexingProgressDB(userID);
    if (!esDB) {
        return;
    }
    const progress = await esDB.get('indexingProgress', row);
    if (!progress) {
        esDB.close();
        return;
    }
    const { originalEstimate } = progress;
    if (originalEstimate === 0) {
        // 0 is the default value, i.e. no estimate has been stored yet
        await safelyWriteToIDBAbsolutely(
            { ...progress, originalEstimate: inputEstimate },
            row,
            'indexingProgress',
            esDB
        );
    }
    esDB.close();
};

/**
 * Overwrite the indexing process data with the given properties
 */
const set = async (userID: string, newProperties: Partial<ESProgress>, row: IndexedDBRow) => {
    const esDB = await openIndexingProgressDB(userID);
    if (!esDB) {
        return;
    }
    const progress = await esDB.get('indexingProgress', row);
    if (!progress) {
        esDB.close();
        return;
    }
    await safelyWriteToIDBAbsolutely({ ...progress, ...newProperties }, row, 'indexingProgress', esDB);
    esDB.close();
};

/**
 * Set the recovery point of the indexing process
 */
const setRecoveryPoint = (userID: string, recoveryPoint: unknown, row: IndexedDBRow) =>
    set(userID, { recoveryPoint }, row);

/**
 * Set the status of the indexing process
 */
const setStatus = (userID: string, status: INDEXING_STATUS, row: IndexedDBRow) => set(userID, { status }, row);

/**
 * Checks whether the indexing process is paused or not
 */
const isIndexingPaused = async (userID: string, row: IndexedDBRow) => {
    const progress = await read(userID, row);
    if (!progress) {
        return false;
    }
    return progress.status === INDEXING_STATUS.PAUSED;
};

/**
 * Checks whether the indexing process is done or not
 */
const isIndexingDone = async (userID: string, row: IndexedDBRow) => {
    const progress = await read(userID, row);
    if (!progress) {
        return false;
    }
    return progress.status === INDEXING_STATUS.ACTIVE;
};

/**
 * Set the status of the indexing process to ACTIVE, i.e. for when indexing
 * is done, and reset to default all other properties since they are no
 * longer relevant
 */
const setActiveStatus = async (userID: string, row: IndexedDBRow) => {
    const esDB = await openIndexingProgressDB(userID);
    if (!esDB) {
        return;
    }
    await safelyWriteToIDBAbsolutely(
        {
            ...defaultESProgress,
            status: INDEXING_STATUS.ACTIVE,
        },
        row,
        'indexingProgress',
        esDB
    );
    esDB.close();
};

/**
 * Build the set of progress helpers bound to a specific row
 * ('metadata' or 'content') of the indexingProgress table
 */
export const getIndexingProgressQueryHelpers = (row: IndexedDBRow) => {
    return {
        read: (userID: string) => read(userID, row),
        readRecoveryPoint: (userID: string) => readRecoveryPoint(userID, row),
        write: (userID: string, progress: ESProgress) => write(userID, progress, row),
        incrementNumPauses: (userID: string) => incrementNumPauses(userID, row),
        addTimestamp: (userID: string, type?: TIMESTAMP_TYPE) => addTimestamp(userID, type, row),
        setOriginalEstimate: (userID: string, inputEstimate: number) => setOriginalEstimate(userID, inputEstimate, row),
        set: (userID: string, newProperties: Partial<ESProgress>) => set(userID, newProperties, row),
        setRecoveryPoint: (userID: string, recoveryPoint: unknown) => setRecoveryPoint(userID, recoveryPoint, row),
        setStatus: (userID: string, status: INDEXING_STATUS) => setStatus(userID, status, row),
        setActiveStatus: (userID: string) => setActiveStatus(userID, row),
        isIndexingPaused: (userID: string) => isIndexingPaused(userID, row),
        isIndexingDone: (userID: string) => isIndexingDone(userID, row),
    };
};

export const metadataIndexingProgress = getIndexingProgressQueryHelpers('metadata');
export const contentIndexingProgress = getIndexingProgressQueryHelpers('content');
7,287
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/esIDB/metadata.ts
import { IDBPDatabase } from 'idb';

import { STORING_OUTCOME } from '../constants';
import { ciphertextSize, decryptFromDB } from '../esHelpers';
import { ESItemInfo, ESTimepoint, EncryptedItemWithInfo, EncryptedSearchDB } from '../models';
import { updateSize } from './configObjectStore';
import { openESDB, safelyWriteToIDBConditionally } from './indexedDB';

/**
 * Get a decrypted metadata item from IndexedDB
 * @param userID ID of the user whose ES database to open
 * @param itemID primary key of the item in the metadata table
 * @param indexKey symmetric key used to decrypt the stored ciphertext
 * @returns the decrypted metadata, or undefined if the DB cannot be
 * opened or the item does not exist
 */
export const readMetadataItem = async <ESItemMetadata>(userID: string, itemID: string, indexKey: CryptoKey) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    const encryptedMetadataItem = await esDB.get('metadata', itemID);
    esDB.close();
    if (!encryptedMetadataItem) {
        return;
    }

    return decryptFromDB<ESItemMetadata>(encryptedMetadataItem.aesGcmCiphertext, indexKey);
};

/**
 * Read a batch of metadata items specified by their IDs.
 * The result array is positionally aligned with the input IDs:
 * entries for IDs not present in IDB are undefined
 */
export const readMetadataBatch = async (
    userID: string,
    IDs: string[]
): Promise<(EncryptedItemWithInfo | undefined)[] | undefined> => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    // All gets happen inside a single readonly transaction
    const tx = esDB.transaction('metadata', 'readonly');
    const metadata = await Promise.all(
        IDs.map((ID) =>
            tx.store
                .get(ID)
                .then((value) =>
                    !!value ? { ID, timepoint: value.timepoint, aesGcmCiphertext: value.aesGcmCiphertext } : undefined
                )
        )
    );
    await tx.done;

    esDB.close();
    return metadata;
};

/**
 * Read all IDs of stored metadata, sorted according to the temporal index.
 * @param checkpoint is the timepoint from which to search items
 * @param reverse indicates whether to return items in reverse chronological order
 */
export const readSortedIDs = async (userID: string, reverse: boolean, checkpoint?: ESTimepoint) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    let range: IDBKeyRange | undefined;
    if (!!checkpoint) {
        // The bound is open (second argument true) so the checkpoint item
        // itself is excluded; direction depends on the requested order
        range = reverse ? IDBKeyRange.upperBound(checkpoint, true) : IDBKeyRange.lowerBound(checkpoint, true);
    }

    const IDs = await esDB.getAllKeysFromIndex('metadata', 'temporal', range);
    esDB.close();

    // The index yields ascending temporal order; reverse in memory if needed
    if (reverse) {
        IDs.reverse();
    }

    return IDs;
};

/**
 * Fetch the number of items from the metadata table
 */
export const readNumMetadata = async (userID: string) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    const count = await esDB.count('metadata');
    esDB.close();
    return count;
};

/**
 * Retrieve the ID of the oldest item's metadata
 */
export const getOldestID = async (esDB: IDBPDatabase<EncryptedSearchDB>) =>
    esDB.getKeyFromIndex('metadata', 'temporal', IDBKeyRange.lowerBound([0, 0]));

/**
 * Retrieve the ID and timepoint of the oldest item's metadata
 */
export const getOldestInfo = async (esDB: IDBPDatabase<EncryptedSearchDB>): Promise<ESItemInfo | undefined> =>
    getOldestID(esDB).then((ID) =>
        ID ? esDB.get('metadata', ID).then((item) => (!!item ? { ID, timepoint: item.timepoint } : undefined)) : undefined
    );

/**
 * Wrapper for getOldestInfo that internally opens an instance of esDB
 */
export const wrappedGetOldestInfo = async (userID: string): Promise<ESItemInfo | undefined> => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    const oldestInfo = await getOldestInfo(esDB);
    esDB.close();
    return oldestInfo;
};

/**
 * Remove metadata from and write metadata to the metadata table of IDB.
 * Removals happen first in a single readwrite transaction (and the stored
 * size counter is decreased accordingly), then additions are written one
 * by one so that quota errors can be handled per item
 * @returns STORING_OUTCOME.QUOTA if any addition hit the quota (i.e. some
 * eviction or refusal happened), STORING_OUTCOME.SUCCESS otherwise
 */
export const executeMetadataOperations = async (
    userID: string,
    itemsToRemove: string[],
    itemsToAdd: EncryptedItemWithInfo[]
) => {
    const esDB = await openESDB(userID);
    if (!esDB) {
        return;
    }

    const tx = esDB.transaction('metadata', 'readwrite');
    // Read each item's size before deleting it so the size counter can be updated
    const removeSizes = await Promise.all(
        itemsToRemove.map((ID) =>
            tx.store.get(ID).then((item) => {
                void tx.store.delete(ID);
                return ciphertextSize(item?.aesGcmCiphertext);
            })
        )
    );
    await tx.done;

    await updateSize(esDB, -1 * removeSizes.reduce((p, c) => p + c, 0));

    const storingOutcomes: STORING_OUTCOME[] = [];
    // Then all items to add are inserted
    for (const itemToAdd of itemsToAdd) {
        storingOutcomes.push(await safelyWriteToIDBConditionally(itemToAdd, 'metadata', esDB));
    }

    esDB.close();

    if (storingOutcomes.some((storingOutcome) => storingOutcome === STORING_OUTCOME.QUOTA)) {
        return STORING_OUTCOME.QUOTA;
    }
    return STORING_OUTCOME.SUCCESS;
};
7,288
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/models/esCallbacks.ts
import { GetItemInfo } from './esFunctions';
import { RecordProgress } from './esIndexing';
import { CachedItem, ESEvent, ESStatusBooleans, EventsObject } from './interfaces';

/**
 * Interface for all the callbacks that are required to run the basic
 * functionalities of the ES library. All products must pass these
 * callbacks in order to use the library
 */
interface RequiredESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent> {
    /**
     * Retrieve a batch of items' metadata, the mechanism to keep track of where the fetching
     * has arrived is supposed to be built-in (but can optionally take a boolean indicating whether
     * to store progress in IDB or only in memory. It defaults to true, i.e. store in IDB too.)
     * @param signal an abort signal to abort potential API calls in case of sudden aborts
     * @param isBackgroundIndexing whether the current indexing was triggered in the background, i.e.
     * without explicit user prompt. This is optional but might be useful, e.g., in case throttling is needed
     * @returns An array of metadata items, i.e. the next batch of items that need to be indexed,
     * as well as a callback to set the recovery point in IndexedDB for the next call to queryItemsMetadata
     * to start from. This ensures that the recovery point is stored only when and if all associated items
     * are actually indexed. Note that this is optional and if not returned, no recovery point is set
     */
    queryItemsMetadata: (
        signal: AbortSignal,
        isBackgroundIndexing?: boolean
    ) => Promise<{
        resultMetadata?: ESItemMetadata[];
        setRecoveryPoint?: (setIDB?: boolean) => Promise<void>;
    }>;

    /**
     * Fetch the last event ID before starting building IDB to mark the point in time
     * where a catch-up must start, for every component affecting the specific product
     * @returns The event ID of the last event that happened before indexing
     */
    getPreviousEventID: () => Promise<EventsObject>;

    /**
     * Extract the ID and timepoint of an item or from its encrypted version. The timepoint is the key according
     * to which items are stored in the temporal index of the main object store, while the ID is the primary
     * key of the object store itself
     * @param item Either an item or the object stored in IDB (containing the item's encryption)
     * @returns The time associated to the item
     */
    getItemInfo: GetItemInfo<ESItemMetadata>;

    /**
     * Get whether there is a search happening and its search parameters
     * @returns @param isSearch Whether the app is on a search page
     * @returns @param esSearchParams The search parameters
     */
    getSearchParams: () => {
        isSearch: boolean;
        esSearchParams: ESSearchParameters | undefined;
    };

    /**
     * Extract keywords from the search parameters
     * @param esSearchParams The current search parameter
     * @returns An array of all the search keywords, or undefined if none were selected
     */
    getKeywords: (esSearchParams: ESSearchParameters) => string[] | undefined;

    /**
     * Decide whether the decrypted content should be returned as a search result
     * @param keywords User specified keywords
     * @param itemToSearch The item on which to apply the search
     * @param hasApostrophe Whether apostrophes need to be normalized
     * @returns Whether itemToSearch is a search result or not
     */
    searchKeywords: (
        keywords: string[],
        itemToSearch: CachedItem<ESItemMetadata, ESItemContent>,
        hasApostrophe: boolean
    ) => boolean;

    /**
     * Return the total number of items (e.g. mails in mail, files in drive, ...)
     * @returns The total number of items
     */
    getTotalItems: () => Promise<number>;

    /**
     * Read the last event according to which IDB was synced from local storage and
     * return all new events that need to be synced, whether a refresh is needed and
     * the event ID(s) to catch up the next time. In case, instead, the event object is
     * given to the function, return all events since the given event object
     * @returns @param newEvents An array of events that need to be synced
     * @returns @param shouldRefresh Whether a hard reset of IDB is needed, i.e. erasing it and re-index
     * @returns @param eventsToStore The EventsObject containing all of the last events to be stored in
     * local storage for the next catch-up
     */
    getEventFromIDB: (previousEventsObject?: EventsObject) => Promise<{
        newEvents: ESEvent<ESItemMetadata>[];
        shouldRefresh: boolean;
        eventsToStore: EventsObject;
    }>;
}

/**
 * Interface for all the callbacks that are optional and give access to
 * functionalities which are not essential for the correct functioning of the ES library.
 * Each callback description details what happens if the callback is not specified
 */
export interface OptionalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent> {
    /**
     * Reset the sorting to inverse chronological order, since ES does not support other orders
     * This callback is optional: if not provided, sorting is never reset
     */
    resetSort: () => void;

    /**
     * Return true if the search is in reverse chronological order
     * This callback is optional: if not provided, search is always considered in reverse chronological order
     * @param esSearchParams The search parameters, in which the information about the order of the search is stored
     * @returns Whether the search is in reverse chronological order or not
     */
    checkIsReverse: (esSearchParams: ESSearchParameters) => boolean;

    /**
     * Check whether the only thing that changed between two consecutive (and "adjacent", in the
     * sense that they are performed one after the other) searches is the ordering and if the
     * search is "complete", i.e. not with partial results
     * This callback is optional: if not provided, search is always performed even if only the sorting changes
     * @param esSearchParams1 The search parameters of the first search
     * @param esSearchParams2 The search parameters of the second search
     * @returns Whether the search filters (e.g. keywords, folder, ...) are the same, such that only a change
     * of ordering is needed
     */
    shouldOnlySortResults: (esSearchParams1: ESSearchParameters, esSearchParams2: ESSearchParameters) => boolean;

    /**
     * Return the time interval inside the search parameters when specified by
     * the user as a search filter.
     * This callback is optional: if not provided, the time interval is always undefined as if the user
     * never selected one
     * @param esSearchParameters The product's specific search parameters
     * @returns The begin and end of the time interval selected by the user
     */
    getSearchInterval: (esSearchParameters: ESSearchParameters) => {
        begin: number | undefined;
        end: number | undefined;
    };

    /**
     * Test whether any filter applies to a specific metadata item.
     * This callback is optional: if not provided, filters are not applied to any item
     * @param esSearchParams Search parameters to apply any user selected filters
     * @param metadata The metadata on which to apply the filters
     * @returns Whether metadata passes the filters or not
     */
    applyFilters: (esSearchParams: ESSearchParameters, metadata: ESItemMetadata) => boolean;

    /**
     * Perform a custom action when a deletion event deletes an item's content, specified by its ID.
     * @param ID The ID of the item being deleted
     * @param indexKey The symmetric key to decrypt the item's metadata
     */
    onContentDeletion: (ID: string, indexKey: CryptoKey) => Promise<void>;

    /**
     * Send the API request to fetch the item and return it decrypted. If fetching fails, return undefined.
     * If decryption fails, which can happen for legitimate reasons like password reset, return an empty object
     * This callback is optional: if not provided, content search cannot be enabled, nor can items' content
     * be synced when events happen
     * @param itemID The unique ID used as a primary key in IDB
     * @param abortSignal An AbortSignal object to abort the request
     * @returns A decrypted item, potentially without content, or undefined if something fails and itemMetadata is not provided
     */
    fetchESItemContent?: (itemID: string, signal?: AbortSignal) => Promise<ESItemContent | undefined>;

    /**
     * Called on key reactivation, attempt to decrypt items stored as undecryptable inside IDB
     * @returns The count of items that were successfully decrypted
     */
    correctDecryptionErrors: (
        userID: string,
        indexKey: CryptoKey,
        abortIndexingRef: React.MutableRefObject<AbortController>,
        esStatus: ESStatusBooleans,
        recordProgress: RecordProgress
    ) => Promise<number>;
}

export type ESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent = void> = RequiredESCallbacks<
    ESItemMetadata,
    ESSearchParameters,
    ESItemContent
> &
    Partial<OptionalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>>;

export type InternalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent = void> = RequiredESCallbacks<
    ESItemMetadata,
    ESSearchParameters,
    ESItemContent
> &
    OptionalESCallbacks<ESItemMetadata, ESSearchParameters, ESItemContent>;
7,289
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/models/esFunctions.ts
import { MutableRefObject } from 'react';

import { DecryptedKey } from '@proton/shared/lib/interfaces';

import { CachedItem, ESEvent, ESIndexingState, ESItem, ESItemInfo, ESStatus, ESTimepoint } from './interfaces';

/**
 * Show or update the search results in the UI
 */
export type ESSetResultsList<ESItemMetadata, ESItemContent> = (
    Elements: ESItem<ESItemMetadata, ESItemContent>[]
) => void;

/**
 * Return the user keys
 */
export type GetUserKeys = () => Promise<DecryptedKey[]>;

/**
 * Extract ID and timepoint from an item, encrypted or otherwise
 */
export type GetItemInfo<ESItemMetadata> = (item: ESItemMetadata) => ESItemInfo;

/**
 * Types of ES functions
 */
export type EncryptedSearch<ESItemMetadata, ESItemContent> = (
    setResultsList: ESSetResultsList<ESItemMetadata, ESItemContent>,
    minimumItems?: number
) => Promise<boolean>;

export type EncryptedSearchExecution<ESItemMetadata, ESItemContent, ESSearchParameters> = (
    setResultsList: ESSetResultsList<ESItemMetadata, ESItemContent>,
    esSearchParams: ESSearchParameters,
    minimumItems: number | undefined,
    sendMetricsOnSearch?: boolean
) => Promise<boolean>;

export type HighlightString = (content: string, setAutoScroll: boolean) => string;

export type HighlightMetadata = (
    metadata: string,
    isBold?: boolean,
    trim?: boolean
) => { numOccurrences: number; resultJSX: JSX.Element };

export type EnableContentSearch = (options?: {
    isRefreshed?: boolean | undefined;
    isBackgroundIndexing?: boolean;
    notify?: boolean | undefined;
}) => Promise<void>;

export type EnableEncryptedSearch = (options?: {
    isRefreshed?: boolean | undefined;
    isBackgroundIndexing?: boolean;
    showErrorNotification?: boolean;
}) => Promise<boolean>;

/**
 * Core functionalities of ES to be used in the product
 */
export interface EncryptedSearchFunctions<ESItemMetadata, ESSearchParameters, ESItemContent = void> {
    /**
     * Run a new encrypted search or increment an existing one (the difference is handled internally).
     * @param setResultsList a callback that will be given the items to show, i.e. those found as search
     * results, and that should handle the UI part of displaying them to the users
     * @param minimumItems is the optional smallest number of items that the search is expected to produce.
     * If specified this parameter instructs the search to try finding at least this number of items from disk,
     * both in case of a new search with limited cache and in case of an incremented search
     * @returns a boolean indicating the success of the search
     */
    encryptedSearch: EncryptedSearch<ESItemMetadata, ESItemContent>;

    /**
     * Insert the <mark></mark> highlighting markdown in a string and returns a string containing it,
     * which then needs to be displayed in the UI. Note that the keywords to highlight are extracted
     * directly with the getSearchParams callback
     * @param content the string where to insert the markdown
     * @param setAutoScroll whether to insert the data-auto-scroll attribute to the first instance of
     * the inserted mark tags. The UI should automatically scroll, if possible, to said first tag
     * @returns the string containing the markdown
     */
    highlightString: HighlightString;

    /**
     * Inserts the <mark></mark> highlighting markdown in a string and returns directly the JSX node
     * to be used in React
     * @param metadata the string where to insert the markdown
     * @param isBold specifies whether the text should also be bolded (e.g. in some headers)
     * @param trim specifies whether to substitute the initial portion of the string by an ellipsis
     * if it's too long
     * @returns an object containing two properties: numOccurrences is the total number of times the
     * markdown tag has been added to the given string, while resultJSX is the actual React node to be
     * displayed
     */
    highlightMetadata: HighlightMetadata;

    /**
     * Start indexing metadata only
     * @param isRefreshed is only used to be forwarded to the metrics route for statistical purposes.
     * Whenever the user manually starts indexing, the latter shouldn't be specified (and defaults to false)
     */
    enableEncryptedSearch: EnableEncryptedSearch;

    /**
     * Start indexing for the first time or resume it after the user paused it. It optionally accepts
     * an object with two properties.
     * @param notify specifies whether any pop-up banner will be displayed to the user indicating success
     * or failure of the indexing process
     * @param isRefreshed is only used to be forwarded to the metrics route for statistical purposes.
     * Whenever the user manually starts indexing, the latter shouldn't be specified (and defaults to false).
     * @param isResumed specifies whether to resume previously paused indexing processes. The difference is that
     * if it's not specified only those processes that were halted and, therefore, have the INDEXING status
     * saved will be resumed. If it's set to true, instead, also those that were paused, i.e. have the PAUSED
     * status, are resumed as well
     */
    enableContentSearch: EnableContentSearch;

    /**
     * Process events (according to the provided callbacks). It should be used in whatever event handling
     * system the product uses to correctly sync the ES database.
     * @param event a single event containing a change to the items stored in the ES database
     */
    handleEvent: (event: ESEvent<ESItemMetadata> | undefined) => Promise<void>;

    /**
     * @param ID the item ID
     * @returns whether a given item, specified by its ID, is part of the currently shown search results or not.
     * It returns false if a search is not ongoing
     */
    isSearchResult: (ID: string) => boolean;

    /**
     * Wipe all local data related to ES, both from IndexedDB and local storage
     */
    esDelete: () => Promise<void>;

    /**
     * @returns whether some conditions to apply highlighting are met, i.e. whether a search is
     * on and there are keywords. For example in cases where the user only specifies filters
     * and not keywords, this function returns false
     */
    shouldHighlight: () => boolean;

    /**
     * Run some initial checks on the status of ES. This must be the first function that
     * the EncryptedSearchProvider runs, as it checks for new events, continues indexing in
     * case a previous one was started, checks whether the index key is still accessible
     */
    initializeES: () => Promise<void>;

    /**
     * Pause the currently ongoing content indexing process, if any
     */
    pauseContentIndexing: () => Promise<void>;

    /**
     * Pause the currently ongoing metadata indexing process, if any
     */
    pauseMetadataIndexing: () => Promise<void>;

    /**
     * Wrapper around `correctDecryptionErrors` es callback, used to correct previous decryption errors after a key recovery
     */
    correctDecryptionErrors: () => Promise<number>;

    /**
     * Start the caching routine, i.e. fetching and decrypting as many items from the ES
     * database as possible to be stored in memory for quick access
     * @returns the reference to the current cache
     */
    cacheIndexedDB: () => Promise<void>;

    /**
     * Deactivates ES. This does not remove anything, and the database keeps being synced.
     * It is used to switch ES temporarily off in cases when server side search is available.
     */
    toggleEncryptedSearch: () => Promise<void>;

    /**
     * Returns the ES cache, a map of indexed items. If the cache is not initialized, the return will be an empty map
     */
    getCache: () => Map<string, CachedItem<ESItemMetadata, ESItemContent>>;

    /**
     * Reset the cache to its default empty state
     */
    resetCache: () => void;

    /**
     * An object containing the different variables related to Encrypted Search status
     */
    esStatus: ESStatus<ESItemMetadata, ESItemContent, ESSearchParameters>;

    /**
     * A reference object to two values related to an IndexedDB operation status.
     * The first number in the returned list is the current number of items processed while
     * the second is the total number of items to process. It is useful to show a progress bar.
     */
    progressRecorderRef: MutableRefObject<ESTimepoint>;

    /**
     * An object containing variables describing the status of the indexing progress
     */
    esIndexingProgressState: ESIndexingState;
}
7,290
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/models/esIndexing.ts
import { IndexedDBRow } from '../esIDB'; /** * Mutate progress ref to take into account newly indexed items and estimate new index time, and optionnally persist the progress state in the DB * * @param progress can be either a single number representing the count of indexed items OR a tuple composed of the count of indexed items and the total items to index */ export type RecordProgress = (progress: number | [number, number], indexedDbRow?: IndexedDBRow) => Promise<void>;
7,291
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/models/index.ts
export * from './esCallbacks'; export * from './esFunctions'; export * from './interfaces'; export * from './esIndexing';
7,292
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/lib/models/interfaces.ts
import { DBSchema } from 'idb'; import { ES_SYNC_ACTIONS, INDEXING_STATUS, TIMESTAMP_TYPE } from '../constants'; import { ESSetResultsList } from './esFunctions'; /** * Object to be stored locally to retry an API call */ export interface RetryObject { retryTime: number; numberRetries: number; } /** * Object stored in local storage during indexing to keep track * of its status. Note that recoveryPoint can differ between * metadata or content indexing */ export interface ESProgress { totalItems: number; numPauses: number; isRefreshed: boolean; timestamps: { type: TIMESTAMP_TYPE; time: number; }[]; originalEstimate: number; recoveryPoint: any; status: INDEXING_STATUS; } /** * Collection of progress objects defined by the ESProgress interface * "metadata" is always present, being it the default content type enabled by encrypted search */ export interface ProgressObject { metadata: ESProgress; content?: ESProgress; } /** * Collection of event IDs for all the components specified by the * product (e.g. calendars in calendar and shares in drive) */ export interface EventsObject { [components: string]: string; } /** * Object containing the ciphertext of items as stored in IDB */ export interface AesGcmCiphertext { iv: Uint8Array; ciphertext: ArrayBuffer; } /** * The type of keys in the temporal index of ESDB. 
The first number * is supposed to be a time coordinate, while the second one a * tie-breaker in case of equal time */ export type ESTimepoint = [number, number]; /** * Object representing the primary ID and the temporal coordinate * of an item */ export interface ESItemInfo { ID: string; timepoint: ESTimepoint; } /** * Encrypted item, that can be either metadata or content, with * extra information in plaintext */ export interface EncryptedItemWithInfo extends ESItemInfo { keepSize?: boolean; aesGcmCiphertext: AesGcmCiphertext; } /** * Encrypted item, that can be either metadata or content, with * its ID in plaintext */ export type EncryptedItemWithID = Omit<EncryptedItemWithInfo, 'timepoint' | 'keepSize'>; /** * Ciphertexts in the metadata table of IDB have out-of-line keys, * therefore we need to specify the ID with which to index items externally */ export type EncryptedMetadataItem = Omit<EncryptedItemWithInfo, 'ID'>; /** * List of possible key-value pairs types in the config object store */ export interface ConfigValues { indexKey: string; size: number; enabled: boolean; limited: boolean; retries?: string; migrated?: any; } export type ConfigKeys = keyof ConfigValues; /** * IndexedDB structure. Each sub-object corresponds to an object store * - config contains overall information, e.g. 
whether ES was enabled * or disabled, the index key and the estimated size of all items * - events contains the latest event IDs according to which items * had been updated, for all components of the product * - indexingProgress contains information about the status of indexing * for metadata and for any other content type specified by the product * - metadata contains all the actual items' metadata * - content contains the content of the items which are stored in the * metadata objectStore */ export interface EncryptedSearchDB extends DBSchema { config: { value: ConfigValues[ConfigKeys]; key: ConfigKeys; }; events: { value: string; key: string; }; indexingProgress: { value: ESProgress; key: string; }; metadata: { value: EncryptedMetadataItem; key: string; indexes: { temporal: 'timepoint' }; }; content: { value: AesGcmCiphertext; key: string; }; } /** * Collection of fields to determine UI elements during indexing (e.g. progress bar, ...) */ export interface ESIndexingState { /** * number of items indexed so far */ esProgress: number; /** * estimated time (in minutes) expected for indexing to finish */ estimatedMinutes: number; /** * Total items to index */ totalIndexingItems: number; /** * progress value in percentage, i.e. number from 0 to 100 */ currentProgressValue: number; } export interface CachedItem<ESItemMetadata, ESItemContent> { metadata: ESItemMetadata; content?: ESItemContent; } /** * A decrypted copy of IDB kept in memory in plaintext form. The property * esCache is a map of all indexed items. 
The property isCacheLimited refers * to content only, as metadata is assumed to always fit cache */ export interface ESCache<ESItemMetadata, ESItemContent> { esCache: Map<string, CachedItem<ESItemMetadata, ESItemContent>>; cacheSize: number; isCacheLimited: boolean; isCacheReady: boolean; } /** * Base type for metrics on encrypted search */ interface ESMetrics { indexSize: number; // Note: the metrics dashboard expects a variable called "numMessagesIndexed" but // it doesn't make too much sense in general to talk about "messages" numMessagesIndexed: number; } /** * Type of the metrics report sent after each search */ export interface ESSearchMetrics extends ESMetrics { cacheSize: number; isFirstSearch: boolean; isCacheLimited: boolean; searchTime: number; } /** * Type of the metrics report sent after indexing */ export interface ESIndexMetrics extends ESMetrics { numPauses: number; originalEstimate: number; numInterruptions: number; isRefreshed: boolean; indexTime: number; } /** * Required fields to correctly process events and keep IDB in sync. This object * instructs the code to apply Action to the item specified by ID. ItemMetadata * contains the metadata of the item being changed and can be omitted only in * deletion events */ export interface ESItemEvent<ESItemMetadata> { ID: string; Action: ES_SYNC_ACTIONS; ItemMetadata: ESItemMetadata | undefined; } /** * Overall structure of an event */ export interface ESEvent<ESItemMetadata> { Refresh?: number; Items?: ESItemEvent<ESItemMetadata>[]; attemptReDecryption?: boolean; eventsToStore: EventsObject; } /** * Interface representing an ESItem, i.e. the combination of metadata plus * content. This is the overall item that can be searched. 
Note that metadata * must always be present, while content is optional, either because content * search hasn't been activated, or because a product doesn't support content * altogether */ export type ESItem<ESItemMetadata, ESItemContent> = ESItemMetadata & Partial<ESItemContent>; /** * Boolean variables of the ES status useful to display correct UI * @var dbExists whether an instance of IndexedDB exists * @var isEnablingContentSearch whether indexing of content is ongoing * @var isDBLimited whether IndexedDB has fewer than the total amount of items * @var esEnabled whether ES is enabled (in case a fallback to server-side search exists) * @var esSupported whether the browser supports our search engine. It's true by default until indexing fails to initialise IndexedDB * @var isRefreshing whether a refresh of IndexedDB (when correcting decryption errors) is ongoing * @var isSearchPartial whether the current search only has partial results. It happens when IndexedDB does not fit in cache * @var isSearching whether a search is ongoing * @var isCacheLimited whether the cache is limited, i.e. 
it doesn't contain all items that are in IndexedDB * @var isCacheReady whether in-memory cache load is filled * @var isEnablingEncryptedSearch whether indexing of metadata is ongoing * @var isContentIndexingPaused whether content indexing is paused * @var isMetadataIndexingPaused whether metadata indexing is paused * @var contentIndexingDone whether content indexing is finished */ export interface ESStatusBooleans { dbExists: boolean; isDBLimited: boolean; esEnabled: boolean; esSupported: boolean; isRefreshing: boolean; isSearchPartial: boolean; isSearching: boolean; isFirstSearch: boolean; isEnablingContentSearch: boolean; isContentIndexingPaused: boolean; isMetadataIndexingPaused: boolean; isEnablingEncryptedSearch: boolean; contentIndexingDone: boolean; isConfigFromESDBLoaded: boolean; } /** * Internal variables on the status of ES */ export interface ESStatus<ESItemMetadata, ESItemContent, ESSearchParameters> extends ESStatusBooleans { permanentResults: ESItem<ESItemMetadata, ESItemContent>[]; setResultsList: ESSetResultsList<ESItemMetadata, ESItemContent>; lastTimePoint: ESTimepoint | undefined; previousESSearchParams: ESSearchParameters | undefined; cachedIndexKey: CryptoKey | undefined; getCacheStatus: () => { isCacheReady: boolean; isCacheLimited: boolean }; }
7,293
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/test/index.spec.js
import { CryptoProxy } from '@proton/crypto'; import { Api as CryptoApi } from '@proton/crypto/lib/worker/api'; // Initialize CryptoProxy using a non-worker endpoint CryptoProxy.setEndpoint(new CryptoApi(), (endpoint) => endpoint.clearKeyStore()); const testsContext = require.context('.', true, /.spec.(js|tsx?)$/); testsContext.keys().forEach(testsContext);
7,294
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/test/karma.conf.js
const karmaJasmine = require('karma-jasmine'); const karmaWebpack = require('karma-webpack'); const karmaSpecReporter = require('karma-spec-reporter'); const karmaChromeLauncher = require('karma-chrome-launcher'); const { chromium } = require('playwright'); process.env.CHROME_BIN = chromium.executablePath(); module.exports = (config) => { config.set({ basePath: '..', frameworks: ['jasmine', 'webpack'], plugins: [karmaJasmine, karmaWebpack, karmaChromeLauncher, karmaSpecReporter], files: ['test/index.spec.js'], preprocessors: { 'test/index.spec.js': ['webpack'], }, webpack: { mode: 'development', resolve: { extensions: ['.js', '.ts', '.tsx'], fallback: { crypto: false, buffer: false, stream: false, }, }, module: { rules: [ { test: /\.tsx?$/, use: [ { loader: 'ts-loader', options: { transpileOnly: true, compilerOptions: { jsx: 'react-jsx', }, }, }, ], exclude: /node_modules\/(?!.*pmcrypto)/, }, ], }, devtool: 'inline-source-map', }, mime: { 'text/x-typescript': ['ts', 'tsx'], }, reporters: ['spec'], port: 9876, colors: true, logLevel: config.LOG_INFO, autoWatch: false, customLaunchers: { ChromeHeadlessCI: { base: 'ChromeHeadless', flags: ['--no-sandbox'], }, }, browsers: ['ChromeHeadlessCI'], singleRun: true, concurrency: Infinity, }); };
7,295
0
petrpan-code/ProtonMail/WebClients/packages/encrypted-search
petrpan-code/ProtonMail/WebClients/packages/encrypted-search/test/placeholder.spec.js
import { roundMilliseconds } from '../lib'; describe('placeholder', () => { it('should round milliseconds', async () => { expect(roundMilliseconds(1644340785178)).toEqual(1644340785); }); });
7,296
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/eslint-config-proton/.eslintrc.js
module.exports = { extends: ['@proton/eslint-config-proton'], parser: '@typescript-eslint/parser', parserOptions: { tsconfigRootDir: __dirname, project: './tsconfig.json', }, ignorePatterns: ['.eslintrc.js'], };
7,297
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/eslint-config-proton/LICENSE
GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. 
Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. 
Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. 
A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. 
All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. 
This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. 
But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. 
If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS
7,298
0
petrpan-code/ProtonMail/WebClients/packages
petrpan-code/ProtonMail/WebClients/packages/eslint-config-proton/README.md
# Proton ESLint config Modern ESLint config for a more civilized age. ## How to use Add the following to the `package.json` dev dependencies. ```json "@proton/eslint-config-proton": "workspace:^", ``` Then, use the following `.eslintrc` config. ```js module.exports = { extends: ['@proton/eslint-config-proton'], parser: '@typescript-eslint/parser', parserOptions: { tsconfigRootDir: __dirname, project: './tsconfig.json', }, ignorePatterns: ['.eslintrc.js'], }; ```
7,299