| index (int64, 0-0) | repo_id (string, 16-181 chars) | file_path (string, 28-270 chars) | content (string, 1-11.6M chars) | __index_level_0__ (int64, 0-10k) |
|---|---|---|---|---|
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useTrashedLinksListing.test.tsx |
import { act, renderHook } from '@testing-library/react-hooks';
import { VolumesStateProvider } from '../../_volumes/useVolumesState';
import { LinksStateProvider } from '../useLinksState';
import { PAGE_SIZE } from './useLinksListingHelpers';
import { useTrashedLinksListing } from './useTrashedLinksListing';
jest.mock('@proton/shared/lib/api/drive/volume', () => ({
queryVolumeTrash: jest.fn(),
}));
const mockRequest = jest.fn();
jest.mock('../../_api/useDebouncedRequest', () => {
const useDebouncedRequest = () => {
return mockRequest;
};
return useDebouncedRequest;
});
jest.mock('../../_utils/errorHandler', () => {
return {
useErrorHandler: () => ({
showErrorNotification: jest.fn(),
showAggregatedErrorNotification: jest.fn(),
}),
};
});
jest.mock('../useLink', () => {
const useLink = () => {
return {
decryptLink: jest.fn(),
};
};
return useLink;
});
const queryVolumeTrashMock = require('@proton/shared/lib/api/drive/volume').queryVolumeTrash as jest.Mock;
const generateArrayOfRandomStrings = (size: number): string[] => {
return Array.from({ length: size }, () => Math.random().toString(36).substring(2));
};
describe('useTrashedLinksListing', () => {
let hook: {
current: ReturnType<typeof useTrashedLinksListing>;
};
beforeEach(() => {
jest.resetAllMocks();
const wrapper = ({ children }: { children: React.ReactNode }) => (
<VolumesStateProvider>
<LinksStateProvider>{children}</LinksStateProvider>
</VolumesStateProvider>
);
const { result } = renderHook(() => useTrashedLinksListing(), { wrapper });
hook = result;
jest.resetAllMocks();
});
it('should fetch the first page of trashed links for a given volume', async () => {
const volumeId = '1';
const page = 0;
const response = {
Trash: [{ ShareID: '1', LinkIDs: generateArrayOfRandomStrings(10) }],
};
mockRequest.mockResolvedValue(response);
await act(async () => {
await hook.current.loadTrashedLinks(new AbortController().signal, volumeId, () =>
Promise.resolve({ links: [], parents: [], errors: [] })
);
});
expect(queryVolumeTrashMock).toHaveBeenCalledWith(volumeId, { Page: page, PageSize: PAGE_SIZE });
});
it('should increment the page count when fetching the next page of trashed links', async () => {
const volumeId = '1';
const page = 0;
let firstResponse = {
Trash: [{ ShareID: '1', LinkIDs: generateArrayOfRandomStrings(PAGE_SIZE) }],
};
let secondResponse = {
Trash: [{ ShareID: '1', LinkIDs: generateArrayOfRandomStrings(1) }],
};
mockRequest.mockResolvedValueOnce(firstResponse).mockResolvedValueOnce(secondResponse);
const { loadTrashedLinks } = hook.current;
await act(async () => {
await loadTrashedLinks(new AbortController().signal, volumeId, () =>
Promise.resolve({ links: [], parents: [], errors: [] })
);
});
expect(queryVolumeTrashMock).toHaveBeenCalledWith(volumeId, { Page: page + 1, PageSize: PAGE_SIZE });
// Verify that pagination stops after the second, non-full page
expect(queryVolumeTrashMock).toHaveBeenCalledTimes(2);
});
});
| 3,100 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useTrashedLinksListing.tsx |
import { useCallback, useRef } from 'react';
import { queryVolumeTrash } from '@proton/shared/lib/api/drive/volume';
import { ListDriveVolumeTrashPayload } from '@proton/shared/lib/interfaces/drive/volume';
import { useDebouncedRequest } from '../../_api';
import useVolumesState from '../../_volumes/useVolumesState';
import { DecryptedLink } from './../interface';
import useLinksState from './../useLinksState';
import { FetchLoadLinksMeta } from './interface';
import { DEFAULT_SORTING, FetchMeta, PAGE_SIZE, SortParams, useLinksListingHelpers } from './useLinksListingHelpers';
interface FetchTrashMeta extends FetchMeta {
lastPage: number;
lastSorting: SortParams;
}
type TrashFetchState = {
[volumeId: string]: FetchTrashMeta;
};
/**
* Custom hook for managing and fetching trashed links for a given volume.
*/
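// Minimal usage sketch (assumption: the caller runs inside the Drive store
// providers and already has a `volumeId` and a `loadLinksMeta` callback):
//
//   const { loadTrashedLinks, getCachedTrashed } = useTrashedLinksListing();
//   const ac = new AbortController();
//   await loadTrashedLinks(ac.signal, volumeId, loadLinksMeta);
//   const { links, isDecrypting } = getCachedTrashed(ac.signal, volumeId);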
export function useTrashedLinksListing() {
const debouncedRequest = useDebouncedRequest();
const linksState = useLinksState();
const volumesState = useVolumesState();
const { loadFullListing, getDecryptedLinksAndDecryptRest } = useLinksListingHelpers();
const trashFetchState = useRef<TrashFetchState>({});
const getTrashFetchState = useCallback((volumeId: string) => {
if (trashFetchState.current[volumeId]) {
return trashFetchState.current[volumeId];
}
trashFetchState.current[volumeId] = {
lastPage: 0,
lastSorting: DEFAULT_SORTING,
};
return trashFetchState.current[volumeId];
}, []);
const queryVolumeTrashPage = async (
volumeId: string,
page: number
): Promise<{ response: ListDriveVolumeTrashPayload; hasNextPage: boolean }> => {
const response = await debouncedRequest<ListDriveVolumeTrashPayload>(
queryVolumeTrash(volumeId, { Page: page, PageSize: PAGE_SIZE })
);
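// A full page implies more items may remain on the server; a short page
// means we reached the end of the trash listing.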
const totalLinks = response.Trash.reduce((acc, trash) => acc + trash.LinkIDs.length, 0);
const hasNextPage = totalLinks >= PAGE_SIZE;
return {
response,
hasNextPage,
};
};
const loadTrashedLinksMeta = async (
signal: AbortSignal,
transformedResponse: {
[shareId: string]: { linkIds: string[]; parentIds: string[] };
},
loadLinksMeta: FetchLoadLinksMeta
) => {
for (const shareId in transformedResponse) {
await loadLinksMeta(signal, 'trash', shareId, transformedResponse[shareId].linkIds);
}
};
const fetchTrashedLinksNextPage = async (
signal: AbortSignal,
volumeId: string,
loadLinksMeta: FetchLoadLinksMeta
): Promise<boolean> => {
let trashFetchMeta = getTrashFetchState(volumeId);
if (trashFetchMeta.isEverythingFetched) {
return false;
}
const { response, hasNextPage } = await queryVolumeTrashPage(volumeId, trashFetchMeta.lastPage);
const volumeShareIds = response.Trash.map((share) => share.ShareID);
volumesState.setVolumeShareIds(volumeId, volumeShareIds);
const transformedResponse = transformTrashResponseToLinkMap(response);
await loadTrashedLinksMeta(signal, transformedResponse, loadLinksMeta);
trashFetchMeta.lastPage++;
trashFetchMeta.isEverythingFetched = !hasNextPage;
return hasNextPage;
};
/**
* Loads trashed links for a given volume.
*/
const loadTrashedLinks = async (
signal: AbortSignal,
volumeId: string,
loadLinksMeta: FetchLoadLinksMeta
): Promise<void> => {
return loadFullListing(() => fetchTrashedLinksNextPage(signal, volumeId, loadLinksMeta));
};
/**
* Gets trashed links that have already been fetched and cached.
*/
const getCachedTrashed = useCallback(
(abortSignal: AbortSignal, volumeId?: string): { links: DecryptedLink[]; isDecrypting: boolean } => {
if (!volumeId) {
return {
links: [],
isDecrypting: false,
};
}
const associatedShareIds = volumesState.getVolumeShareIds(volumeId);
const result = associatedShareIds.map((shareId) => {
return getDecryptedLinksAndDecryptRest(
abortSignal,
shareId,
linksState.getTrashed(shareId),
getTrashFetchState(volumeId)
);
});
const links = result.reduce<DecryptedLink[]>((acc, element) => {
return [...acc, ...element.links];
}, []);
const isDecrypting = result.some((element) => {
return element.isDecrypting;
});
return {
links,
isDecrypting,
};
},
[linksState.getTrashed]
);
return {
loadTrashedLinks,
getCachedTrashed,
};
}
/**
* Transforms a trash response from the API into an object with share IDs as keys,
* and link IDs and parent IDs as values.
*/
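// Example with hypothetical values:
//   { Trash: [{ ShareID: 's1', LinkIDs: ['l1', 'l2'], ParentIDs: ['p1'] }] }
//   -> { s1: { linkIds: ['l1', 'l2'], parentIds: ['p1'] } }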
function transformTrashResponseToLinkMap(response: ListDriveVolumeTrashPayload) {
return response.Trash.reduce<{
[shareId: string]: {
linkIds: string[];
parentIds: string[];
};
}>((acc, share) => {
acc[share.ShareID] = {
linkIds: share.LinkIDs,
parentIds: share.ParentIDs,
};
return acc;
}, {});
}
| 3,101 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/PhotosProvider.tsx |
import { FC, createContext, useContext, useEffect, useState } from 'react';
import { useLoading } from '@proton/hooks/index';
import { queryDeletePhotosShare, queryPhotos } from '@proton/shared/lib/api/drive/photos';
import type { Photo as PhotoPayload } from '@proton/shared/lib/interfaces/drive/photos';
import { photoPayloadToPhotos, useDebouncedRequest } from '../_api';
import { ShareWithKey, useDefaultShare } from '../_shares';
import type { Photo } from './interface';
export const PhotosContext = createContext<{
shareId?: string;
linkId?: string;
volumeId?: string;
hasPhotosShare: boolean;
isLoading: boolean;
photos: Photo[];
loadPhotos: (abortSignal: AbortSignal, volumeId: string) => void;
removePhotosFromCache: (linkIds: string[]) => void;
deletePhotosShare: (volumeId: string, shareId: string) => Promise<void>;
} | null>(null);
export const PhotosProvider: FC = ({ children }) => {
const [photosShare, setPhotosShare] = useState<ShareWithKey>();
const [share, setShare] = useState<ShareWithKey>();
const { getDefaultShare, getDefaultPhotosShare } = useDefaultShare();
const request = useDebouncedRequest();
const [photosLoading, withPhotosLoading] = useLoading();
const [photos, setPhotos] = useState<Photo[]>([]);
useEffect(() => {
void Promise.all([getDefaultShare().then(setShare), getDefaultPhotosShare().then(setPhotosShare)]);
}, []);
const loadPhotos = async (abortSignal: AbortSignal, volumeId: string) => {
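// Fetch pages recursively: each request passes the last link ID of the
// previous page, and results are appended to state as they arrive.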
const photoCall = async (lastLinkId?: string) => {
const { Photos, Code } = await request<{ Photos: PhotoPayload[]; Code: number }>(
queryPhotos(volumeId, {
PreviousPageLastLinkID: lastLinkId,
}),
abortSignal
);
if (Code === 1000 && !!Photos.length) {
const photosData = Photos.map(photoPayloadToPhotos);
setPhotos((prevPhotos) => [...prevPhotos, ...photosData]);
void photoCall(photosData[photosData.length - 1].linkId);
}
};
void withPhotosLoading(photoCall());
};
const removePhotosFromCache = (linkIds: string[]) => {
setPhotos((prevPhotos) => {
return prevPhotos.filter((photo) => !linkIds.includes(photo.linkId));
});
};
const deletePhotosShare = async (volumeId: string, shareId: string): Promise<void> => {
await request(queryDeletePhotosShare(volumeId, shareId));
};
if (!share) {
return <PhotosContext.Provider value={null}>{children}</PhotosContext.Provider>;
}
return (
<PhotosContext.Provider
value={{
shareId: photosShare?.shareId,
linkId: photosShare?.rootLinkId,
volumeId: photosShare?.volumeId,
hasPhotosShare: !!photosShare,
isLoading: (!share && !photosShare) || photosLoading,
photos,
loadPhotos,
removePhotosFromCache,
deletePhotosShare,
}}
>
{children}
</PhotosContext.Provider>
);
};
export function usePhotos() {
const state = useContext(PhotosContext);
if (!state) {
throw new Error('Trying to use uninitialized PhotosProvider');
}
return state;
}
| 3,102 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/exifInfo.ts |
import ExifReader from 'exifreader';
import type { ExifTags, ExpandedTags } from 'exifreader';
import { CryptoProxy, PrivateKeyReference } from '@proton/crypto';
import { encodeBase64 } from '@proton/crypto/lib/utils';
import { isSVG } from '@proton/shared/lib/helpers/mimetype';
import { convertSubjectAreaToSubjectCoordinates, formatExifDateTime } from './utils';
export const getExifInfo = async (file: File, mimeType: string): Promise<ExpandedTags | undefined> => {
if (isSVG(mimeType)) {
return undefined;
}
return file.arrayBuffer().then((buffer) => {
// In case of an error, return empty exif
try {
// Note: XMP reading is disabled because DOMParser is not available in Workers (package.json > exifreader)
return ExifReader.load(buffer, { expanded: true });
} catch (err) {
return undefined;
}
});
};
export async function encryptExifInfo(
exifInfo: ExpandedTags,
nodePrivateKey: PrivateKeyReference,
addressPrivateKey: PrivateKeyReference
) {
const exifInfoString = JSON.stringify(exifInfo);
const { message } = await CryptoProxy.encryptMessage({
textData: exifInfoString,
encryptionKeys: nodePrivateKey,
signingKeys: addressPrivateKey,
compress: true,
context: { value: 'drive.photo.exif', critical: true },
});
return encodeBase64(message);
}
export const getCaptureDateTime = (file: File, exif?: ExifTags) => {
const formattedDateTime = exif?.DateTime?.value[0] ? formatExifDateTime(exif?.DateTime?.value[0]) : undefined;
// If file.lastModified is not known, the current date will be returned:
// https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified
const captureDateTime = new Date(formattedDateTime || file.lastModified);
return captureDateTime;
};
export const getPhotoDimensions = ({ exif, png }: ExpandedTags): { width?: number; height?: number } => ({
width: exif?.ImageWidth?.value || exif?.PixelXDimension?.value || png?.['Image Width']?.value,
height: exif?.ImageLength?.value || exif?.PixelYDimension?.value || png?.['Image Height']?.value,
});
export const getPhotoExtendedAttributes = ({ exif, gps }: ExpandedTags) => ({
location:
gps?.Latitude && gps?.Longitude
? {
latitude: gps.Latitude,
longitude: gps.Longitude,
}
: undefined,
camera: exif
? {
device: exif.Model?.value[0],
orientation: exif.Orientation?.value,
captureTime: exif?.DateTime?.value[0]
? new Date(formatExifDateTime(exif?.DateTime?.value[0])).toISOString()
: undefined,
subjectCoordinates: exif.SubjectArea?.value
? convertSubjectAreaToSubjectCoordinates(exif.SubjectArea.value)
: undefined,
}
: undefined,
});
| 3,103 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/index.ts |
export { usePhotosFeatureFlag } from './usePhotosFeatureFlag';
export { usePhotosRecovery } from './usePhotosRecovery';
export { PhotosProvider, PhotosContext, usePhotos } from './PhotosProvider';
export * from './utils';
export * from './interface';
| 3,104 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/interface.ts |
import { DeepPartial } from '@proton/shared/lib/interfaces';
import type { DecryptedLink } from '../_links/interface';
export interface Photo {
linkId: string;
captureTime: number;
mainPhotoLinkId?: string;
exif?: string;
hash?: string;
contentHash?: string;
}
export type PhotoLink = DeepPartial<DecryptedLink> & {
linkId: string;
// If the link is in photos share it should always have activeRevision
activeRevision: DeepPartial<DecryptedLink['activeRevision']> & {
photo: Photo;
};
};
export type PhotoGroup = string;
export type PhotoGridItem = PhotoLink | PhotoGroup;
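// A photo grid is a flat list that mixes group labels with photo links, e.g.
// ['This month', photoLinkA, 'May 2022', photoLinkB] (hypothetical values;
// see sortWithCategories in ./utils).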
| 3,105 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/usePhotosFeatureFlag.ts |
import { useFlag } from '@proton/components';
export const usePhotosFeatureFlag = () => useFlag('DrivePhotos');
| 3,106 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/usePhotosRecovery.test.ts |
import { act, renderHook } from '@testing-library/react-hooks';
import { SupportedMimeTypes } from '@proton/shared/lib/drive/constants';
import { getItem, removeItem, setItem } from '@proton/shared/lib/helpers/storage';
import { DecryptedLink, useLinksActions, useLinksListing } from '../_links';
import useSharesState from '../_shares/useSharesState';
import { usePhotos } from './PhotosProvider';
import { RECOVERY_STATE, usePhotosRecovery } from './usePhotosRecovery';
function generateDecryptedLink(linkId = 'linkId'): DecryptedLink {
return {
encryptedName: 'name',
name: 'name',
linkId,
createTime: 323212,
digests: { sha1: '' },
fileModifyTime: 323212,
parentLinkId: 'parentLinkId',
isFile: true,
mimeType: SupportedMimeTypes.jpg,
hash: 'hash',
size: 233,
metaDataModifyTime: 323212,
trashed: 0,
hasThumbnail: false,
isShared: false,
rootShareId: 'rootShareId',
};
}
jest.mock('../_links', () => {
const useLinksActions = jest.fn();
const useLinksListing = jest.fn();
return { useLinksActions, useLinksListing };
});
jest.mock('../_shares/useSharesState');
jest.mock('./PhotosProvider', () => {
return {
usePhotos: jest.fn(),
};
});
jest.mock('../_utils', () => ({
waitFor: jest.fn().mockImplementation(async (callback) => {
callback();
}),
}));
jest.mock('@proton/shared/lib/helpers/storage', () => ({
getItem: jest.fn(),
removeItem: jest.fn(),
setItem: jest.fn(),
}));
jest.mock('../../utils/errorHandling');
const mockedRemoveItem = jest.mocked(removeItem);
const mockedGetItem = jest.mocked(getItem);
const mockedSetItem = jest.mocked(setItem);
const getAllResultState = (
all: (
| Error
| {
needsRecovery: boolean;
countOfUnrecoveredLinksLeft: number;
countOfFailedLinks: number;
start: () => void;
state: RECOVERY_STATE;
}
)[]
) => {
const allResultState: {
state: RECOVERY_STATE;
}[] = all.map((allResult: any) => allResult.state);
return allResultState;
};
describe('usePhotosRecovery', () => {
const links = [generateDecryptedLink('linkId1'), generateDecryptedLink('linkId2')];
const mockedUsePhotos = jest.mocked(usePhotos);
const mockedUseLinksListing = jest.mocked(useLinksListing);
const mockedUseLinksActions = jest.mocked(useLinksActions);
const mockedUseShareState = jest.mocked(useSharesState);
const mockedGetCachedChildren = jest.fn();
const mockedLoadChildren = jest.fn();
const mockedMoveLinks = jest.fn();
const mockedDeletePhotosShare = jest.fn();
// @ts-ignore
mockedUseLinksListing.mockReturnValue({
loadChildren: mockedLoadChildren,
getCachedChildren: mockedGetCachedChildren,
});
// @ts-ignore
mockedUsePhotos.mockReturnValue({
shareId: 'shareId',
linkId: 'linkId',
deletePhotosShare: mockedDeletePhotosShare,
});
// @ts-ignore
mockedUseLinksActions.mockReturnValue({
moveLinks: mockedMoveLinks,
});
// @ts-ignore
mockedUseShareState.mockReturnValue({
getRestoredPhotosShares: () => [
{
shareId: 'shareId',
rootLinkId: 'rootLinkId',
volumeId: 'volumeId',
creator: 'creator',
isLocked: false,
isDefault: false,
isVolumeSoftDeleted: false,
possibleKeyPackets: ['dsad'],
type: 4,
state: 1,
},
],
});
beforeEach(() => {
jest.clearAllMocks();
mockedDeletePhotosShare.mockResolvedValue(undefined);
mockedLoadChildren.mockResolvedValue(undefined);
mockedMoveLinks.mockImplementation(
async (abortSignal: AbortSignal, { linkIds, onMoved }: { linkIds: string[]; onMoved?: () => void }) => {
// Reproduce the async behavior of moveLinks
linkIds.forEach(() => setTimeout(() => onMoved?.(), 10));
}
);
});
it('should pass all state if files need to be recovered', async () => {
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Decrypting step
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Preparing step
mockedGetCachedChildren.mockReturnValueOnce({ links: [], isDecrypting: false }); // Deleting step
const { result, waitFor } = renderHook(() => usePhotosRecovery());
act(() => {
result.current.start();
});
await waitFor(() => expect(result.current.countOfUnrecoveredLinksLeft).toEqual(2));
await waitFor(() => expect(result.current.countOfUnrecoveredLinksLeft).toEqual(0));
expect(result.current.state).toEqual('SUCCEED');
expect(mockedGetCachedChildren).toHaveBeenCalledTimes(3);
expect(mockedMoveLinks).toHaveBeenCalledTimes(1);
expect(mockedLoadChildren).toHaveBeenCalledTimes(1);
expect(mockedDeletePhotosShare).toHaveBeenCalledTimes(1);
expect(result.current.countOfUnrecoveredLinksLeft).toEqual(0);
expect(getAllResultState(result.all)).toStrictEqual([
'READY',
'READY',
'STARTED',
'DECRYPTING',
'DECRYPTED',
'PREPARING',
'PREPARING',
'PREPARING', // 3 times Preparing because of React states changes
'PREPARED',
'MOVING',
'MOVED',
'MOVED',
'MOVED',
'CLEANING',
'SUCCEED',
]);
expect(mockedRemoveItem).toHaveBeenCalledTimes(1);
expect(mockedRemoveItem).toHaveBeenCalledWith('photos-recovery-state');
});
it('should pass and set errors count if some moves failed', async () => {
mockedGetCachedChildren.mockClear();
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Decrypting step
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Preparing step
mockedGetCachedChildren.mockReturnValueOnce({ links: [links[0]], isDecrypting: false }); // Deleting step
mockedMoveLinks.mockImplementation(
async (
abortSignal: AbortSignal,
{ linkIds, onMoved, onError }: { linkIds: string[]; onMoved?: () => void; onError?: () => void }
) => {
linkIds.forEach((linkId) => {
if (linkId === 'linkId2') {
onError?.();
} else {
onMoved?.();
}
});
}
);
const { result, waitFor } = renderHook(() => usePhotosRecovery());
act(() => {
result.current.start();
});
await waitFor(() => expect(result.current.countOfUnrecoveredLinksLeft).toEqual(2));
expect(result.current.countOfFailedLinks).toEqual(1);
expect(result.current.countOfUnrecoveredLinksLeft).toEqual(0);
expect(result.current.state).toEqual('FAILED');
expect(mockedDeletePhotosShare).toHaveBeenCalledTimes(0);
expect(getAllResultState(result.all)).toStrictEqual([
'READY',
'READY',
'STARTED',
'DECRYPTING',
'DECRYPTED',
'PREPARING',
'PREPARING',
'PREPARING', // 3 times Preparing because of React states changes
'PREPARED',
'MOVING',
'MOVED',
'CLEANING',
'FAILED',
]);
expect(mockedGetItem).toHaveBeenCalledTimes(1);
expect(mockedSetItem).toHaveBeenCalledTimes(2);
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'progress');
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'failed');
});
it('should failed if deleteShare failed', async () => {
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Decrypting step
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Preparing step
mockedGetCachedChildren.mockReturnValueOnce({ links: [], isDecrypting: false }); // Deleting step
mockedDeletePhotosShare.mockRejectedValue(undefined);
const { result, waitFor } = renderHook(() => usePhotosRecovery());
act(() => {
result.current.start();
});
await waitFor(() => expect(result.current.state).toEqual('FAILED'));
expect(mockedDeletePhotosShare).toHaveBeenCalledTimes(1);
expect(getAllResultState(result.all)).toStrictEqual([
'READY',
'READY',
'STARTED',
'DECRYPTING',
'DECRYPTED',
'PREPARING',
'PREPARING',
'PREPARING', // 3 times Preparing because of React states changes
'PREPARED',
'MOVING',
'MOVED',
'MOVED',
'MOVED',
'CLEANING',
'FAILED',
]);
expect(mockedGetItem).toHaveBeenCalledTimes(1);
expect(mockedSetItem).toHaveBeenCalledTimes(2);
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'progress');
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'failed');
});
it('should failed if loadChildren failed', async () => {
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Decrypting step
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Preparing step
mockedGetCachedChildren.mockReturnValueOnce({ links: [], isDecrypting: false }); // Deleting step
mockedLoadChildren.mockRejectedValue(undefined);
const { result, waitFor } = renderHook(() => usePhotosRecovery());
act(() => {
result.current.start();
});
await waitFor(() => expect(result.current.state).toEqual('FAILED'));
expect(mockedDeletePhotosShare).toHaveBeenCalledTimes(0);
expect(mockedGetCachedChildren).toHaveBeenCalledTimes(0);
expect(getAllResultState(result.all)).toStrictEqual(['READY', 'READY', 'STARTED', 'DECRYPTING', 'FAILED']);
expect(mockedGetItem).toHaveBeenCalledTimes(1);
expect(mockedSetItem).toHaveBeenCalledTimes(2);
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'progress');
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'failed');
});
it('should failed if moveLinks helper failed', async () => {
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Decrypting step
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Preparing step
mockedGetCachedChildren.mockReturnValueOnce({ links: [], isDecrypting: false }); // Deleting step
mockedMoveLinks.mockRejectedValue(undefined);
const { result, waitFor } = renderHook(() => usePhotosRecovery());
act(() => {
result.current.start();
});
await waitFor(() => expect(result.current.state).toEqual('FAILED'));
expect(mockedDeletePhotosShare).toHaveBeenCalledTimes(0);
expect(mockedMoveLinks).toHaveBeenCalledTimes(1);
expect(mockedGetCachedChildren).toHaveBeenCalledTimes(2);
expect(getAllResultState(result.all)).toStrictEqual([
'READY',
'READY',
'STARTED',
'DECRYPTING',
'DECRYPTED',
'PREPARING',
'PREPARING',
'PREPARING', // 3 times Preparing because of React states changes
'PREPARED',
'MOVING',
'FAILED',
]);
expect(mockedGetItem).toHaveBeenCalledTimes(1);
expect(mockedSetItem).toHaveBeenCalledTimes(2);
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'progress');
expect(mockedSetItem).toHaveBeenCalledWith('photos-recovery-state', 'failed');
});
it('should start the process if localStorage value was set to progress', async () => {
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Decrypting step
mockedGetCachedChildren.mockReturnValueOnce({ links, isDecrypting: false }); // Preparing step
mockedGetCachedChildren.mockReturnValueOnce({ links: [], isDecrypting: false }); // Deleting step
mockedGetItem.mockReturnValueOnce('progress');
const { result, waitFor } = renderHook(() => usePhotosRecovery());
await waitFor(() => expect(result.current.state).toEqual('SUCCEED'));
expect(getAllResultState(result.all)).toStrictEqual([
'READY',
'STARTED',
'DECRYPTING',
'DECRYPTED',
'PREPARING',
'PREPARING',
'PREPARING',
'PREPARED',
'MOVING',
'MOVED',
'MOVED',
'MOVED',
'CLEANING',
'SUCCEED',
]);
expect(mockedGetItem).toHaveBeenCalledTimes(1);
});
it('should set state to failed if localStorage value was set to failed', async () => {
mockedGetItem.mockReturnValueOnce('failed');
const { result, waitFor } = renderHook(() => usePhotosRecovery());
await waitFor(() => expect(result.current.state).toEqual('FAILED'));
expect(getAllResultState(result.all)).toStrictEqual(['READY', 'FAILED']);
});
});
| 3,107 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/usePhotosRecovery.ts |
import { useCallback, useEffect, useState } from 'react';
import { getItem, removeItem, setItem } from '@proton/shared/lib/helpers/storage';
import { sendErrorReport } from '../../utils/errorHandling';
import { DecryptedLink, useLinksActions, useLinksListing } from '../_links';
import { Share, ShareWithKey } from '../_shares';
import useSharesState from '../_shares/useSharesState';
import { waitFor } from '../_utils';
import { usePhotos } from './PhotosProvider';
export type RECOVERY_STATE =
| 'READY'
| 'STARTED'
| 'DECRYPTING'
| 'DECRYPTED'
| 'PREPARING'
| 'PREPARED'
| 'MOVING'
| 'MOVED'
| 'CLEANING'
| 'SUCCEED'
| 'FAILED';
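// Typical happy-path progression (FAILED can be entered from any step):
// READY -> STARTED -> DECRYPTING -> DECRYPTED -> PREPARING -> PREPARED
// -> MOVING -> MOVED -> CLEANING -> SUCCEED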
const RECOVERY_STATE_CACHE_KEY = 'photos-recovery-state';
export const usePhotosRecovery = () => {
const { shareId, linkId, deletePhotosShare } = usePhotos();
const { getRestoredPhotosShares } = useSharesState();
const { getCachedChildren, loadChildren } = useLinksListing();
const { moveLinks } = useLinksActions();
const [countOfUnrecoveredLinksLeft, setCountOfUnrecoveredLinksLeft] = useState<number>(0);
const [countOfFailedLinks, setCountOfFailedLinks] = useState<number>(0);
const [state, setState] = useState<RECOVERY_STATE>('READY');
const [restoredData, setRestoredData] = useState<{ links: DecryptedLink[]; shareId: string }[]>([]);
const [needsRecovery, setNeedsRecovery] = useState<boolean>(false);
const [restoredShares, setRestoredShares] = useState<Share[] | ShareWithKey[] | undefined>();
useEffect(() => {
const shares = getRestoredPhotosShares();
setRestoredShares(shares);
setNeedsRecovery(!!shares?.length);
}, [getRestoredPhotosShares]);
const handleFailed = (e: Error) => {
setState('FAILED');
setItem(RECOVERY_STATE_CACHE_KEY, 'failed');
sendErrorReport(e);
};
const handleDecryptLinks = useCallback(
async (abortSignal: AbortSignal, shares: Share[] | ShareWithKey[]) => {
for (const share of shares) {
await loadChildren(abortSignal, share.shareId, share.rootLinkId);
await waitFor(
() => {
const { isDecrypting } = getCachedChildren(abortSignal, share.shareId, share.rootLinkId);
return !isDecrypting;
},
{ abortSignal }
);
}
},
[getCachedChildren, loadChildren]
);
const handlePrepareLinks = useCallback(
async (abortSignal: AbortSignal, shares: Share[] | ShareWithKey[]) => {
let allRestoredData: { links: DecryptedLink[]; shareId: string }[] = [];
let totalNbLinks: number = 0;
for (const share of shares) {
const { links } = getCachedChildren(abortSignal, share.shareId, share.rootLinkId);
allRestoredData.push({
links,
shareId: share.shareId,
});
totalNbLinks += links.length;
}
return { allRestoredData, totalNbLinks };
},
[getCachedChildren]
);
const safelyDeleteShares = useCallback(
async (abortSignal: AbortSignal, shares: Share[] | ShareWithKey[]) => {
for (const share of shares) {
const { links } = getCachedChildren(abortSignal, share.shareId, share.rootLinkId);
if (!links.length) {
await deletePhotosShare(share.volumeId, share.shareId);
}
}
},
[deletePhotosShare, getCachedChildren]
);
const handleMoveLinks = useCallback(
async (
abortSignal: AbortSignal,
{
dataList,
newLinkId,
}: {
dataList: { links: DecryptedLink[]; shareId: string }[];
newLinkId: string;
}
) => {
for (const data of dataList) {
await moveLinks(abortSignal, {
shareId: data.shareId,
linkIds: data.links.map((link) => link.linkId),
newParentLinkId: newLinkId,
newShareId: shareId,
onMoved: () => setCountOfUnrecoveredLinksLeft((prevState) => prevState - 1),
onError: () => {
setCountOfUnrecoveredLinksLeft((prevState) => prevState - 1);
setCountOfFailedLinks((prevState) => prevState + 1);
},
});
}
},
[moveLinks, shareId]
);
useEffect(() => {
if (state !== 'STARTED' || !linkId || !restoredShares) {
return;
}
const abortController = new AbortController();
setState('DECRYPTING');
void handleDecryptLinks(abortController.signal, restoredShares)
.then(() => {
setState('DECRYPTED');
})
.catch(handleFailed);
}, [handleDecryptLinks, linkId, restoredShares, state]);
useEffect(() => {
const abortController = new AbortController();
if (state !== 'DECRYPTED' || !restoredShares) {
return;
}
setState('PREPARING');
void handlePrepareLinks(abortController.signal, restoredShares)
.then(({ allRestoredData, totalNbLinks }) => {
setRestoredData(allRestoredData);
if (!!totalNbLinks) {
setCountOfUnrecoveredLinksLeft(totalNbLinks);
}
setState('PREPARED');
})
.catch(handleFailed);
return () => {
abortController.abort();
};
}, [handlePrepareLinks, restoredShares, state]);
useEffect(() => {
if (state !== 'PREPARED' || !linkId) {
return;
}
const abortController = new AbortController();
setState('MOVING');
void handleMoveLinks(abortController.signal, {
newLinkId: linkId,
dataList: restoredData,
})
.then(() => {
setState('MOVED');
})
.catch(handleFailed);
// Moved is done in the background, so we don't abort it on rerender
}, [countOfUnrecoveredLinksLeft, handleMoveLinks, linkId, restoredData, state]);
useEffect(() => {
if (state !== 'MOVED' || !restoredShares || countOfUnrecoveredLinksLeft !== 0) {
return;
}
const abortController = new AbortController();
setState('CLEANING');
void safelyDeleteShares(abortController.signal, restoredShares)
.then(() => {
// We still want to remove empty shares if possible,
// but we should say to the user that it failed since not every file were recovered
if (countOfFailedLinks) {
return Promise.reject(new Error('Failed to move recovered photos'));
}
removeItem(RECOVERY_STATE_CACHE_KEY);
setState('SUCCEED');
})
.catch(handleFailed);
return () => {
abortController.abort();
};
}, [countOfFailedLinks, countOfUnrecoveredLinksLeft, restoredShares, safelyDeleteShares, state]);
const start = useCallback(() => {
setItem(RECOVERY_STATE_CACHE_KEY, 'progress');
setState('STARTED');
}, []);
useEffect(() => {
if (state !== 'READY') {
return;
}
const cachedRecoveryState = getItem(RECOVERY_STATE_CACHE_KEY);
if (cachedRecoveryState === 'progress') {
setState('STARTED');
} else if (cachedRecoveryState === 'failed') {
setState('FAILED');
}
}, [state]);
return {
needsRecovery,
countOfUnrecoveredLinksLeft,
countOfFailedLinks,
start,
state,
};
};
| 3,108 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/convertSubjectAreaToSubjectCoordinates.test.ts |
import { convertSubjectAreaToSubjectCoordinates } from './convertSubjectAreaToSubjectCoordinates';
describe('convertSubjectAreaToSubjectCoordinates()', () => {
it("should correctly convert exif's SubjectArea with X,Y to SubjectCoordinates", () => {
const subjectArea = [232, 643];
expect(convertSubjectAreaToSubjectCoordinates(subjectArea)).toEqual({
top: 643,
left: 232,
bottom: 643,
right: 232,
});
});
it("should correctly convert exif's SubjectArea with X,Y,Diameter to SubjectCoordinates", () => {
const subjectArea = [232, 643, 142];
expect(convertSubjectAreaToSubjectCoordinates(subjectArea)).toEqual({
top: 572,
left: 161,
bottom: 714,
right: 303,
});
});
it("should correctly convert exif's SubjectArea X,Y,Width,Height to SubjectCoordinates", () => {
const subjectArea = [232, 643, 142, 432];
expect(convertSubjectAreaToSubjectCoordinates(subjectArea)).toEqual({
top: 427,
left: 161,
bottom: 859,
right: 303,
});
});
});
| 3,109 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/convertSubjectAreaToSubjectCoordinates.ts |
export const convertSubjectAreaToSubjectCoordinates = (subjectArea: number[]) => {
if (subjectArea.length === 2) {
// Case: X, Y
const [x, y] = subjectArea;
return { top: y, left: x, bottom: y, right: x };
} else if (subjectArea.length === 3) {
// Case: X, Y, Diameter
const [x, y, diameter] = subjectArea;
const radius = Math.floor(diameter / 2);
return { top: y - radius, left: x - radius, bottom: y + radius, right: x + radius };
} else if (subjectArea.length === 4) {
// Case: X, Y, Width, Length
const [x, y, width, height] = subjectArea;
const middleHeight = Math.floor(height / 2);
const middleWidth = Math.floor(width / 2);
return { top: y - middleHeight, left: x - middleWidth, bottom: y + middleHeight, right: x + middleWidth };
}
// Invalid subject area format
throw new Error('Invalid exif SubjectArea was passed');
};
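// Worked example (values from the unit tests above):
//   convertSubjectAreaToSubjectCoordinates([232, 643, 142])
//   -> { top: 572, left: 161, bottom: 714, right: 303 } (radius = floor(142 / 2) = 71)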
| 3,110 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/dateFormatter.ts |
import { dateLocale } from '@proton/shared/lib/i18n';
type FormatterCache = { code?: string; formatter?: Intl.DateTimeFormat };
// Creating Intl.DateTimeFormat objects is expensive, so we have a local cache
// Since dateLocale can mutate, we update it when the code updates.
let monthFormatterCache: FormatterCache = {};
let monthYearFormatterCache: FormatterCache = {};
const getCachedFormatter = (cache: FormatterCache, options: Intl.DateTimeFormatOptions) => {
if (cache.code !== dateLocale.code || !cache.formatter) {
cache.code = dateLocale.code;
cache.formatter = new Intl.DateTimeFormat(dateLocale.code, options);
}
return cache.formatter;
};
/**
* A cached Intl.DateTimeFormat object that outputs month names
*
* e.g. `January`
*/
export const getMonthFormatter = () => getCachedFormatter(monthFormatterCache, { month: 'long' });
/**
* A cached Intl.DateTimeFormat object that outputs month names with the year attached
*
* e.g. `January 1970`
*/
export const getMonthYearFormatter = () =>
getCachedFormatter(monthYearFormatterCache, { month: 'long', year: 'numeric' });
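// Usage sketch (assuming dateLocale resolves to 'en-US'):
//   getMonthFormatter().format(new Date(1970, 0, 15)); // 'January'
//   getMonthYearFormatter().format(new Date(1970, 0, 15)); // 'January 1970'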
| 3,111 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/formatExifDateTime.test.ts |
import { formatExifDateTime } from './formatExifDateTime';
describe('formatExifDateTime()', () => {
it("should correctly format exif's DateTime to standard Date format", () => {
const exifDateTime = '2023:07:21 22:12:01';
expect(formatExifDateTime(exifDateTime)).toEqual('2023-07-21 22:12:01');
});
it("should throw an error if exif's DateTime format is incorrect", () => {
const exifDateTime = '2023-07:21-22:12:01';
expect(() => formatExifDateTime(exifDateTime)).toThrowError(
`The DateTime passed is not in the right format (received: ${exifDateTime}, expected: YYYY:MM:DD HH:MM:SS)`
);
});
});
| 3,112 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/formatExifDateTime.ts |
export const formatExifDateTime = (exifDateTime: string) => {
const regex = /^\d{4}:\d{2}:\d{2} \d{2}:\d{2}:\d{2}$/;
if (!regex.test(exifDateTime)) {
throw new Error(
`The DateTime passed is not in the right format (received: ${exifDateTime}, expected: YYYY:MM:DD HH:MM:SS)`
);
}
const splitDate = exifDateTime.split(' ');
// Get the date part and replace ':' with '-'
const dateStr = splitDate[0].replace(/:/g, '-');
// Concatenate the date and time parts
return dateStr + ' ' + splitDate[1];
};
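// e.g. formatExifDateTime('2023:07:21 22:12:01') -> '2023-07-21 22:12:01'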
| 3,113 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/index.ts |
export { convertSubjectAreaToSubjectCoordinates } from './convertSubjectAreaToSubjectCoordinates';
export { formatExifDateTime } from './formatExifDateTime';
export { sortWithCategories } from './sortWithCategories';
export { isPhotoGroup } from './isPhotoGroup';
| 3,114 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/isPhotoGroup.ts |
import type { PhotoGroup } from '../interface';
export const isPhotoGroup = (item: unknown): item is PhotoGroup => typeof item === 'string';
| 3,115 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/sortWithCategories.test.ts |
import { fromUnixTime, getUnixTime } from 'date-fns';
import { PhotoLink } from '../interface';
import { sortWithCategories } from './sortWithCategories';
jest.mock('@proton/shared/lib/i18n', () => ({
dateLocale: {
code: 'en-US',
formatLong: {
time: jest.fn(),
},
},
}));
describe('sortWithCategories()', () => {
beforeAll(() => {
const unixDate = 1694096758; // Thu Sep 07 14:25:58 2023 UTC
jest.useFakeTimers().setSystemTime(fromUnixTime(unixDate));
});
afterAll(() => {
jest.useRealTimers();
});
it('should return sorted list with categories of photos', () => {
const photos: PhotoLink[] = [
{
linkId: '9d6c33a79feba8dd2fd768f58f450a7f2ff3ec2e',
name: 'This month',
activeRevision: {
photo: {
linkId: '9d6c33a79feba8dd2fd768f58f450a7f2ff3ec2e',
captureTime: getUnixTime(new Date()), // Today
},
},
},
{
linkId: '1a44a587ffbb4d38a04f68af1ddf6b30a74ff3b7',
name: '8 March 2022',
activeRevision: {
photo: {
linkId: '1a44a587ffbb4d38a04f68af1ddf6b30a74ff3b7',
captureTime: 1646743628, // 08/03/2022
},
},
},
{
linkId: '6d2a5651f974cc67d99cbdabd00560967a7bad10',
name: '7 July 2023',
activeRevision: {
photo: {
linkId: '6d2a5651f974cc67d99cbdabd00560967a7bad10',
captureTime: 1688731320, // 07/07/2023
},
},
},
{
linkId: '8ac290ecd3dcfe51ac2e81ba1dbbcc8b6a20b199',
name: '7 May 2022',
activeRevision: {
photo: {
linkId: '8ac290ecd3dcfe51ac2e81ba1dbbcc8b6a20b199',
captureTime: 1651924920, // 07/05/2022
},
},
},
];
const flattenPhotos = sortWithCategories([...photos]); // Destructure to keep origin reference
expect(flattenPhotos).toEqual([
'This month',
photos[0],
'July',
photos[2],
'May 2022',
photos[3],
'March 2022',
photos[1],
]);
});
});
| 3,116 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_photos/utils/sortWithCategories.ts |
import { fromUnixTime, isThisMonth, isThisYear } from 'date-fns';
import { c } from 'ttag';
import type { PhotoGridItem, PhotoGroup, PhotoLink } from '../interface';
import { getMonthFormatter, getMonthYearFormatter } from './dateFormatter';
const dateToCategory = (timestamp: number): PhotoGroup => {
const date = fromUnixTime(timestamp);
if (isThisMonth(date)) {
return c('Info').t`This month`;
} else if (isThisYear(date)) {
return getMonthFormatter().format(date);
}
return getMonthYearFormatter().format(date);
};
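// e.g. a capture time in the current month -> 'This month', earlier in the
// current year -> 'July', and in a previous year -> 'May 2022'.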
export const sortWithCategories = (data: PhotoLink[]): PhotoGridItem[] => {
const result: PhotoGridItem[] = [];
let lastGroup = '';
// Latest to oldest
data.sort((a, b) => b.activeRevision.photo.captureTime - a.activeRevision.photo.captureTime);
data.forEach((item) => {
const group = dateToCategory(item.activeRevision.photo.captureTime);
if (group !== lastGroup) {
lastGroup = group;
result.push(group);
}
result.push(item);
});
return result;
};
| 3,117 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_revisions/index.ts |
export { default as useRevisions } from './useRevisions';
export * from './interface';
| 3,118 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_revisions/interface.ts |
import { DriveFileBlock, Thumbnail } from '@proton/shared/lib/interfaces/drive/file';
export interface DriveFileRevision {
id: string;
createTime: number;
size: number;
state: number;
manifestSignature: string;
signatureAddress: string;
signatureEmail: string;
blocks: DriveFileBlock[];
thumbnails: Thumbnail[];
xAttr?: string;
}
| 3,119 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_revisions/useRevisions.test.ts |
import { renderHook } from '@testing-library/react-hooks';
import { VERIFICATION_STATUS } from '@proton/crypto';
import { getIsConnectionIssue } from '@proton/shared/lib/api/helpers/apiErrorHelper';
import { sendErrorReport } from '../../utils/errorHandling';
import { useDriveCrypto } from '../_crypto';
import { useDownload } from '../_downloads';
import { useLink } from '../_links';
import { ParsedExtendedAttributes, decryptExtendedAttributes } from '../_links/extendedAttributes';
import useRevisions from './useRevisions';
jest.mock('../_crypto');
jest.mock('../_downloads', () => ({
useDownload: jest.fn(),
}));
jest.mock('../_links', () => ({
useLink: jest.fn(),
}));
jest.mock('../_links/extendedAttributes');
jest.mock('../../utils/errorHandling');
jest.mock('@proton/shared/lib/api/helpers/apiErrorHelper');
const mockedGetVerificationKey = jest.fn();
const mockedGetLinkPrivateKey = jest.fn();
const mockedDecryptExtendedAttributes = jest.mocked(decryptExtendedAttributes);
const mockedSendErrorReport = jest.mocked(sendErrorReport);
const mockedGetIsConnectionIssue = jest.mocked(getIsConnectionIssue);
jest.mocked(useDriveCrypto).mockImplementation(() => ({
...jest.requireMock('../_crypto').useDriveCrypto,
getVerificationKey: mockedGetVerificationKey,
}));
jest.mocked(useLink).mockImplementation(() => ({
...jest.requireMock('../_links').useLink,
getLinkPrivateKey: mockedGetLinkPrivateKey,
}));
const mockedCheckFirstBlockSignature = jest.fn();
jest.mocked(useDownload).mockImplementation(() => ({
...jest.requireMock('../_downloads').useDownload,
checkFirstBlockSignature: mockedCheckFirstBlockSignature,
}));
const revisionXattrs: ParsedExtendedAttributes = {
Common: {
ModificationTime: 1681715947,
},
};
const revisionEncryptedXattrs = 'encryptedXattrs';
const revisionSignatureAddress = 'revisionSignatureAddress';
const shareId = 'shareId';
const linkId = 'linkId';
describe('useRevision', () => {
let abortSignal: AbortSignal;
beforeEach(() => {
abortSignal = new AbortController().signal;
});
it('getRevisionDecryptedXattrs', async () => {
mockedGetVerificationKey.mockResolvedValue(['key']);
mockedGetLinkPrivateKey.mockResolvedValue('privateKey');
mockedDecryptExtendedAttributes.mockResolvedValue({
xattrs: revisionXattrs,
verified: VERIFICATION_STATUS.SIGNED_AND_VALID,
});
const {
result: {
current: { getRevisionDecryptedXattrs },
},
} = renderHook(() => useRevisions(shareId, linkId));
const result = await getRevisionDecryptedXattrs(abortSignal, revisionEncryptedXattrs, revisionSignatureAddress);
expect(mockedGetVerificationKey).toHaveBeenCalledWith(revisionSignatureAddress);
expect(mockedGetLinkPrivateKey).toHaveBeenCalledWith(abortSignal, shareId, linkId);
expect(mockedDecryptExtendedAttributes).toHaveBeenCalledWith(revisionEncryptedXattrs, 'privateKey', ['key']);
expect(result).toStrictEqual({
xattrs: revisionXattrs,
signatureIssues: {
xattrs: VERIFICATION_STATUS.SIGNED_AND_VALID,
},
});
});
it('getRevisionDecryptedXattrs should sendErrorReport if a promise failed', async () => {
mockedDecryptExtendedAttributes.mockResolvedValue({
xattrs: revisionXattrs,
verified: VERIFICATION_STATUS.SIGNED_AND_VALID,
});
mockedGetVerificationKey.mockResolvedValue(['key']);
const error = new Error('getLinkPrivateKey error');
mockedGetLinkPrivateKey.mockRejectedValue(error);
const {
result: {
current: { getRevisionDecryptedXattrs },
},
} = renderHook(() => useRevisions(shareId, linkId));
const result = await getRevisionDecryptedXattrs(abortSignal, revisionEncryptedXattrs, revisionSignatureAddress);
expect(result).toBeUndefined();
expect(mockedSendErrorReport).toHaveBeenCalledWith(error);
});
it('checkRevisionSignature result should be undefined if no issues', async () => {
const revisionId = 'revisionId';
mockedCheckFirstBlockSignature.mockResolvedValueOnce(undefined);
const {
result: {
current: { checkRevisionSignature },
},
} = renderHook(() => useRevisions(shareId, linkId));
const result = await checkRevisionSignature(abortSignal, revisionId);
expect(mockedCheckFirstBlockSignature).toHaveBeenCalledWith(abortSignal, shareId, linkId, revisionId);
expect(result).toBeUndefined();
});
it('checkRevisionSignature should throw an error if there is connection issues', async () => {
const revisionId = 'revisionId';
const error = new Error('Network error');
mockedCheckFirstBlockSignature.mockRejectedValue(error);
mockedGetIsConnectionIssue.mockReturnValue(true);
const {
result: {
current: { checkRevisionSignature },
},
} = renderHook(() => useRevisions(shareId, linkId));
const errorResult = checkRevisionSignature(abortSignal, revisionId);
await expect(errorResult).rejects.toThrowError(error);
expect(mockedGetIsConnectionIssue).toHaveBeenCalledWith(error);
});
it('checkRevisionSignature should sendErrorReport and return signatureIssues', async () => {
const revisionId = 'revisionId';
const error = new Error('checkFirstBlockSignature error');
mockedCheckFirstBlockSignature.mockRejectedValue(error);
mockedGetIsConnectionIssue.mockReturnValue(false);
const {
result: {
current: { checkRevisionSignature },
},
} = renderHook(() => useRevisions(shareId, linkId));
const result = await checkRevisionSignature(abortSignal, revisionId);
expect(mockedSendErrorReport).toHaveBeenCalledWith(error);
expect(result).toStrictEqual({
contentKeyPacket: VERIFICATION_STATUS.NOT_SIGNED,
blocks: VERIFICATION_STATUS.NOT_SIGNED,
thumbnail: VERIFICATION_STATUS.NOT_SIGNED,
});
});
});
| 3,120 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_revisions/useRevisions.ts |
import { VERIFICATION_STATUS } from '@proton/crypto';
import { getIsConnectionIssue } from '@proton/shared/lib/api/helpers/apiErrorHelper';
import { sendErrorReport } from '../../utils/errorHandling';
import { useDriveCrypto } from '../_crypto';
import { useDownload } from '../_downloads';
import { useLink } from '../_links';
import { decryptExtendedAttributes } from '../_links/extendedAttributes';
const useRevisions = (shareId: string, linkId: string) => {
const { checkFirstBlockSignature } = useDownload();
const { getVerificationKey } = useDriveCrypto();
const { getLinkPrivateKey } = useLink();
const getRevisionDecryptedXattrs = async (
abortSignal: AbortSignal,
revisionEncryptedXattr: string | undefined,
revisionSignatureAddress: string
) => {
if (!revisionEncryptedXattr) {
return;
}
try {
const keys = await getVerificationKey(revisionSignatureAddress);
const privateKey = await getLinkPrivateKey(abortSignal, shareId, linkId);
const { xattrs, verified } = await decryptExtendedAttributes(revisionEncryptedXattr, privateKey, keys);
return {
xattrs,
signatureIssues: {
xattrs: verified,
},
};
} catch (err) {
sendErrorReport(err);
return;
}
};
const checkRevisionSignature = (abortSignal: AbortSignal, revisionId: string) => {
return checkFirstBlockSignature(abortSignal, shareId, linkId, revisionId).catch((e) => {
if (getIsConnectionIssue(e)) {
throw e;
}
sendErrorReport(e);
return {
contentKeyPacket: VERIFICATION_STATUS.NOT_SIGNED,
blocks: VERIFICATION_STATUS.NOT_SIGNED,
thumbnail: VERIFICATION_STATUS.NOT_SIGNED,
};
});
};
return {
getRevisionDecryptedXattrs,
checkRevisionSignature,
};
};
export default useRevisions;
| 3,121 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/constants.ts |
export const PAGE_SIZE = 500;
export const SESSION_EXPIRED_ERROR_CODE = 2501;
| 3,122 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/index.tsx |
import { SpotlightProvider } from '../../components/useSpotlight';
import { SearchLibraryProvider } from './useSearchLibrary';
import { SearchResultsProvider } from './useSearchResults';
export { default as useSearchEnabledFeature } from './useSearchEnabledFeature';
export { default as useSearchLibrary } from './useSearchLibrary';
export { default as useSearchResults } from './useSearchResults';
export function SearchProvider({ children }: { children: React.ReactNode }) {
return (
<SearchLibraryProvider>
<SearchResultsProvider>
<SpotlightProvider>{children}</SpotlightProvider>
</SearchResultsProvider>
</SearchLibraryProvider>
);
}
| 3,123 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/migration.ts |
/**
* This file contains all the code necessary to handle migration from
* the previous version of IndexedDB (version 1) to the new one (version 2).
* It ensures that all existing indexes are migrated to the new format,
* hopefully without loss of data. Note that we have a neat upper bound
* for when this migration code can be removed: three weeks. All users who
* log in within three weeks will have their IDB migrated, while those who
* don't will receive a refresh flag from the BE anyway; thus, once this
* migration code no longer exists, checkVersionedESDB will remove the old
* index.
*/
import { IDBPTransaction, openDB } from 'idb';
import {
ES_MAX_ITEMS_PER_BATCH,
GetUserKeys,
INDEXING_STATUS,
decryptIndexKey,
defaultESProgress,
removeESFlags,
} from '@proton/encrypted-search';
import { getItem } from '@proton/shared/lib/helpers/storage';
/**
* Interface of the old progress blob as we used to store in local
* storage, which includes the possibility for totalMessages still
* being there instead of totalItems
*/
/**
* Helpers to read old ES blobs in localStorage
*/
const getESBlobs = (userID: string) => ({
armoredIndexKey: getItem(`ES:${userID}:Key`),
lastEventID: getItem(`ES:${userID}:Event`),
progressBlob: JSON.parse(getItem(`ES:${userID}:BuildProgress`) || 'null'),
size: parseInt(getItem(`ES:${userID}:SizeIDB`) || '0', 10) || 0,
isPaused: getItem(`ES:${userID}:Pause`) === 'true',
isEnabled: getItem(`ES:${userID}:ESEnabled`) === 'true',
});
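// Copy ciphertexts from the old 'files' store into 'metadata' in batches,
// using the last-seen ID as an exclusive lower bound to page through the store.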
const moveCiphertexts = async (tx: IDBPTransaction<unknown, string[], 'versionchange'>) => {
const filesOS = tx.objectStore('files');
const metadataOS = tx.objectStore('metadata');
const count = await filesOS.count();
let recoveryPoint: string | undefined;
for (let batch = 0; batch < count; batch += ES_MAX_ITEMS_PER_BATCH) {
const storedData = await filesOS.getAll(
!!recoveryPoint ? IDBKeyRange.lowerBound(recoveryPoint, true) : undefined,
ES_MAX_ITEMS_PER_BATCH
);
await Promise.all(
storedData.map(({ aesGcmCiphertext, id, createTime, order }) =>
metadataOS.put({ aesGcmCiphertext, timepoint: [createTime, order] }, id)
)
);
recoveryPoint = storedData[storedData.length - 1].id;
}
};
/**
* There are three possible states of the old version of ES:
* 1. ES was never activated. A reliable way for checking
* this is if the index key in local storage doesn't exist.
* In this case nothing should be done;
* 2. ES was fully activated. A reliable way for checking
* this is if the index key exists in local storage and the
* progress blob doesn't, meaning that indexing completed.
* In this case we migrate old IDB and local storage to new
* IDB and then proceed with normal operations (i.e. catching
* up from last events);
* 3. ES was indexing. In this case we just act as if the migration
* failed, since drive doesn't allow recovering indexing
*/
export const migrate = async (userID: string, getUserKeys: GetUserKeys, promiseShareID: Promise<string>) => {
const { armoredIndexKey, progressBlob, lastEventID, size, isEnabled } = getESBlobs(userID);
// Case 1. ES was never activated
if (!armoredIndexKey) {
return true;
}
// We need the last event ID to be stored in the events
// table to then sync IDB from that point
if (!lastEventID) {
return false;
}
// Retrieve the encrypted key, which is needed in both the next two
// cases. If this operation fails, ES is in a corrupt state and
// should be deactivated
try {
await decryptIndexKey(getUserKeys, armoredIndexKey);
} catch (error: any) {
return false;
}
const shareID = await promiseShareID;
let success = true;
await openDB(`ES:${userID}:DB`, 2, {
upgrade: async (...args) => {
const [newESDB, , , tx] = args;
// Create the new object stores and fill them accordingly
const configOS = newESDB.createObjectStore('config');
await configOS.put(armoredIndexKey, 'indexKey');
await configOS.put(size, 'size');
await configOS.put(isEnabled, 'enabled');
await configOS.put(false, 'limited');
const eventsOS = newESDB.createObjectStore('events');
await eventsOS.put(lastEventID, shareID);
const indexingProgressOS = newESDB.createObjectStore('indexingProgress');
if (!progressBlob) {
// Case 2. ES was fully activated
await indexingProgressOS.put(
{
...defaultESProgress,
status: INDEXING_STATUS.ACTIVE,
},
'metadata'
);
} else {
// Case 3. ES was indexing
success = false;
return;
}
// Create the metadata and content object stored and move all ciphertexts
// from "files" into the former, such that "files" can be removed
const metadataOS = newESDB.createObjectStore('metadata');
metadataOS.createIndex('temporal', 'timepoint', { unique: true, multiEntry: false });
newESDB.createObjectStore('content');
await moveCiphertexts(tx);
newESDB.deleteObjectStore('files');
},
});
removeESFlags(userID);
return success;
};
| 3,124 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/types.ts |
import { EncryptedSearchFunctions } from '@proton/encrypted-search';
export interface ESLink {
createTime: number;
decryptedName: string;
id: string;
linkId: string;
MIMEType: string;
modifiedTime: number;
parentLinkId: string | null;
shareId: string;
size: number;
order: number;
}
export interface ESDriveSearchParams {
normalisedKeywords: string[] | undefined;
}
export interface EncryptedSearchFunctionsDrive
extends Pick<
EncryptedSearchFunctions<ESLink, ESDriveSearchParams>,
| 'handleEvent'
| 'encryptedSearch'
| 'enableEncryptedSearch'
| 'esDelete'
| 'esStatus'
| 'progressRecorderRef'
| 'esIndexingProgressState'
| 'cacheIndexedDB'
> {}
export interface Session {
sessionName: string;
total: number;
isDone: boolean;
}
| 3,125 |
0 | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store | petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/useESCallbacks.tsx |
import { useHistory } from 'react-router-dom';
import { PrivateKeyReference } from '@proton/crypto';
import {
CachedItem,
ESCallbacks,
ESEvent,
ESTimepoint,
EventsObject,
normalizeKeyword,
readAllLastEvents,
testKeywords,
} from '@proton/encrypted-search';
import { queryEvents, queryLatestEvents } from '@proton/shared/lib/api/drive/share';
import { hasBit } from '@proton/shared/lib/helpers/bitset';
import { Api, User } from '@proton/shared/lib/interfaces';
import { DriveEventsResult } from '@proton/shared/lib/interfaces/drive/events';
import { driveEventsResultToDriveEvents } from '../_api';
import { createLinkGenerator } from './indexing/createLinkGenerator';
import convertDriveEventsToSearchEvents from './indexing/processEvent';
import { FetchShareMap } from './indexing/useFetchShareMap';
import { ESDriveSearchParams, ESLink } from './types';
import { extractSearchParameters } from './utils';
interface Props {
api: Api;
user: User;
shareId: Promise<string>;
fetchShareMap: FetchShareMap;
getSharePrivateKey: (abortSignal: AbortSignal, shareId: string) => Promise<PrivateKeyReference>;
getLinkPrivateKey: (abortSignal: AbortSignal, shareId: string, linkId: string) => Promise<PrivateKeyReference>;
}
let linkMapGenerator: AsyncGenerator<ESLink[]>;
export const useESCallbacks = ({
api,
user,
shareId,
fetchShareMap,
getSharePrivateKey,
getLinkPrivateKey,
}: Props): ESCallbacks<ESLink, ESDriveSearchParams> => {
const history = useHistory();
const userID = user.ID;
const queryItemsMetadata = async (signal: AbortSignal) => {
if (!linkMapGenerator) {
const rootKey = await getSharePrivateKey(signal, await shareId);
linkMapGenerator = createLinkGenerator(await shareId, rootKey, { fetchShareMap });
}
const items = await linkMapGenerator.next();
return { resultMetadata: items.value || [] };
};
const getItemInfo = (item: ESLink): { ID: string; timepoint: ESTimepoint } => ({
ID: item.id,
timepoint: [item.createTime, item.order],
});
const searchKeywords = (keywords: string[], itemToSearch: CachedItem<ESLink, void>, hasApostrophe: boolean) =>
testKeywords(keywords, [itemToSearch.metadata.decryptedName], hasApostrophe);
const getSearchParams = () => {
const keyword = extractSearchParameters(history.location);
return {
isSearch: !!keyword,
esSearchParams: keyword ? { normalisedKeywords: normalizeKeyword(keyword) } : undefined,
};
};
const getPreviousEventID = async (): Promise<EventsObject> => {
const latestEvent = await api<{ EventID: string }>(queryLatestEvents(await shareId));
let eventsToStore: EventsObject = {};
eventsToStore[await shareId] = latestEvent.EventID;
return eventsToStore;
};
const getEventFromIDB = async (
previousEventsObject?: EventsObject
): Promise<{
newEvents: ESEvent<ESLink>[];
shouldRefresh: boolean;
eventsToStore: EventsObject;
}> => {
let eventsObject: EventsObject;
if (previousEventsObject) {
eventsObject = previousEventsObject;
} else {
const storedEventIDs = await readAllLastEvents(userID);
if (!storedEventIDs) {
throw new Error('No event stored');
}
eventsObject = storedEventIDs;
}
const initialShareEvent = await api<DriveEventsResult>(queryEvents(await shareId, eventsObject[await shareId]));
let keepSyncing = Boolean(initialShareEvent.More);
let index = 0;
const newEvents: DriveEventsResult[] = [initialShareEvent];
while (keepSyncing) {
const lastEventId = newEvents[index++].EventID;
const newEventToCheck = await api<DriveEventsResult>(queryEvents(await shareId, lastEventId));
if (!newEventToCheck || !newEventToCheck.EventID) {
throw new Error('No event found');
}
keepSyncing = Boolean(newEventToCheck.More);
if (newEventToCheck.EventID !== lastEventId) {
newEvents.push(newEventToCheck);
}
}
const resolvedShareId = await shareId;
const shouldRefresh = newEvents.some((event) => {
return hasBit(event.Refresh, 1);
});
let eventsToStore: EventsObject = {};
eventsToStore[await shareId] = newEvents[newEvents.length - 1].EventID;
return {
newEvents: await Promise.all(
newEvents
                    // Encrypted search can search only in my files through
                    // events per share, which do not include ContextShareID.
.map((event) => ({
...event,
Events: event.Events.map((item) => ({
...item,
ContextShareID: resolvedShareId,
})),
}))
.map((event) => driveEventsResultToDriveEvents(event))
.map((events) => convertDriveEventsToSearchEvents(resolvedShareId, events, getLinkPrivateKey))
),
shouldRefresh,
eventsToStore,
};
};
return {
getItemInfo,
queryItemsMetadata,
searchKeywords,
getTotalItems: (() => {
let total: number;
return async () => {
if (!total) {
                    // The Total property counts all files and folders, including the root
                    // folder, which is neither indexed nor shown to users. For ES purposes
                    // it should not be counted toward the total, hence the -1.
total = (await fetchShareMap({ shareId: await shareId })).Total - 1;
}
return total;
};
})(),
getKeywords: (esSearchParams: ESDriveSearchParams) => esSearchParams.normalisedKeywords,
getSearchParams,
getPreviousEventID,
getEventFromIDB,
};
};
| 3,126
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/useSearchEnabledFeature.tsx
|
import { isMobile, isSafari } from '@proton/shared/lib/helpers/browser';
export default function useSearchEnabledFeature() {
    // Safari has several issues.
    // First: it throttles a lot. The first tens of items are indexed fast,
    // but after ~500 items it gets very slow, and after ~2500 items it
    // basically stops making any progress.
    // Second: in some cases, even if indexing finishes, search sometimes
    // doesn't work, probably because the index is not created correctly.
    // There are just a few reported cases and we haven't found the issue yet.
    // Because of that, it's better to not allow search on Safari at all
    // until we find some way around it.
return !isSafari() && !isMobile();
}
| 3,127
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/useSearchLibrary.tsx
|
import { ReactNode, createContext, useContext, useEffect, useState } from 'react';
import { c } from 'ttag';
import { useApi, useGetUserKeys, useNotifications, useUser } from '@proton/components';
import {
INDEXING_STATUS,
checkVersionedESDB,
metadataIndexingProgress,
useEncryptedSearch,
} from '@proton/encrypted-search';
import { EVENT_TYPES } from '@proton/shared/lib/drive/constants';
import { isPaid } from '@proton/shared/lib/user/helpers';
import { useDriveEventManager } from '../_events';
import { useLink } from '../_links';
import { useDefaultShare, useShare } from '../_shares';
import convertDriveEventsToSearchEvents from './indexing/processEvent';
import useFetchShareMap from './indexing/useFetchShareMap';
import { migrate } from './migration';
import { ESDriveSearchParams, ESLink, EncryptedSearchFunctionsDrive } from './types';
import { useESCallbacks } from './useESCallbacks';
import useSearchEnabledFeature from './useSearchEnabledFeature';
const SearchLibraryContext = createContext<EncryptedSearchFunctionsDrive | null>(null);
interface Props {
children?: ReactNode;
}
export const SearchLibraryProvider = ({ children }: Props) => {
const fetchShareMap = useFetchShareMap();
const api = useApi();
const { createNotification } = useNotifications();
const [user] = useUser();
const getUserKeys = useGetUserKeys();
const { getLinkPrivateKey } = useLink();
const { getSharePrivateKey } = useShare();
const { getDefaultShare } = useDefaultShare();
const searchEnabled = useSearchEnabledFeature();
    const [isInitialized, setIsInitialized] = useState(false);
const driveEventManager = useDriveEventManager();
const defaultShareIdPromise = getDefaultShare().then(({ shareId }) => shareId);
const esCallbacks = useESCallbacks({
api,
user,
fetchShareMap,
shareId: defaultShareIdPromise,
getSharePrivateKey,
getLinkPrivateKey,
});
const handleMetadataIndexed = () => {
createNotification({
type: 'success',
text: c('Notification').t`Encrypted search activated`,
});
};
const esFunctions = useEncryptedSearch<ESLink, ESDriveSearchParams>({
refreshMask: 1,
esCallbacks,
onMetadataIndexed: handleMetadataIndexed,
sendMetricsOnSearch: true,
});
const initializeESDrive = async () => {
// Migrate old IDBs
const success = await migrate(user.ID, getUserKeys, defaultShareIdPromise);
if (!success) {
return esFunctions.esDelete();
}
// In case of a downgrade from paid to free, remove everything
if ((await checkVersionedESDB(user.ID)) && !isPaid(user)) {
return esFunctions.esDelete();
}
// In case an interrupted indexing process is found, we remove anything ES
// has built so far since drive needs to finish indexing in one go
const progress = await metadataIndexingProgress.read(user.ID);
if (!!progress && progress.status !== INDEXING_STATUS.ACTIVE) {
await esFunctions.esDelete();
}
await esFunctions.initializeES();
        setIsInitialized(true);
};
useEffect(() => {
        // Feature flags come in asynchronously (falling back to `false` initially),
        // thus we need to observe their changes
if (searchEnabled && !isInitialized) {
void initializeESDrive();
}
}, [searchEnabled, isInitialized]);
useEffect(() => {
if (!esFunctions.esStatus.dbExists) {
return;
}
const callbackId = driveEventManager.eventHandlers.register(async (volumeId, events) => {
            // The store is updated via volume events, which include all shares,
            // including my files or devices. Encrypted search works only for
            // my files, thus we need to filter for events affecting only
            // the default share. In case of a delete operation, the share ID is
            // not known, so we resort to a hack and assume the event is for the
            // my files share. This may cause minor problems, but before the risk
            // gets big we should have switched to a volume-centric cache and no
            // longer have to deal with this issue.
const defaultShareId = await defaultShareIdPromise;
const defaultShareEvents = {
...events,
events: events.events
.map(
                        // A move from one share to another is just a simple
                        // metadata update in the volume context, but it is a
                        // delete in the share context.
(event) =>
!event.originShareId || event.encryptedLink.rootShareId === event.originShareId
? event
: {
...event,
eventType: EVENT_TYPES.DELETE,
encryptedLink: {
...event.encryptedLink,
rootShareId: event.originShareId,
},
}
)
.filter(
(event) =>
event.eventType === EVENT_TYPES.DELETE || event.encryptedLink.rootShareId === defaultShareId
),
};
const searchEvents = await convertDriveEventsToSearchEvents(
defaultShareId,
defaultShareEvents,
getLinkPrivateKey
);
await esFunctions.handleEvent(searchEvents);
});
return () => {
driveEventManager.eventHandlers.unregister(callbackId);
};
}, [esFunctions.handleEvent]);
return <SearchLibraryContext.Provider value={esFunctions}>{children}</SearchLibraryContext.Provider>;
};
export default function useSearchLibrary() {
const state = useContext(SearchLibraryContext);
if (!state) {
throw new Error('Trying to use uninitialized SearchLibraryProvider');
}
return state;
}
| 3,128
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/useSearchResults.tsx
|
import { createContext, useContext, useState } from 'react';
import { ESLink } from './types';
import useSearchLibrary from './useSearchLibrary';
function useSearchResultsProvider() {
const { encryptedSearch, esStatus } = useSearchLibrary();
const { dbExists } = esStatus;
const [query, setQuery] = useState<string>('');
const [isSearching, setIsSearching] = useState(false);
const [results, setResults] = useState<ESLink[]>([]);
const searchStarted = (query: string) => {
setQuery(query);
setIsSearching(true);
};
const runSearch = async (query: string) => {
searchStarted(query);
await encryptedSearch((results: ESLink[]) => {
setResults(results);
}).finally(() => {
setIsSearching(false);
});
};
return {
runSearch,
dbExists,
query,
isSearching,
results,
};
}
const SearchResultsContext = createContext<ReturnType<typeof useSearchResultsProvider> | null>(null);
export function SearchResultsProvider({ children }: { children: React.ReactNode }) {
const providerState = useSearchResultsProvider();
return <SearchResultsContext.Provider value={providerState}>{children}</SearchResultsContext.Provider>;
}
export default function useSearchResults() {
const state = useContext(SearchResultsContext);
if (!state) {
throw new Error('Trying to use uninitialized SearchResultsProvider');
}
return state;
}
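// Illustrative sketch (not part of the original file): a minimal consumer of
// useSearchResults. The markup is an assumption; only the hook contract
// (runSearch, isSearching, results) comes from this module.
export function ExampleSearchView() {
    const { runSearch, isSearching, results } = useSearchResults();
    const [input, setInput] = useState('');
    return (
        <form
            onSubmit={(e) => {
                e.preventDefault();
                void runSearch(input);
            }}
        >
            <input value={input} onChange={(e) => setInput(e.target.value)} />
            <button type="submit" disabled={isSearching}>
                Search
            </button>
            <span>{results.length} results</span>
        </form>
    );
}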
| 3,129
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/utils.ts
|
import { Location } from 'history';
import { CryptoProxy } from '@proton/crypto';
import { ShareMapLink } from '@proton/shared/lib/interfaces/drive/link';
import { ESLink } from './types';
export const createItemId = (shareId: string, linkId: string) => {
return `${shareId}:${linkId}`;
};
export const parseItemId = (esItemId: string) => {
const [shareId, linkId] = esItemId.split(':');
return { shareId, linkId };
};
export const generateOrder = async (ID: string) => {
const numericalID = ID.split('').map((char) => char.charCodeAt(0));
const digest = await CryptoProxy.computeHash({ algorithm: 'unsafeMD5', data: Uint8Array.from(numericalID) });
const orderArray = new Uint32Array(digest.buffer);
return orderArray[0];
};
export const convertLinkToESItem = async (link: ShareMapLink, shareId: string): Promise<ESLink> => {
const id = createItemId(shareId, link.LinkID);
const order = await generateOrder(id);
const processedLink = {
id,
createTime: link.CreateTime,
decryptedName: link.Name,
linkId: link.LinkID,
MIMEType: link.MIMEType,
modifiedTime: link.ModifyTime,
parentLinkId: link.ParentLinkID,
shareId,
size: link.Size,
order,
};
return processedLink;
};
export const getDefaultSessionValue = () => ({
lastIndex: 0,
sessionName: 'test',
isDone: false,
total: 200000,
});
/**
* Transforms url hash into an object
 * @param urlHash Example: `#q=query&sort=asc`
*/
export const parseHashParams = (urlHash: string) => {
const result: Record<string, string> = {};
return urlHash
.slice(1)
.split('&')
.reduce(function (res, item) {
const [key, value] = item.split('=');
res[key] = value;
return res;
}, result);
};
export const extractSearchParameters = (location: Location): string => {
const hashParams = parseHashParams(location.hash);
const { q } = hashParams;
return q ? decodeURIComponent(q) : '';
};
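// Illustrative sketch (not part of the original file): round-trips of the
// pure helpers above; all values are made up for demonstration.
const exampleId = createItemId('share-1', 'link-1'); // 'share-1:link-1'
const { shareId: exampleShareId, linkId: exampleLinkId } = parseItemId(exampleId);
// exampleShareId === 'share-1', exampleLinkId === 'link-1'
const exampleHashParams = parseHashParams('#q=hello%20world&sort=asc');
// exampleHashParams => { q: 'hello%20world', sort: 'asc' };
// extractSearchParameters additionally runs decodeURIComponent on `q`,
// yielding 'hello world'.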
| 3,130
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/LinkDecryptionBuffer.ts
|
import { PrivateKeyReference } from '@proton/crypto';
import { HARDWARE_CONCURRENCY } from '@proton/shared/lib/drive/constants';
import { wait } from '@proton/shared/lib/helpers/promise';
import { ShareMapLink } from '@proton/shared/lib/interfaces/drive/link';
import { runInQueueAbortable } from '../../../utils/parallelRunners';
import { ESLink } from '../types';
import { convertLinkToESItem } from '../utils';
import { KeyCache } from './useKeysCache';
export class LinkMapDecryptionBuffer {
keyCache;
decryptedLinks: ESLink[] = [];
decryptionQueue: ShareMapLink[][] = [];
isDone: boolean = false;
encryptedItems;
constructor(encryptedItemsGenerator: AsyncGenerator<ShareMapLink[]>, keyCache: KeyCache) {
this.encryptedItems = encryptedItemsGenerator;
this.keyCache = keyCache;
}
async decryptLink(linkMeta: ShareMapLink, shareId: string, privateKey: PrivateKeyReference) {
const { name: decryptedName } = await this.keyCache.decryptAndCacheLink(linkMeta, privateKey);
if (linkMeta.ParentLinkID && decryptedName) {
this.decryptedLinks.push(
await convertLinkToESItem(
{
...linkMeta,
Name: decryptedName,
},
shareId
)
);
}
}
async decryptLinkMetas(linkMetaPage: ShareMapLink[], shareId: string) {
const page = [...linkMetaPage];
while (page.length) {
const availableParentKeyIndex = page.findIndex((linkMeta) => {
return !this.keyCache.getCachedPrivateKey(linkMeta.ParentLinkID);
});
const spliceEndIndex = availableParentKeyIndex === -1 ? page.length : availableParentKeyIndex;
const readyToDecryptLinks = page.splice(0, spliceEndIndex);
if (readyToDecryptLinks.length === 0) {
console.error('ES: parentKeys are missing');
break;
}
await runInQueueAbortable(
readyToDecryptLinks.map((linkMeta) => () => {
const privateKey = this.keyCache.getCachedPrivateKey(linkMeta.ParentLinkID);
return this.decryptLink(linkMeta, shareId, privateKey!);
}),
HARDWARE_CONCURRENCY
);
}
}
async decrypt(shareId: string) {
for await (const linkMetaBatch of this.encryptedItems) {
await this.decryptLinkMetas(linkMetaBatch, shareId);
}
this.isDone = true;
}
async *iterateItems() {
while (!this.isDone || this.decryptedLinks.length > 0) {
const { length } = this.decryptedLinks;
// These numbers were picked more or less randomly.
// Feel free to change them if any tweak is needed.
if (this.isDone || length > 200) {
yield this.decryptedLinks.splice(0, length);
} else {
await wait(1000);
}
}
}
}
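// Illustrative sketch (not part of the original file): wiring the buffer to a
// page generator, assuming `pages` comes from LinkMapLoader.iterateItems()
// and `keyCache` from createKeysCache(rootKey). Decryption runs in the
// background while the consumer drains decrypted batches as they appear.
export async function exampleDrainDecryptedLinks(
    pages: AsyncGenerator<ShareMapLink[]>,
    keyCache: KeyCache,
    shareId: string
) {
    const buffer = new LinkMapDecryptionBuffer(pages, keyCache);
    void buffer.decrypt(shareId); // fire-and-forget; flips isDone when finished
    for await (const batch of buffer.iterateItems()) {
        // batch: ESLink[] ready to be handed to the indexer
        console.log('decrypted batch of', batch.length, 'links');
    }
}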
| 3,131
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/LinkMapLoader.ts
|
import { wait } from '@proton/shared/lib/helpers/promise';
import { ShareMapLink } from '@proton/shared/lib/interfaces/drive/link';
import { createAsyncQueue } from '../../../utils/parallelRunners';
import { PAGE_SIZE } from '../constants';
import { fetchItemsMetadataPage } from './fetchItemsMetadataPage';
import { FetchShareMap } from './useFetchShareMap';
const PARALLEL_FETCH_LIMIT = 5;
export class LinkMapLoader {
linkMetaRawByPage = new Map<number, ShareMapLink[]>();
isDone: boolean = false;
sessionName?: string;
queue;
fetchShareMapPage: FetchShareMap;
constructor({ fetchShareMapPage }: { fetchShareMapPage: FetchShareMap }) {
this.fetchShareMapPage = fetchShareMapPage;
this.queue = createAsyncQueue(PARALLEL_FETCH_LIMIT);
}
async fetchAndCacheLinkPage(shareId: string, sessionName?: string, page?: number) {
const { links, session } = await fetchItemsMetadataPage(shareId, this.fetchShareMapPage, sessionName, page);
this.linkMetaRawByPage.set(page || 0, links);
this.isDone = this.isDone || session.isDone;
this.sessionName = session.sessionName;
return {
links,
session,
};
}
async fetchShareMap(shareId: string) {
        // Fetch the first page separately to retrieve meta info about the link map
const { session } = await this.fetchAndCacheLinkPage(shareId);
if (this.isDone) {
return;
}
const pageCount = Math.ceil(session.total / PAGE_SIZE);
for (let page = 1; page < pageCount; page++) {
this.queue.addToQueue(() => this.fetchAndCacheLinkPage(shareId, this.sessionName, page));
}
}
async *iterateItems() {
let pageNumber = 0;
while (!this.isDone || this.linkMetaRawByPage.size > 0) {
const links = this.linkMetaRawByPage.get(pageNumber);
if (links) {
this.linkMetaRawByPage.delete(pageNumber);
pageNumber += 1;
yield links;
} else {
await wait(500);
}
}
}
}
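// Illustrative sketch (not part of the original file): pages are fetched with
// up to PARALLEL_FETCH_LIMIT requests in flight, but iterateItems yields them
// strictly in order, waiting on page N even if later pages already arrived.
// `fetchShareMapPage` is assumed to come from useFetchShareMap().
export async function exampleLoadShareMap(shareId: string, fetchShareMapPage: FetchShareMap) {
    const loader = new LinkMapLoader({ fetchShareMapPage });
    void loader.fetchShareMap(shareId); // enqueues the remaining pages
    for await (const links of loader.iterateItems()) {
        console.log('page of', links.length, 'raw link metas');
    }
}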
| 3,132
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/createLinkGenerator.ts
|
import { PrivateKeyReference } from '@proton/crypto';
import { LinkMapDecryptionBuffer } from './LinkDecryptionBuffer';
import { LinkMapLoader } from './LinkMapLoader';
import { FetchShareMap } from './useFetchShareMap';
import { createKeysCache } from './useKeysCache';
export const createLinkGenerator = (
shareId: string,
rootLinkKeys: PrivateKeyReference,
callbacks: {
fetchShareMap: FetchShareMap;
}
) => {
const shareMapLoader = new LinkMapLoader({ fetchShareMapPage: callbacks.fetchShareMap });
shareMapLoader.fetchShareMap(shareId).catch(console.warn);
const shareMapGenerator = shareMapLoader.iterateItems();
const linkMapBuffer = new LinkMapDecryptionBuffer(shareMapGenerator, createKeysCache(rootLinkKeys));
linkMapBuffer.decrypt(shareId).catch(console.warn);
const decryptedLinkMetaGenerator = linkMapBuffer.iterateItems();
return decryptedLinkMetaGenerator;
};
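// Illustrative sketch (not part of the original file): the returned generator
// is what queryItemsMetadata in useESCallbacks pulls from, one batch per call.
// `shareId`, `rootKey` and `fetchShareMap` are assumptions obtained elsewhere.
export async function exampleFirstBatch(
    shareId: string,
    rootKey: PrivateKeyReference,
    fetchShareMap: FetchShareMap
) {
    const generator = createLinkGenerator(shareId, rootKey, { fetchShareMap });
    const first = await generator.next();
    // first.value is an ESLink[] batch, or undefined once the map is exhausted
    return first.value ?? [];
}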
| 3,133
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/fetchItemsMetadataPage.ts
|
import { getApiError } from '@proton/shared/lib/api/helpers/apiErrorHelper';
import { ShareMapLink } from '@proton/shared/lib/interfaces/drive/link';
import retryOnError from '../../../utils/retryOnError';
import { PAGE_SIZE, SESSION_EXPIRED_ERROR_CODE } from '../constants';
import { Session } from '../types';
import { getDefaultSessionValue } from '../utils';
import { FetchShareMap } from './useFetchShareMap';
export const fetchItemsMetadataPage = async (
shareId: string,
fetchShareMap: FetchShareMap,
sessionName?: Session['sessionName'],
page?: number
): Promise<{
links: ShareMapLink[];
session: Session;
}> => {
return retryOnError<{
links: ShareMapLink[];
session: Session;
}>({
fn: async (sessionName: Session['sessionName'], page?: number) => {
const lastIndex = page === undefined ? undefined : page * PAGE_SIZE - 1;
const { Links, SessionName, More, Total } = await fetchShareMap({
shareId,
lastIndex,
sessionName,
pageSize: PAGE_SIZE,
});
return {
links: Links,
session: {
sessionName: SessionName,
isDone: More === 0,
total: Total,
},
};
},
shouldRetryBasedOnError: (error) => {
const apiError = getApiError(error);
if (apiError.code === SESSION_EXPIRED_ERROR_CODE) {
return true;
}
console.warn(error);
return false;
},
beforeRetryCallback: async () => {
return [getDefaultSessionValue()];
},
maxRetriesNumber: 2,
})(sessionName, page);
};
| 3,134
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/processEvent.ts
|
import { PrivateKeyReference } from '@proton/crypto';
import { ESEvent, ESItemEvent, ES_SYNC_ACTIONS, EventsObject } from '@proton/encrypted-search';
import { EVENT_TYPES } from '@proton/shared/lib/drive/constants';
import { decryptUnsigned } from '@proton/shared/lib/keys/driveKeys';
import { DriveEvent, DriveEvents } from '../../_events';
import { ESLink } from '../types';
import { createItemId, generateOrder } from '../utils';
export type SearchEventItem = ESItemEvent<ESLink>;
export type SearchEvent = ESEvent<ESLink>;
/**
* Formats regular events into a ESEvent batch that will be processed by ES lib
*/
export default async function convertDriveEventsToSearchEvents(
shareId: string,
events: DriveEvents,
getLinkPrivateKey: (abortSignal: AbortSignal, shareId: string, linkId: string) => Promise<PrivateKeyReference>
): Promise<SearchEvent> {
let eventsToStore: EventsObject = {};
eventsToStore[shareId] = events.eventId;
return {
eventsToStore,
Refresh: events.refresh ? 1 : 0,
attemptReDecryption: false,
Items: await Promise.all(
events.events.map((event) => convertDriveEventToSearchEvent(shareId, event, getLinkPrivateKey))
),
};
}
const convertEventTypesToSearchEventAction = Object.fromEntries([
[EVENT_TYPES.CREATE, ES_SYNC_ACTIONS.CREATE],
[EVENT_TYPES.UPDATE, ES_SYNC_ACTIONS.UPDATE_CONTENT],
[EVENT_TYPES.UPDATE_METADATA, ES_SYNC_ACTIONS.UPDATE_METADATA],
[EVENT_TYPES.DELETE, ES_SYNC_ACTIONS.DELETE],
]);
async function convertDriveEventToSearchEvent(
shareId: string,
event: DriveEvent,
getLinkPrivateKey: (abortSignal: AbortSignal, shareId: string, linkId: string) => Promise<PrivateKeyReference>
): Promise<SearchEventItem> {
const result: SearchEventItem = {
ID: createItemId(shareId, event.encryptedLink.linkId),
Action: convertEventTypesToSearchEventAction[event.eventType],
ItemMetadata: undefined,
};
    // There's no link meta sent from the BE in case of a delete event
if (event.eventType === EVENT_TYPES.DELETE) {
return result;
}
const parentPrivateKey = await getLinkPrivateKey(
new AbortController().signal,
shareId,
event.encryptedLink.parentLinkId
);
result.ItemMetadata = await decryptAndGenerateSearchEvent(shareId, event, parentPrivateKey);
return result;
}
async function decryptAndGenerateSearchEvent(shareId: string, event: DriveEvent, privateKey: PrivateKeyReference) {
const link = event.encryptedLink;
const name = await decryptUnsigned({ armoredMessage: link.name, privateKey });
const id = createItemId(shareId, link.linkId);
return {
decryptedName: name,
MIMEType: link.mimeType,
createTime: link.createTime,
id,
linkId: link.linkId,
parentLinkId: link.parentLinkId,
size: link.size,
modifiedTime: link.metaDataModifyTime,
shareId,
order: await generateOrder(id),
};
}
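// Illustrative sketch (not part of the original file): the shape produced for
// a single DELETE event, with made-up IDs. Note that DELETE items carry no
// ItemMetadata, since the BE sends no link meta for them:
// {
//     eventsToStore: { 'share-1': 'event-42' },
//     Refresh: 0,
//     attemptReDecryption: false,
//     Items: [{ ID: 'share-1:link-1', Action: ES_SYNC_ACTIONS.DELETE, ItemMetadata: undefined }],
// }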
| 3,135
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/useFetchShareMap.ts
|
import { useCallback } from 'react';
import { queryShareMap } from '@proton/shared/lib/api/drive/link';
import { ShareMapPayload } from '@proton/shared/lib/interfaces/drive/link';
import { useDebouncedRequest } from '../../_api';
interface ShareMapParams {
shareId: string;
lastIndex?: number;
sessionName?: string;
pageSize?: number;
}
export type FetchShareMap = (params: ShareMapParams, signal?: AbortSignal) => Promise<ShareMapPayload>;
export default function useFetchShareMap() {
const debouncedRequest = useDebouncedRequest();
return useCallback<FetchShareMap>(({ shareId, lastIndex, sessionName, pageSize }, signal) => {
return debouncedRequest<ShareMapPayload>(queryShareMap(shareId, lastIndex, sessionName, pageSize), signal);
}, []);
}
| 3,136
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/useKeysCache.test.ts
|
import { CryptoApiInterface, CryptoProxy, PrivateKeyReference } from '@proton/crypto';
import { LinkType } from '@proton/shared/lib/interfaces/drive/link';
import { KeyCache, createKeysCache } from './useKeysCache';
const linkMock = {
CreateTime: 123456,
Hash: '',
Index: 0,
LinkID: 'link-mock-id',
MIMEType: '',
ModifyTime: 1234,
Name: '',
ParentLinkID: null,
Size: 123,
State: 0,
Type: LinkType.FOLDER,
};
const DECRYPTED_NAME = 'a smell of petroleum prevails throughout';
const PRIVATE_KEY = 'private-key';
const mockedCryptoApi = {
importPrivateKey: jest.fn().mockImplementation(() => PRIVATE_KEY),
} as any as CryptoApiInterface;
jest.mock('@proton/shared/lib/keys/driveKeys', () => ({
decryptUnsigned: jest.fn().mockImplementation(() => DECRYPTED_NAME),
}));
jest.mock('@proton/shared/lib/keys/drivePassphrase', () => ({
decryptPassphrase: jest.fn().mockImplementation(() => ({
decryptedPassphrase: '',
})),
}));
describe('useKeysCache', () => {
let keyCache: KeyCache;
beforeAll(() => {
CryptoProxy.setEndpoint(mockedCryptoApi);
});
afterAll(async () => {
await CryptoProxy.releaseEndpoint();
});
beforeEach(() => {
keyCache = createKeysCache('key' as unknown as PrivateKeyReference);
});
it('caches decrypted links', async () => {
const { name } = await keyCache.decryptAndCacheLink(linkMock, {} as unknown as PrivateKeyReference);
expect(name).toEqual(DECRYPTED_NAME);
const key = keyCache.getCachedPrivateKey(linkMock.LinkID);
expect(key).toEqual(PRIVATE_KEY);
});
it("returns undefined when unknown link's keys are requested", async () => {
const result = keyCache.getCachedPrivateKey('new-link-id');
expect(result).toBe(undefined);
});
});
| 3,137
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_search/indexing/useKeysCache.ts
|
import { CryptoProxy, PrivateKeyReference } from '@proton/crypto';
import { LinkType, ShareMapLink } from '@proton/shared/lib/interfaces/drive/link';
import { decryptUnsigned } from '@proton/shared/lib/keys/driveKeys';
import { decryptPassphrase } from '@proton/shared/lib/keys/drivePassphrase';
export type DecryptAndCacheLink = (
linkMeta: ShareMapLink,
parentPrivateKey: PrivateKeyReference
) => Promise<{ name: string }>;
export type GetCachedParentPrivateKey = (linkId: string | null) => PrivateKeyReference | undefined;
export const LINK_KEYS_NOT_FOUND_MESSAGE = "ES Indexing: parent link key wasn't found.";
export interface KeyCache {
getCachedPrivateKey: GetCachedParentPrivateKey;
decryptAndCacheLink: DecryptAndCacheLink;
}
export const createKeysCache = (rootKey: PrivateKeyReference): KeyCache => {
const keyCache = new Map<string | null, PrivateKeyReference>();
keyCache.set(null, rootKey);
const getCachedPrivateKey: GetCachedParentPrivateKey = (linkId) => {
return keyCache.get(linkId);
};
// XXX: move to a worker some time in the future
const decryptAndCacheLink: DecryptAndCacheLink = async (linkMeta, parentPrivateKey) => {
/*
         * If the link is a folder, we need to decrypt its NodeKey in order to
         * be able to decrypt its children's names later on
*/
if (linkMeta.Type === LinkType.FOLDER) {
const { decryptedPassphrase } = await decryptPassphrase({
armoredPassphrase: linkMeta.NodePassphrase!,
armoredSignature: linkMeta.NodePassphraseSignature!,
privateKeys: [parentPrivateKey],
publicKeys: [],
validateSignature: false,
});
const linkPrivateKey = await CryptoProxy.importPrivateKey({
armoredKey: linkMeta.NodeKey!,
passphrase: decryptedPassphrase,
});
keyCache.set(linkMeta.LinkID, linkPrivateKey);
}
const name = await decryptUnsigned({ armoredMessage: linkMeta.Name, privateKey: parentPrivateKey });
return { name };
};
return {
decryptAndCacheLink,
getCachedPrivateKey,
};
};
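// Illustrative sketch (not part of the original file): the cache is seeded
// with the root key under the `null` parent ID, so top-level links decrypt
// immediately; folder keys become available to their children only after the
// folder itself has been decrypted. `rootKey` and `linkMeta` are assumptions.
export async function exampleDecryptChild(rootKey: PrivateKeyReference, linkMeta: ShareMapLink) {
    const cache = createKeysCache(rootKey);
    const parentKey = cache.getCachedPrivateKey(linkMeta.ParentLinkID);
    if (!parentKey) {
        throw new Error(LINK_KEYS_NOT_FOUND_MESSAGE);
    }
    const { name } = await cache.decryptAndCacheLink(linkMeta, parentKey);
    return name;
}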
| 3,138
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_settings/index.tsx
|
export { UserSettingsProvider } from './useUserSettings';
export { default as useUserSettings } from './useUserSettings';
| 3,139
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_settings/sorting.test.ts
|
import { SORT_DIRECTION } from '@proton/shared/lib/constants';
import { SortField } from '../_views/utils/useSorting';
import * as sorting from './sorting';
describe('sorting', () => {
it('should return SortSetting identifier given SortParams', () => {
expect(sorting.getSetting({ sortField: SortField.name, sortOrder: SORT_DIRECTION.DESC })).toBe(-1);
expect(sorting.getSetting({ sortField: SortField.size, sortOrder: SORT_DIRECTION.ASC })).toBe(2);
expect(sorting.getSetting({ sortField: SortField.fileModifyTime, sortOrder: SORT_DIRECTION.ASC })).toBe(4);
});
it('should return SortParams given SortSetting identifier', () => {
expect(sorting.parseSetting(1)).toEqual({
sortField: 'name',
sortOrder: SORT_DIRECTION.ASC,
});
expect(sorting.parseSetting(-2)).toEqual({
sortField: 'size',
sortOrder: SORT_DIRECTION.DESC,
});
expect(sorting.parseSetting(-4)).toEqual({
sortField: 'fileModifyTime',
sortOrder: SORT_DIRECTION.DESC,
});
});
});
| 3,140
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_settings/sorting.ts
|
import { SORT_DIRECTION } from '@proton/shared/lib/constants';
import { SortSetting } from '@proton/shared/lib/interfaces/drive/userSettings';
import { SortField } from '../_views/utils/useSorting';
export interface UserSortParams {
sortField: SortField.fileModifyTime | SortField.name | SortField.size;
sortOrder: SORT_DIRECTION;
}
export const settingsToSortParams: { [key in SortSetting]: UserSortParams } = {
[SortSetting.ModifiedAsc]: { sortField: SortField.fileModifyTime, sortOrder: SORT_DIRECTION.ASC },
[SortSetting.ModifiedDesc]: { sortField: SortField.fileModifyTime, sortOrder: SORT_DIRECTION.DESC },
[SortSetting.NameAsc]: { sortField: SortField.name, sortOrder: SORT_DIRECTION.ASC },
[SortSetting.NameDesc]: { sortField: SortField.name, sortOrder: SORT_DIRECTION.DESC },
[SortSetting.SizeAsc]: { sortField: SortField.size, sortOrder: SORT_DIRECTION.ASC },
[SortSetting.SizeDesc]: { sortField: SortField.size, sortOrder: SORT_DIRECTION.DESC },
};
const DEFAULT_SORT_SETTING = settingsToSortParams[SortSetting.ModifiedDesc];
export const parseSetting = (sortSetting: SortSetting): UserSortParams => {
return settingsToSortParams[sortSetting] || DEFAULT_SORT_SETTING;
};
export const getSetting = ({ sortField, sortOrder }: UserSortParams): SortSetting | undefined => {
const value = Object.entries(settingsToSortParams).find(([, sortParams]) => {
return sortParams.sortField === sortField && sortParams.sortOrder === sortOrder;
});
return value ? Number(value[0]) : undefined;
};
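// Illustrative sketch (not part of the original file): getSetting and
// parseSetting are inverses over the settingsToSortParams table, e.g.:
const exampleSetting = getSetting({ sortField: SortField.size, sortOrder: SORT_DIRECTION.DESC });
// exampleSetting === SortSetting.SizeDesc (-2, matching sorting.test.ts above)
const exampleParams = parseSetting(exampleSetting!);
// exampleParams => { sortField: SortField.size, sortOrder: SORT_DIRECTION.DESC }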
| 3,141
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_settings/useUserSettings.tsx
|
import { createContext, useCallback, useContext, useMemo, useState } from 'react';
import { useApi, useGetUser } from '@proton/components';
import { queryUpdateUserSettings, queryUserSettings } from '@proton/shared/lib/api/drive/userSettings';
import { DEFAULT_USER_SETTINGS } from '@proton/shared/lib/drive/constants';
import { LayoutSetting, UserSettings, UserSettingsResponse } from '@proton/shared/lib/interfaces/drive/userSettings';
import { UserSortParams, getSetting, parseSetting } from './sorting';
const useUserSettingsProvider = () => {
const api = useApi();
const getUser = useGetUser();
const [userSettings, setUserSettings] = useState<UserSettings>(DEFAULT_USER_SETTINGS);
const loadUserSettings = async () => {
const [{ UserSettings, Defaults }, { hasPaidDrive }] = await Promise.all([
api<UserSettingsResponse>(queryUserSettings()),
getUser(),
]);
const userSettingsWithDefaults = Object.entries(UserSettings).reduce((settings, [key, value]) => {
            // In case of a user downgrade from paid to free, we want to set the default free user value
if (key === 'RevisionRetentionDays' && !hasPaidDrive) {
return {
...settings,
RevisionRetentionDays: Defaults.RevisionRetentionDays,
};
}
return {
...settings,
[key]:
value ??
(Defaults[key as keyof UserSettingsResponse['Defaults']] ||
DEFAULT_USER_SETTINGS[key as keyof UserSettingsResponse['UserSettings']]),
};
}, {} as UserSettings);
setUserSettings(userSettingsWithDefaults);
};
const sort = useMemo(() => parseSetting(userSettings.Sort), [userSettings.Sort]);
const changeSort = useCallback(async (sortParams: UserSortParams) => {
const sortSetting = getSetting(sortParams);
if (!sortSetting) {
return;
}
setUserSettings((settings) => ({ ...settings, Sort: sortSetting }));
await api(
queryUpdateUserSettings({
Sort: sortSetting,
})
);
}, []);
const changeLayout = useCallback(async (Layout: LayoutSetting) => {
setUserSettings((settings) => ({ ...settings, Layout }));
await api(
queryUpdateUserSettings({
Layout,
})
);
}, []);
return {
sort,
layout: userSettings.Layout,
revisionRetentionDays: userSettings.RevisionRetentionDays,
loadUserSettings,
changeSort,
changeLayout,
};
};
const UserSettingsContext = createContext<ReturnType<typeof useUserSettingsProvider> | null>(null);
export function UserSettingsProvider({ children }: { children: React.ReactNode }) {
const value = useUserSettingsProvider();
return <UserSettingsContext.Provider value={value}>{children}</UserSettingsContext.Provider>;
}
export default function useUserSettings() {
const state = useContext(UserSettingsContext);
if (!state) {
throw new Error('Trying to use uninitialized UserSettingsProvider');
}
return state;
}
| 3,142
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/index.tsx
|
import { SharesKeysProvider } from './useSharesKeys';
import { SharesStateProvider } from './useSharesState';
export * from './interface';
export * from './shareUrl';
export { default as useDefaultShare } from './useDefaultShare';
export { default as usePublicShare } from './usePublicShare';
export { default as useShare } from './useShare';
export { default as useShareActions } from './useShareActions';
export { default as useShareUrl } from './useShareUrl';
export { default as useVolume } from './useVolume';
export { default as useLockedVolume } from './useLockedVolume';
export function SharesProvider({ children }: { children: React.ReactNode }) {
return (
<SharesStateProvider>
<SharesKeysProvider>{children}</SharesKeysProvider>
</SharesStateProvider>
);
}
| 3,143
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/interface.ts
|
import { SessionKey } from '@proton/crypto';
type WithSRPPayload<T extends any> = T & {
srpModulusID: string;
srpVerifier: string;
urlPasswordSalt: string;
};
export enum ShareType {
default = 1,
standard,
device,
photos,
}
export enum ShareState {
active = 1,
deleted,
restored,
}
export interface Share {
shareId: string;
rootLinkId: string;
volumeId: string;
creator: string;
isLocked: boolean;
isDefault: boolean;
isVolumeSoftDeleted: boolean;
possibleKeyPackets: string[];
type: ShareType;
state: ShareState;
}
export interface ShareWithKey extends Share {
key: string;
passphrase: string;
passphraseSignature: string;
addressId: string;
rootLinkRecoveryPassphrase?: string;
}
export type ShareURL = WithSRPPayload<{
shareId: string;
shareUrlId: string;
expirationTime: number | null;
creatorEmail: string;
password: string;
flags: number;
token: string;
publicUrl: string;
sharePassphraseKeyPacket: string;
sharePasswordSalt: string;
hasCustomPassword: boolean;
hasGeneratedPasswordIncluded: boolean;
numAccesses: number;
maxAccesses: number;
permissions: number;
}>;
export type UpdateSharedURL = WithSRPPayload<{
expirationDuration: number | null;
expirationTime: number | null;
flags: number;
maxAccesses: number;
password: string;
permissions: number;
sharePassphraseKeyPacket: string;
sharePasswordSalt: string;
}>;
export interface LockedVolumeForRestore {
lockedVolumeId: string;
defaultShare: LockedShareForRestore;
devices: LockedDeviceForRestore[];
photos: LockedDeviceForRestore[];
}
export interface LockedShareForRestore {
shareId: string;
linkDecryptedPassphrase: string;
}
export interface LockedDeviceForRestore extends LockedShareForRestore {
shareDecryptedPassphrase: string;
shareSessionKey: SessionKey;
}
export interface LockedPhotosForRestore extends LockedShareForRestore {
shareDecryptedPassphrase: string;
shareSessionKey: SessionKey;
}
| 3,144
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/shareUrl.test.ts
|
import { SharedURLFlags } from '@proton/shared/lib/interfaces/drive/sharing';
import { hasCustomPassword, hasGeneratedPasswordIncluded, splitGeneratedAndCustomPassword } from './shareUrl';
describe('Password flags checks', () => {
describe('Missing data check', () => {
it('returns false if flags are undefined', () => {
expect(hasCustomPassword({})).toEqual(false);
expect(hasGeneratedPasswordIncluded({})).toEqual(false);
});
        it('returns false if SharedURLInfo is absent', () => {
expect(hasCustomPassword()).toEqual(false);
expect(hasGeneratedPasswordIncluded()).toEqual(false);
});
});
describe('hasCustomPassword', () => {
        it('returns true if CustomPassword flag is present', () => {
expect(hasCustomPassword({ flags: 0 | SharedURLFlags.CustomPassword })).toEqual(true);
expect(
hasCustomPassword({ flags: SharedURLFlags.GeneratedPasswordIncluded | SharedURLFlags.CustomPassword })
).toEqual(true);
expect(hasCustomPassword({ flags: 0 })).toEqual(false);
});
});
describe('hasGeneratedPasswordIncluded', () => {
        it('returns true if GeneratedPasswordIncluded flag is present', () => {
expect(hasGeneratedPasswordIncluded({ flags: 0 | SharedURLFlags.GeneratedPasswordIncluded })).toEqual(true);
expect(
hasGeneratedPasswordIncluded({
flags: SharedURLFlags.GeneratedPasswordIncluded | SharedURLFlags.CustomPassword,
})
).toEqual(true);
expect(hasGeneratedPasswordIncluded({ flags: 0 })).toEqual(false);
});
});
});
describe('splitGeneratedAndCustomPassword', () => {
it('no custom password returns only generated password', () => {
expect(splitGeneratedAndCustomPassword('1234567890ab', { flags: 0 })).toEqual(['1234567890ab', '']);
});
it('legacy custom password returns only custom password', () => {
expect(splitGeneratedAndCustomPassword('abc', { flags: SharedURLFlags.CustomPassword })).toEqual(['', 'abc']);
});
it('new custom password returns both generated and custom password', () => {
expect(
splitGeneratedAndCustomPassword('1234567890ababc', {
flags: SharedURLFlags.CustomPassword | SharedURLFlags.GeneratedPasswordIncluded,
})
).toEqual(['1234567890ab', 'abc']);
});
});
| 3,145
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/shareUrl.ts
|
import { SHARE_GENERATED_PASSWORD_LENGTH } from '@proton/shared/lib/drive/constants';
import { hasBit } from '@proton/shared/lib/helpers/bitset';
import { SharedURLFlags } from '@proton/shared/lib/interfaces/drive/sharing';
export const hasCustomPassword = (sharedURL?: { flags?: number }): boolean => {
return !!sharedURL && hasBit(sharedURL.flags, SharedURLFlags.CustomPassword);
};
export const hasGeneratedPasswordIncluded = (sharedURL?: { flags?: number }): boolean => {
return !!sharedURL && hasBit(sharedURL.flags, SharedURLFlags.GeneratedPasswordIncluded);
};
export const splitGeneratedAndCustomPassword = (password: string, sharedURL?: { flags?: number }): [string, string] => {
if (hasCustomPassword(sharedURL)) {
if (hasGeneratedPasswordIncluded(sharedURL)) {
return [
password.substring(0, SHARE_GENERATED_PASSWORD_LENGTH),
password.substring(SHARE_GENERATED_PASSWORD_LENGTH),
];
}
// This is legacy custom password mode; new shares should not create it.
return ['', password];
}
return [password, ''];
};
export const getSharedLink = (sharedURL?: {
token: string;
publicUrl: string;
password: string;
flags?: number;
}): string | undefined => {
if (!sharedURL) {
return undefined;
}
const [generatedPassword] = splitGeneratedAndCustomPassword(sharedURL.password, sharedURL);
const url = sharedURL.publicUrl ? sharedURL.publicUrl : `${window.location.origin}/urls/${sharedURL.token}`;
return `${url}${generatedPassword !== '' ? `#${generatedPassword}` : ''}`;
};
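// Illustrative sketch (not part of the original file): with both flags set,
// the first SHARE_GENERATED_PASSWORD_LENGTH characters (12, per the test file
// above) are the generated part that ends up in the URL fragment; the custom
// remainder never leaves the client. All values below are made up.
const exampleLink = getSharedLink({
    token: 'token123',
    publicUrl: 'https://drive.proton.me/urls/token123',
    password: '1234567890abcustom',
    flags: SharedURLFlags.CustomPassword | SharedURLFlags.GeneratedPasswordIncluded,
});
// exampleLink => 'https://drive.proton.me/urls/token123#1234567890ab'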
| 3,146
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useCreateDevice.ts
|
import { queryCreateDriveDevice } from '@proton/shared/lib/api/drive/devices';
import { CreatedDriveVolumeResult } from '@proton/shared/lib/interfaces/drive/volume';
import { generateDriveBootstrap, generateNodeHashKey } from '@proton/shared/lib/keys/driveKeys';
import { useDebouncedRequest } from '../_api';
import { useDriveCrypto } from '../_crypto';
import useDefaultShare from './useDefaultShare';
export function useCreateDevice() {
const debouncedRequest = useDebouncedRequest();
const { getOwnAddressAndPrimaryKeys } = useDriveCrypto();
const { getDefaultShare } = useDefaultShare();
const createDevice = async (): Promise<{ volumeId: string; shareId: string; linkId: string }> => {
const defaultShare = await getDefaultShare();
const { address, privateKey } = await getOwnAddressAndPrimaryKeys(defaultShare.creator);
const { bootstrap, folderPrivateKey } = await generateDriveBootstrap(privateKey);
const { NodeHashKey: FolderHashKey } = await generateNodeHashKey(folderPrivateKey, folderPrivateKey);
const { Volume } = await debouncedRequest<CreatedDriveVolumeResult>(
queryCreateDriveDevice({
Device: {
VolumeID: defaultShare.volumeId,
SyncState: 1,
Type: 1,
},
Share: {
Name: 'My device',
AddressID: address.ID,
Key: bootstrap.ShareKey,
Passphrase: bootstrap.SharePassphrase,
PassphraseSignature: bootstrap.SharePassphraseSignature,
},
Link: {
Name: bootstrap.FolderName,
NodeHashKey: FolderHashKey,
NodePassphrase: bootstrap.FolderPassphrase,
NodePassphraseSignature: bootstrap.FolderPassphraseSignature,
NodeKey: bootstrap.FolderKey,
},
})
);
return {
volumeId: Volume.ID,
shareId: Volume.Share.ID,
linkId: Volume.Share.LinkID,
};
};
return {
createDevice,
};
}
| 3,147
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useCreatePhotos.ts
|
import { queryCreatePhotosShare } from '@proton/shared/lib/api/drive/share';
import { CreatedDriveVolumeResult } from '@proton/shared/lib/interfaces/drive/volume';
import { generateDriveBootstrap, generateNodeHashKey } from '@proton/shared/lib/keys/driveKeys';
import { useDebouncedRequest } from '../_api';
import { useDriveCrypto } from '../_crypto';
import useDefaultShare from './useDefaultShare';
export function useCreatePhotos() {
const debouncedRequest = useDebouncedRequest();
const { getOwnAddressAndPrimaryKeys } = useDriveCrypto();
const { getDefaultShare } = useDefaultShare();
const createPhotosShare = async () => {
const defaultShare = await getDefaultShare();
const { address, privateKey } = await getOwnAddressAndPrimaryKeys(defaultShare.creator);
const { bootstrap, folderPrivateKey } = await generateDriveBootstrap(privateKey);
const { NodeHashKey: FolderHashKey } = await generateNodeHashKey(folderPrivateKey, folderPrivateKey);
const { Volume } = await debouncedRequest<CreatedDriveVolumeResult>(
queryCreatePhotosShare(defaultShare.volumeId, {
Share: {
Name: 'Photos',
AddressID: address.ID,
Key: bootstrap.ShareKey,
Passphrase: bootstrap.SharePassphrase,
PassphraseSignature: bootstrap.SharePassphraseSignature,
},
Link: {
Name: bootstrap.FolderName,
NodeHashKey: FolderHashKey,
NodePassphrase: bootstrap.FolderPassphrase,
NodePassphraseSignature: bootstrap.FolderPassphraseSignature,
NodeKey: bootstrap.FolderKey,
},
})
);
return {
volumeId: Volume.ID,
shareId: Volume.Share.ID,
linkId: Volume.Share.LinkID,
};
};
return {
createPhotosShare,
};
}
| 3,148
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useDefaultShare.test.tsx
|
import { act, renderHook } from '@testing-library/react-hooks';
import { VolumesStateProvider } from '../_volumes/useVolumesState';
import useDefaultShare from './useDefaultShare';
const mockRequest = jest.fn();
const mockCreateVolume = jest.fn();
const mockGetDefaultShareId = jest.fn();
const mockGetShare = jest.fn();
const mockGetShareWithKey = jest.fn();
jest.mock('../_api/useDebouncedRequest', () => {
const useDebouncedRequest = () => {
return mockRequest;
};
return useDebouncedRequest;
});
jest.mock('../_utils/useDebouncedFunction', () => {
const useDebouncedFunction = () => {
return (wrapper: any) => wrapper();
};
return useDebouncedFunction;
});
jest.mock('./useSharesState', () => {
const useSharesState = () => {
return {
setShares: () => {},
getDefaultShareId: mockGetDefaultShareId,
};
};
return {
...jest.requireActual('./useSharesState'),
__esModule: true,
default: useSharesState,
};
});
jest.mock('../_shares/useShare', () => {
const useLink = () => {
return {
getShare: mockGetShare,
getShareWithKey: mockGetShareWithKey,
};
};
return useLink;
});
jest.mock('./useVolume', () => {
const useVolume = () => {
return {
createVolume: mockCreateVolume,
};
};
return useVolume;
});
describe('useDefaultShare', () => {
let hook: {
current: ReturnType<typeof useDefaultShare>;
};
const defaultShareId = Symbol('shareId');
const ac = new AbortController();
beforeEach(() => {
jest.resetAllMocks();
mockCreateVolume.mockImplementation(async () => {
return { shareId: defaultShareId };
});
mockRequest.mockImplementation(async () => {
return { Shares: [] };
});
const wrapper = ({ children }: { children: React.ReactNode }) => (
<VolumesStateProvider>{children}</VolumesStateProvider>
);
const { result } = renderHook(() => useDefaultShare(), { wrapper });
hook = result;
});
it('creates a volume if existing shares are locked/soft deleted', async () => {
mockGetDefaultShareId.mockImplementation(() => {
// no valid shares were found
return undefined;
});
await act(async () => {
await hook.current.getDefaultShare();
});
expect(mockCreateVolume.mock.calls.length).toBe(1);
expect(mockGetShareWithKey).toHaveBeenCalledWith(expect.anything(), defaultShareId);
});
it('creates a volume if no shares exist', async () => {
mockRequest.mockImplementation(async () => {
return { Shares: [] };
});
await act(async () => {
await hook.current.getDefaultShare();
});
expect(mockCreateVolume.mock.calls.length).toBe(1);
expect(mockGetShareWithKey).toHaveBeenCalledWith(expect.anything(), defaultShareId);
});
it("creates a volume if default share doesn't exist", async () => {
mockRequest.mockImplementation(async () => {
return {
Shares: [
{
isDefault: false,
},
],
};
});
await act(async () => {
await hook.current.getDefaultShare();
});
expect(mockCreateVolume.mock.calls.length).toBe(1);
expect(mockGetShareWithKey).toHaveBeenCalledWith(expect.anything(), defaultShareId);
});
it('says share is available by default', async () => {
mockGetShare.mockImplementation(async () => ({}));
await act(async () => {
const isAvailable = await hook.current.isShareAvailable(ac.signal, 'shareId');
expect(isAvailable).toBeTruthy();
});
});
it('says share is not available if locked', async () => {
mockGetShare.mockImplementation(async () => {
return {
isLocked: true,
};
});
await act(async () => {
const isAvailable = await hook.current.isShareAvailable(ac.signal, 'shareId');
expect(isAvailable).toBeFalsy();
});
});
it('says share is not available if soft deleted', async () => {
mockGetShare.mockImplementation(async () => {
return {
isVolumeSoftDeleted: true,
};
});
await act(async () => {
const isAvailable = await hook.current.isShareAvailable(ac.signal, 'shareId');
expect(isAvailable).toBeFalsy();
});
});
});
| 3,149
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useDefaultShare.ts
|
import { useCallback } from 'react';
import { queryUserShares } from '@proton/shared/lib/api/drive/share';
import { UserShareResult } from '@proton/shared/lib/interfaces/drive/share';
import { shareMetaShortToShare, useDebouncedRequest } from '../_api';
import { useDebouncedFunction } from '../_utils';
import { useVolumesState } from '../_volumes';
import { Share, ShareState, ShareWithKey } from './interface';
import useShare from './useShare';
import useSharesState, { findDefaultPhotosShareId, findDefaultShareId } from './useSharesState';
import useVolume from './useVolume';
/**
* useDefaultShare provides access to main default user's share.
*/
export default function useDefaultShare() {
const debouncedFunction = useDebouncedFunction();
const debouncedRequest = useDebouncedRequest();
const sharesState = useSharesState();
const { getShare, getShareWithKey } = useShare();
const { createVolume } = useVolume();
const volumesState = useVolumesState();
const loadUserShares = useCallback(async (): Promise<Share[]> => {
const { Shares } = await debouncedRequest<UserShareResult>(queryUserShares());
        // We have to ignore the deleted shares until the BE stops returning them
const shares = Shares.map(shareMetaShortToShare).filter((share) => share.state !== ShareState.deleted);
shares.forEach(({ volumeId, shareId }) => {
volumesState.setVolumeShareIds(volumeId, [shareId]);
});
sharesState.setShares(shares);
return shares;
}, []);
const getDefaultShare = useCallback(
async (abortSignal?: AbortSignal): Promise<ShareWithKey> => {
return debouncedFunction(
async (abortSignal: AbortSignal) => {
let defaultShareId = sharesState.getDefaultShareId();
                    // First try to load a fresh list of shares from the API to
                    // make sure we don't create a second default share.
if (!defaultShareId) {
const shares = await loadUserShares();
                        // Do not use sharesState.getDefaultShareId, as useState
                        // is not a sync operation and thus the new state might
                        // not be set just yet.
defaultShareId = findDefaultShareId(shares);
}
if (!defaultShareId) {
const { shareId } = await createVolume();
defaultShareId = shareId;
// Load shares to the cache.
await loadUserShares();
}
return getShareWithKey(abortSignal || new AbortController().signal, defaultShareId);
},
['getDefaultShare'],
abortSignal
);
},
[sharesState.getDefaultShareId, getShareWithKey]
);
const getDefaultPhotosShare = useCallback(
async (abortSignal?: AbortSignal): Promise<ShareWithKey | undefined> => {
return debouncedFunction(
async (abortSignal: AbortSignal) => {
let defaultPhotosShareId = sharesState.getDefaultPhotosShareId();
                    // First try to load a fresh list of shares from the API
if (!defaultPhotosShareId) {
const shares = await loadUserShares();
                        // Do not use sharesState.getDefaultPhotosShareId, as useState
                        // is not a sync operation and thus the new state might
                        // not be set just yet.
defaultPhotosShareId = findDefaultPhotosShareId(shares);
}
// We currently don't support photos share creation on web
return defaultPhotosShareId
? getShareWithKey(abortSignal || new AbortController().signal, defaultPhotosShareId)
: undefined;
},
['getDefaultPhotosShare'],
abortSignal
);
},
[sharesState.getDefaultPhotosShareId, getShareWithKey]
);
const isShareAvailable = useCallback(
(abortSignal: AbortSignal, shareId: string): Promise<boolean> => {
return debouncedFunction(
async (abortSignal: AbortSignal) => {
const share = await getShare(abortSignal, shareId);
return !share.isLocked && !share.isVolumeSoftDeleted;
},
['isShareAvailable', shareId],
abortSignal
);
},
[getShare]
);
return {
getDefaultShare,
getDefaultPhotosShare,
isShareAvailable,
};
}
| 3,150
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/usePublicShare.ts
|
import { useApi } from '@proton/components';
import { CryptoProxy } from '@proton/crypto';
import { querySharedURLInformation, querySubmitAbuseReport } from '@proton/shared/lib/api/drive/sharing';
import { LinkType } from '@proton/shared/lib/interfaces/drive/link';
import { SharedURLInfo } from '@proton/shared/lib/interfaces/drive/sharing';
import { computeKeyPassword } from '@proton/srp';
import { usePublicSession } from '../_api';
import { useLink } from '../_links';
import useLinksState from '../_links/useLinksState';
import { ShareState, ShareType } from './interface';
import useSharesKeys from './useSharesKeys';
import useSharesState from './useSharesState';
/**
* usePublicShare loads shared share with link to the store and decrypts them.
*/
export default function usePublicShare() {
const api = useApi();
const { request, getSessionInfo } = usePublicSession();
const sharesKeys = useSharesKeys();
const { setLinks } = useLinksState();
const { setShares } = useSharesState();
const { getLink, getLinkPassphraseAndSessionKey } = useLink();
const loadPublicShare = async (abortSignal: AbortSignal) => {
const sessionInfo = getSessionInfo();
if (!sessionInfo) {
throw new Error('Unauthenticated session');
}
const { Token } = await request<{ Token: SharedURLInfo }>({
...querySharedURLInformation(sessionInfo.token),
silence: true,
});
const computedPassword = await computeKeyPassword(sessionInfo.password, Token.SharePasswordSalt).catch((e) =>
Promise.reject(
new Error('Failed to compute key password for shared page', {
cause: {
e,
linkId: Token.LinkID,
public: true,
},
})
)
);
const sharePassphrase = await CryptoProxy.decryptMessage({
armoredMessage: Token.SharePassphrase,
passwords: [computedPassword],
}).catch((e) =>
Promise.reject(
new Error('Failed to decrypt share passphrase for shared page', {
cause: {
e,
linkId: Token.LinkID,
public: true,
},
})
)
);
const sharePrivateKey = await CryptoProxy.importPrivateKey({
armoredKey: Token.ShareKey,
passphrase: sharePassphrase.data,
}).catch((e) =>
Promise.reject(
new Error('Failed to import share private key for shared page', {
cause: {
e,
linkId: Token.LinkID,
public: true,
},
})
)
);
sharesKeys.set(sessionInfo.token, sharePrivateKey);
setLinks(sessionInfo.token, [
{
encrypted: {
linkId: Token.LinkID,
parentLinkId: '',
isFile: Token.LinkType === LinkType.FILE,
name: Token.Name,
mimeType: Token.MIMEType,
size: Token.Size,
createTime: Token.CreateTime,
metaDataModifyTime: Token.CreateTime,
trashed: null,
hasThumbnail: Token.ThumbnailURLInfo !== undefined,
isShared: false,
nodeKey: Token.NodeKey,
nodePassphrase: Token.NodePassphrase,
contentKeyPacket: Token.ContentKeyPacket,
rootShareId: '',
xAttr: '',
hash: '',
},
},
]);
        // We need to set the share in the cache, as `getLink` will attempt
        // to fetch the share to determine its type.
// This isn't used in the public context.
setShares([
{
shareId: sessionInfo.token,
type: ShareType.standard,
passphrase: Token.SharePassphrase,
key: Token.ShareKey,
passphraseSignature: '',
creator: '',
addressId: '',
rootLinkId: '',
volumeId: '',
isLocked: false,
isDefault: false,
isVolumeSoftDeleted: false,
possibleKeyPackets: [],
state: ShareState.active,
},
]);
const link = await getLink(abortSignal, sessionInfo.token, Token.LinkID);
return {
token: sessionInfo.token,
link,
};
};
const submitAbuseReport = async (params: {
linkId: string;
abuseCategory: string;
reporterEmail?: string;
reporterMessage?: string;
}): Promise<void> => {
const sessionInfo = getSessionInfo();
if (!sessionInfo) {
throw new Error('Unauthenticated session');
}
const { token, password } = sessionInfo;
const ac = new AbortController();
const { passphrase } = await getLinkPassphraseAndSessionKey(ac.signal, token, params.linkId);
return api(
querySubmitAbuseReport({
ShareURL: window.location.href,
Password: password,
AbuseCategory: params.abuseCategory,
ReporterEmail: params.reporterEmail,
ReporterMessage: params.reporterMessage,
ResourcePassphrase: passphrase,
})
);
};
return {
loadPublicShare,
submitAbuseReport,
};
}
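// Illustrative usage sketch (hypothetical, not part of the original module):
// a consumer in the public context might drive this hook roughly as below.
// The surrounding component and `showSharedItem` are assumed names.
//
//   const { loadPublicShare } = usePublicShare();
//   useEffect(() => {
//       const ac = new AbortController();
//       loadPublicShare(ac.signal)
//           .then(({ token, link }) => showSharedItem(token, link))
//           .catch(console.error);
//       return () => ac.abort();
//   }, []);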
| 3,151
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useShare.ts
|
import { CryptoProxy, PrivateKeyReference, SessionKey } from '@proton/crypto';
import { queryShareMeta } from '@proton/shared/lib/api/drive/share';
import { ShareMeta } from '@proton/shared/lib/interfaces/drive/share';
import { shareMetaToShareWithKey, useDebouncedRequest } from '../_api';
import { useDriveCrypto } from '../_crypto';
import { useDebouncedFunction } from '../_utils';
import { Share, ShareWithKey } from './interface';
import useSharesKeys, { ShareKeys } from './useSharesKeys';
import useSharesState from './useSharesState';
export default function useShare() {
const debouncedFunction = useDebouncedFunction();
const debouncedRequest = useDebouncedRequest();
const driveCrypto = useDriveCrypto();
const sharesKeys = useSharesKeys();
const sharesState = useSharesState();
const fetchShare = async (abortSignal: AbortSignal, shareId: string): Promise<ShareWithKey> => {
const Share = await debouncedRequest<ShareMeta>({
...queryShareMeta(shareId),
signal: abortSignal,
});
return shareMetaToShareWithKey(Share);
};
/**
* getShareWithKey returns the share including its keys. Those are not
* available after listing the user's shares and thus require an extra API call. Use wisely.
*/
const getShareWithKey = async (abortSignal: AbortSignal, shareId: string): Promise<ShareWithKey> => {
return debouncedFunction(
async (abortSignal: AbortSignal) => {
const cachedShare = sharesState.getShare(shareId);
if (cachedShare && 'key' in cachedShare) {
return cachedShare;
}
const share = await fetchShare(abortSignal, shareId);
sharesState.setShares([share]);
return share;
},
['getShareWithKey', shareId],
abortSignal
);
};
/**
* getShare returns the share from cache, or fetches the full share from the API.
*/
const getShare = async (abortSignal: AbortSignal, shareId: string): Promise<Share> => {
const cachedShare = sharesState.getShare(shareId);
if (cachedShare) {
return cachedShare;
}
return getShareWithKey(abortSignal, shareId);
};
const getShareKeys = async (abortSignal: AbortSignal, shareId: string): Promise<ShareKeys> => {
const keys = sharesKeys.get(shareId);
if (keys) {
return keys;
}
const share = await getShareWithKey(abortSignal, shareId);
const { decryptedPassphrase, sessionKey } = await driveCrypto.decryptSharePassphrase(share).catch((e) =>
Promise.reject(
new Error('Failed to decrypt share passphrase', {
cause: {
e,
shareId,
},
})
)
);
const privateKey = await CryptoProxy.importPrivateKey({
armoredKey: share.key,
passphrase: decryptedPassphrase,
}).catch((e) =>
Promise.reject(
new Error('Failed to import share private key', {
cause: {
e,
shareId,
},
})
)
);
sharesKeys.set(shareId, privateKey, sessionKey);
return {
privateKey,
sessionKey,
};
};
/**
* getSharePrivateKey returns private key used for link private key encryption.
*/
const getSharePrivateKey = async (abortSignal: AbortSignal, shareId: string): Promise<PrivateKeyReference> => {
const keys = await getShareKeys(abortSignal, shareId);
return keys.privateKey;
};
/**
* getShareSessionKey returns session key used for sharing links.
*/
const getShareSessionKey = async (abortSignal: AbortSignal, shareId: string): Promise<SessionKey> => {
const keys = await getShareKeys(abortSignal, shareId);
if (!keys.sessionKey) {
// This should not happen. All shares have a session key; only a
// publicly shared link lacks one, and it is a bug if the session
// key is needed in that case.
throw new Error('Share is missing session key');
}
return keys.sessionKey;
};
/**
* getShareCreatorKeys returns the keys of the share creator's address.
*/
const getShareCreatorKeys = async (abortSignal: AbortSignal, shareId: string) => {
const share = await getShare(abortSignal, shareId);
const keys = await driveCrypto.getOwnAddressAndPrimaryKeys(share.creator);
return keys;
};
return {
getShareWithKey,
getShare,
getSharePrivateKey,
getShareSessionKey,
getShareCreatorKeys,
removeShares: sharesState.removeShares,
};
}
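// Illustrative usage sketch (hypothetical, not part of the original module):
//
//   const { getSharePrivateKey } = useShare();
//   const key = await getSharePrivateKey(new AbortController().signal, shareId);
//
// The first call fetches and decrypts the share; later calls for the same
// shareId are served from the SharesKeysStorage cache filled in getShareKeys.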
| 3,152
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useShareActions.ts
|
import { usePreventLeave } from '@proton/components';
import { queryCreateShare, queryDeleteShare } from '@proton/shared/lib/api/drive/share';
import { getEncryptedSessionKey } from '@proton/shared/lib/calendar/crypto/encrypt';
import { uint8ArrayToBase64String } from '@proton/shared/lib/helpers/encoding';
import { generateNodeKeys } from '@proton/shared/lib/keys/driveKeys';
import { getDecryptedSessionKey } from '@proton/shared/lib/keys/drivePassphrase';
import { useDebouncedRequest } from '../_api';
import { useLink } from '../_links';
import useShare from './useShare';
/**
* useShareActions provides actions for manipulating an individual share.
*/
export default function useShareActions() {
const { preventLeave } = usePreventLeave();
const debouncedRequest = useDebouncedRequest();
const { getLink, getLinkPassphraseAndSessionKey, getLinkPrivateKey } = useLink();
const { getShareCreatorKeys } = useShare();
const createShare = async (abortSignal: AbortSignal, shareId: string, volumeId: string, linkId: string) => {
const [{ address, privateKey: addressPrivateKey }, { passphraseSessionKey }, link] = await Promise.all([
getShareCreatorKeys(abortSignal, shareId),
getLinkPassphraseAndSessionKey(abortSignal, shareId, linkId),
getLink(abortSignal, shareId, linkId),
]);
const [parentPrivateKey, keyInfo] = await Promise.all([
getLinkPrivateKey(abortSignal, shareId, link.parentLinkId),
generateNodeKeys(addressPrivateKey).catch((e) =>
Promise.reject(
new Error('Failed to generate share node keys during share creation', {
cause: {
e,
shareId,
volumeId,
linkId,
},
})
)
),
]);
const {
NodeKey: ShareKey,
NodePassphrase: SharePassphrase,
privateKey: sharePrivateKey,
NodePassphraseSignature: SharePassphraseSignature,
} = keyInfo;
const nameSessionKey = await getDecryptedSessionKey({
data: link.encryptedName,
privateKeys: parentPrivateKey,
}).catch((e) =>
Promise.reject(
new Error('Failed to decrypt link name session key during share creation', {
cause: {
e,
shareId,
volumeId,
linkId,
},
})
)
);
if (!nameSessionKey) {
throw new Error('Could not get name session key during share creation');
}
const [PassphraseKeyPacket, NameKeyPacket] = await Promise.all([
getEncryptedSessionKey(passphraseSessionKey, sharePrivateKey)
.then(uint8ArrayToBase64String)
.catch((e) =>
Promise.reject(
new Error('Failed to encrypt link passphrase during share creation', {
cause: {
e,
shareId,
volumeId,
linkId,
},
})
)
),
getEncryptedSessionKey(nameSessionKey, sharePrivateKey)
.then(uint8ArrayToBase64String)
.catch((e) =>
Promise.reject(
new Error('Failed to encrypt link name during share creation', {
cause: {
e,
shareId,
volumeId,
linkId,
},
})
)
),
]);
const { Share } = await preventLeave(
debouncedRequest<{ Share: { ID: string } }>(
queryCreateShare(volumeId, {
AddressID: address.ID,
RootLinkID: linkId,
Name: 'New Share',
ShareKey,
SharePassphrase,
SharePassphraseSignature,
PassphraseKeyPacket,
NameKeyPacket,
})
)
);
return {
shareId: Share.ID,
sessionKey: keyInfo.sessionKey,
};
};
const deleteShare = async (shareId: string): Promise<void> => {
await preventLeave(debouncedRequest(queryDeleteShare(shareId)));
};
return {
createShare,
deleteShare,
};
}
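// Illustrative usage sketch (hypothetical, not part of the original module),
// mirroring how loadOrCreateShareUrl in useShareUrl consumes this hook:
//
//   const { createShare } = useShareActions();
//   const { shareId: linkShareId, sessionKey } = await createShare(
//       abortSignal,
//       shareId,
//       volumeId,
//       linkId
//   );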
| 3,153
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useShareUrl.ts
|
import { useApi, usePreventLeave } from '@proton/components';
import { CryptoProxy, SessionKey } from '@proton/crypto';
import { encodeUtf8 } from '@proton/crypto/lib/utils';
import {
queryCreateSharedLink,
queryDeleteMultipleSharedLinks,
queryDeleteSharedLink,
querySharedLinks,
queryUpdateSharedLink,
} from '@proton/shared/lib/api/drive/sharing';
import {
BATCH_REQUEST_SIZE,
DEFAULT_SHARE_MAX_ACCESSES,
MAX_THREADS_PER_REQUEST,
RESPONSE_CODE,
SHARE_GENERATED_PASSWORD_LENGTH,
} from '@proton/shared/lib/drive/constants';
import {
base64StringToUint8Array,
stringToUint8Array,
uint8ArrayToBase64String,
} from '@proton/shared/lib/helpers/encoding';
import runInQueue from '@proton/shared/lib/helpers/runInQueue';
import {
ShareURL as ShareURLPayload,
SharedURLFlags,
SharedURLSessionKeyPayload,
} from '@proton/shared/lib/interfaces/drive/sharing';
import { decryptUnsigned, encryptUnsigned } from '@proton/shared/lib/keys/driveKeys';
import { generateKeySaltAndPassphrase } from '@proton/shared/lib/keys/keys';
import { srpGetVerify } from '@proton/shared/lib/srp';
import { computeKeyPassword } from '@proton/srp';
import chunk from '@proton/utils/chunk';
import getRandomString from '@proton/utils/getRandomString';
import groupWith from '@proton/utils/groupWith';
import isTruthy from '@proton/utils/isTruthy';
import unique from '@proton/utils/unique';
import { sendErrorReport } from '../../utils/errorHandling';
import { shareUrlPayloadToShareUrl, useDebouncedRequest } from '../_api';
import { useDriveCrypto } from '../_crypto';
import { useDriveEventManager } from '../_events';
import { useLink } from '../_links';
import { useVolumesState } from '../_volumes';
import { ShareURL, UpdateSharedURL } from './interface';
import { getSharedLink } from './shareUrl';
import useShare from './useShare';
import useShareActions from './useShareActions';
/**
* useShareUrl provides actions to manipulate share URLs.
*
* This file needs a bit of love. First, let's transform ShareURL into a nicer
* interface and compute some flags so we don't need to use the shareUrl
* helpers. Second, let's separate it into two layers, similarly to how links
* are: this module handles only communication with the API for the needs of
* the web client, and another layer in the actions folder wraps it with error
* reporting and user-facing messages. Third, let's remove notifications and
* other business logic from the ShareLinkModal. Fourth, cover it with tests!
*/
export default function useShareUrl() {
const api = useApi();
const { preventLeave } = usePreventLeave();
const debouncedRequest = useDebouncedRequest();
const driveCrypto = useDriveCrypto();
const events = useDriveEventManager();
const { createShare, deleteShare } = useShareActions();
const { getShare, getShareSessionKey } = useShare();
const { getLink, loadFreshLink } = useLink();
const volumeState = useVolumesState();
const fetchShareUrl = async (abortSignal: AbortSignal, shareId: string): Promise<ShareURL | undefined> => {
const { ShareURLs = [] } = await debouncedRequest<{
ShareURLs: ShareURLPayload[];
}>(querySharedLinks(shareId, { Page: 0, Recursive: 0, PageSize: 10 }), abortSignal);
return ShareURLs.length ? shareUrlPayloadToShareUrl(ShareURLs[0]) : undefined;
};
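// decryptShareSessionKey decrypts the password-encrypted session key packet;
// the packet may arrive armored (string) or as raw binary (Uint8Array), which
// decides the message type passed to CryptoProxy below.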
const decryptShareSessionKey = async (keyPacket: string | Uint8Array, password: string) => {
const messageType = keyPacket instanceof Uint8Array ? 'binaryMessage' : 'armoredMessage';
return CryptoProxy.decryptSessionKey({ [messageType]: keyPacket, passwords: [password] });
};
const decryptShareUrl = async ({
creatorEmail,
password,
sharePassphraseKeyPacket,
sharePasswordSalt,
...rest
}: ShareURL) => {
const privateKeys = await driveCrypto.getPrivateAddressKeys(creatorEmail);
const decryptedPassword = await decryptUnsigned({
armoredMessage: password,
privateKey: privateKeys,
}).catch((e) =>
Promise.reject(
new Error('Failed to decrypt share URL password', {
cause: {
e,
shareId: rest.shareId,
keyId: privateKeys.map((key) => key.getKeyID()),
},
})
)
);
const sharedLinkPassword: string = await computeKeyPassword(decryptedPassword, sharePasswordSalt);
const shareSessionKey = await decryptShareSessionKey(
base64StringToUint8Array(sharePassphraseKeyPacket),
sharedLinkPassword
).catch((e) =>
Promise.reject(
new Error('Failed to decrypt share session key for shared URL', {
cause: {
e,
shareId: rest.shareId,
},
})
)
);
if (!shareSessionKey) {
throw new Error('Failed to decrypt share session key for shared URL', {
cause: {
shareId: rest.shareId,
},
});
}
return {
shareUrl: {
...rest,
creatorEmail,
password: decryptedPassword,
sharePassphraseKeyPacket,
sharePasswordSalt,
},
keyInfo: {
sharePasswordSalt,
shareSessionKey,
},
};
};
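// encryptSymmetricSessionKey is the counterpart of decryptShareSessionKey
// above: it wraps the session key into a binary, password-encrypted key
// packet and returns it base64-encoded for the API payload.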
const encryptSymmetricSessionKey = async (sessionKey: SessionKey, password: string) => {
const symmetric = await CryptoProxy.encryptSessionKey({
data: sessionKey.data,
algorithm: sessionKey.algorithm,
passwords: [password],
format: 'binary',
});
return uint8ArrayToBase64String(symmetric);
};
const encryptShareUrlPassword = async (decryptedPassword: string, creatorEmail: string) => {
const {
address: { Email: email },
publicKey,
} = await driveCrypto.getOwnAddressAndPrimaryKeys(creatorEmail);
const password = await encryptUnsigned({
message: stringToUint8Array(encodeUtf8(decryptedPassword)),
publicKey,
});
return { email, password };
};
const createShareUrl = async (
abortSignal: AbortSignal,
shareId: string,
linkShareId: string,
linkShareSessionKey: SessionKey
): Promise<{
shareUrl: ShareURL;
keyInfo: {
shareSessionKey: SessionKey;
sharePasswordSalt: string;
};
}> => {
const password = getRandomString(SHARE_GENERATED_PASSWORD_LENGTH);
const credentials = { password };
const getSharedLinkPassphraseSaltAndKeyPacket = async () => {
const { salt, passphrase } = await generateKeySaltAndPassphrase(password);
const keyPacket = await encryptSymmetricSessionKey(linkShareSessionKey, passphrase);
return { salt, keyPacket };
};
const share = await getShare(abortSignal, shareId);
const [
{ salt: SharePasswordSalt, keyPacket: SharePassphraseKeyPacket },
{ email: CreatorEmail, password: Password },
{
Auth: { Salt: UrlPasswordSalt, Verifier: SRPVerifier, ModulusID: SRPModulusID },
},
] = await Promise.all([
getSharedLinkPassphraseSaltAndKeyPacket().catch((e) =>
Promise.reject(
new Error('Failed to encrypt share URL session key', {
cause: {
e,
shareId,
linkShareId,
},
})
)
),
encryptShareUrlPassword(password, share.creator).catch((e) =>
Promise.reject(
new Error('Failed to encrypt share URL password', {
cause: {
e,
shareId,
linkShareId,
},
})
)
),
srpGetVerify({
api,
credentials,
}),
]);
const shareUrl = await preventLeave(
debouncedRequest<{ ShareURL: ShareURLPayload }>(
queryCreateSharedLink(linkShareId, {
Flags: SharedURLFlags.GeneratedPasswordIncluded,
Permissions: 4,
MaxAccesses: DEFAULT_SHARE_MAX_ACCESSES,
CreatorEmail,
ExpirationDuration: null,
SharePassphraseKeyPacket,
SRPModulusID,
SRPVerifier,
SharePasswordSalt,
UrlPasswordSalt,
Password,
})
)
).then(({ ShareURL }) => shareUrlPayloadToShareUrl(ShareURL));
const volumeId = volumeState.findVolumeId(shareId);
if (volumeId) {
await events.pollEvents.volumes(volumeId);
}
return {
shareUrl: {
...shareUrl,
password,
},
keyInfo: {
shareSessionKey: linkShareSessionKey,
sharePasswordSalt: SharePasswordSalt,
},
};
};
const loadOrCreateShareUrl = async (
abortSignal: AbortSignal,
shareId: string,
linkId: string
): Promise<{
shareUrl: ShareURL;
keyInfo: {
shareSessionKey: SessionKey;
sharePasswordSalt: string;
};
}> => {
const [share, link] = await Promise.all([
getShare(abortSignal, shareId),
loadFreshLink(abortSignal, shareId, linkId),
]);
if (!link.parentLinkId) {
throw Error('Root folder cannot be shared');
}
const { shareId: linkShareId, sessionKey: linkShareSessionKey } = link.shareId
? { shareId: link.shareId, sessionKey: await getShareSessionKey(abortSignal, link.shareId) }
: await createShare(abortSignal, shareId, share.volumeId, linkId);
const shareUrl = await fetchShareUrl(abortSignal, linkShareId);
if (shareUrl) {
return decryptShareUrl(shareUrl);
}
return createShareUrl(abortSignal, shareId, linkShareId, linkShareSessionKey).catch((err) => {
// If share URL creation was aborted, remove its share as well,
// since at this moment we support only sharing via link.
if (abortSignal.aborted) {
void deleteShare(linkShareId);
}
throw err;
});
};
const loadShareUrl = async (
abortSignal: AbortSignal,
shareId: string,
linkId: string
): Promise<ShareURL | undefined> => {
const link = await loadFreshLink(abortSignal, shareId, linkId);
if (!link.shareId || !link.shareUrl) {
return;
}
const shareUrl = await fetchShareUrl(abortSignal, link.shareId);
if (!shareUrl) {
return;
}
const { shareUrl: decryptedShareUrl } = await decryptShareUrl(shareUrl);
return decryptedShareUrl;
};
const loadShareUrlLink = async (
abortSignal: AbortSignal,
shareId: string,
linkId: string
): Promise<string | undefined> => {
const shareUrl = await loadShareUrl(abortSignal, shareId, linkId);
return getSharedLink(shareUrl);
};
const loadShareUrlNumberOfAccesses = async (
abortSignal: AbortSignal,
shareId: string,
linkId: string
): Promise<number | undefined> => {
const shareUrl = await loadShareUrl(abortSignal, shareId, linkId);
return shareUrl?.numAccesses;
};
/*
* `password` can come in several shapes:
* - <initial>, flags === 0 – legacy without custom password
* - <custom>, flags === 1 – legacy custom password
* - <generated>, flags === 2 – without custom password
* - <generated><custom>, flags === 3 – contains both generated and custom passwords
* There are four bit array states that can be used as `flags`:
* - `0` - legacy shared link without custom password.
* - `1` - legacy shared link with custom password.
* - `2` - shared link with generated password, without custom password.
* - `3` - shared link with both generated and custom passwords.
* Legacy shared links are no longer supported and can no longer be modified
* (the user needs to delete them and share the links again), so we can
* ignore those cases and focus only on the new flags.
*/
const getSharedLinkUpdatedFlags = (password: string) => {
if (password.length === SHARE_GENERATED_PASSWORD_LENGTH) {
return SharedURLFlags.GeneratedPasswordIncluded;
}
return SharedURLFlags.CustomPassword | SharedURLFlags.GeneratedPasswordIncluded;
};
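// Worked example of the bitwise combination above, using the flag values
// implied by the table in the comment (CustomPassword === 1,
// GeneratedPasswordIncluded === 2):
//   '<generated>' of SHARE_GENERATED_PASSWORD_LENGTH chars -> flags === 2
//   '<generated><custom>' (any other length)               -> 1 | 2 === 3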
const getFieldsToUpdateForPassword = async (
newPassword: string,
creatorEmail: string,
_flags: number,
keyInfo: SharedURLSessionKeyPayload
): Promise<Partial<UpdateSharedURL>> => {
const { sharePasswordSalt, shareSessionKey } = keyInfo;
const [
sharePassphraseKeyPacket,
{ password },
{
Auth: { Salt: urlPasswordSalt, Verifier: srpVerifier, ModulusID: srpModulusID },
},
] = await Promise.all([
computeKeyPassword(newPassword, sharePasswordSalt)
.then((sharedLinkPassword) => encryptSymmetricSessionKey(shareSessionKey, sharedLinkPassword))
.catch((e) =>
Promise.reject(
new Error('Failed to encrypt share URL session key', {
cause: {
e,
},
})
)
),
encryptShareUrlPassword(newPassword, creatorEmail).catch((e) =>
Promise.reject(
new Error('Failed to encrypt share URL password', {
cause: {
e,
},
})
)
),
srpGetVerify({
api,
credentials: { password: newPassword },
}),
]);
const fieldsToUpdate: Partial<UpdateSharedURL> = {
flags: getSharedLinkUpdatedFlags(newPassword),
password,
sharePassphraseKeyPacket,
srpVerifier,
srpModulusID,
urlPasswordSalt,
};
return fieldsToUpdate;
};
const updateShareUrl = async (
shareUrlInfo: {
creatorEmail: string;
shareId: string;
shareUrlId: string;
flags: number;
keyInfo: SharedURLSessionKeyPayload;
},
newDuration?: number | null,
newPassword?: string
) => {
const { creatorEmail, shareId, shareUrlId, flags, keyInfo } = shareUrlInfo;
let fieldsToUpdate: Partial<UpdateSharedURL> = {};
if (newDuration !== undefined) {
fieldsToUpdate = { expirationDuration: newDuration };
}
if (newPassword !== undefined) {
const fieldsToUpdateForPassword = await getFieldsToUpdateForPassword(
newPassword,
creatorEmail,
flags,
keyInfo
).catch((e) =>
Promise.reject(
new Error('Failed to update share URL password', {
cause: {
e,
shareId,
shareUrlId,
},
})
)
);
fieldsToUpdate = {
...fieldsToUpdate,
...fieldsToUpdateForPassword,
};
}
const shareUrl = await preventLeave(
debouncedRequest<{ ShareURL: ShareURLPayload }>(
queryUpdateSharedLink(shareId, shareUrlId, {
SharePasswordSalt: fieldsToUpdate.sharePasswordSalt,
SharePassphraseKeyPacket: fieldsToUpdate.sharePassphraseKeyPacket,
Permissions: fieldsToUpdate.permissions,
Password: fieldsToUpdate.password,
MaxAccesses: fieldsToUpdate.maxAccesses,
Flags: fieldsToUpdate.flags,
ExpirationDuration: fieldsToUpdate.expirationDuration,
ExpirationTime: fieldsToUpdate.expirationTime,
SRPModulusID: fieldsToUpdate.srpModulusID,
SRPVerifier: fieldsToUpdate.srpVerifier,
UrlPasswordSalt: fieldsToUpdate.urlPasswordSalt,
})
)
).then(({ ShareURL }) => shareUrlPayloadToShareUrl(ShareURL));
// Update the password value to the decrypted one.
if (newPassword) {
fieldsToUpdate.password = newPassword;
}
await events.pollEvents.driveEvents();
return {
...fieldsToUpdate,
expirationTime: shareUrl.expirationTime,
};
};
const deleteShareUrl = async (shareId: string, shareUrlId: string) => {
const deletePromise = async () => {
await debouncedRequest(queryDeleteSharedLink(shareId, shareUrlId)).catch((error) => {
sendErrorReport(error);
throw error;
});
// Let's only collect reports when the share cannot be deleted, but not
// bother users about it - the link itself was deleted fine.
await deleteShare(shareId).catch(sendErrorReport);
};
await preventLeave(deletePromise());
await events.pollEvents.driveEvents();
};
const deleteShareUrls = async (abortSignal: AbortSignal, ids: { linkId: string; shareId: string }[]) => {
const links = await Promise.all(ids.map(({ linkId, shareId }) => getLink(abortSignal, shareId, linkId)));
const successes: string[] = [];
const failures: { [linkId: string]: any } = {};
// First delete URLs in batches so each request is of reasonable size.
const sharedLinks = links
.map(({ linkId, shareUrl, rootShareId }) => ({ linkId, rootShareId, shareUrlId: shareUrl?.id }))
.filter(({ shareUrlId }) => shareUrlId) as { linkId: string; shareUrlId: string; rootShareId: string }[];
const groupedLinksByShareId = groupWith((a, b) => a.rootShareId === b.rootShareId, sharedLinks);
const batches: (typeof sharedLinks)[] = [];
groupedLinksByShareId.forEach((linkGroup) => {
if (linkGroup.length <= BATCH_REQUEST_SIZE) {
batches.push(linkGroup);
return;
}
batches.push(...chunk(sharedLinks, BATCH_REQUEST_SIZE));
});
const deleteShareUrlQueue = batches.map(
(batchLinks) => () =>
debouncedRequest<{ Responses: { ShareURLID: string; Response: { Code: number } }[] }>(
queryDeleteMultipleSharedLinks(
batchLinks[0].rootShareId,
batchLinks.map(({ shareUrlId }) => shareUrlId)
)
)
.then(({ Responses }) =>
Responses.forEach(({ Response }, index) => {
const linkId = batchLinks[index].linkId;
if (Response.Code === RESPONSE_CODE.SUCCESS) {
successes.push(linkId);
} else {
failures[linkId] = Response.Code;
}
})
)
.catch((error) => {
batchLinks.forEach(({ linkId }) => (failures[linkId] = error));
})
);
await preventLeave(runInQueue(deleteShareUrlQueue, MAX_THREADS_PER_REQUEST));
// Once we know how many URLs we deleted, we can delete the shares themselves.
// Note this needs to be changed once we support sharing between members.
const sharedIds = [...new Set(links.map(({ shareId }) => shareId).filter(isTruthy))];
const deleteShareQueue = sharedIds.map((shareId) => async () => {
// Let's only collect reports when the share cannot be deleted, but not
// bother users about it - the link itself was deleted fine.
await deleteShare(shareId).catch(sendErrorReport);
});
await preventLeave(runInQueue(deleteShareQueue, MAX_THREADS_PER_REQUEST));
const shareIdsToUpdate = unique(batches.map((batch) => batch[0].rootShareId));
const volumeIds = unique(
shareIdsToUpdate.map((shareId) => {
return volumeState.findVolumeId(shareId);
})
).filter(isTruthy);
if (volumeIds.length) {
await events.pollEvents.volumes(volumeIds);
}
return { successes, failures };
};
return {
// This is a bit of a hack to nicely report all errors. It might collect
// a few more errors than we need, and it might not result in a proper
// error message for the user. See the comment on useShareUrl at the top.
loadOrCreateShareUrl: (abortSignal: AbortSignal, shareId: string, linkId: string) =>
loadOrCreateShareUrl(abortSignal, shareId, linkId).catch((error) => {
sendErrorReport(error);
throw error;
}),
loadShareUrlLink,
loadShareUrlNumberOfAccesses,
updateShareUrl: (
shareUrlInfo: {
creatorEmail: string;
shareId: string;
shareUrlId: string;
flags: number;
keyInfo: SharedURLSessionKeyPayload;
},
newDuration?: number | null,
newPassword?: string
) =>
updateShareUrl(shareUrlInfo, newDuration, newPassword).catch((error) => {
sendErrorReport(error);
throw error;
}),
deleteShareUrl,
deleteShareUrls,
};
}
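// Illustrative usage sketch (hypothetical, not part of the original module):
//
//   const { loadOrCreateShareUrl } = useShareUrl();
//   const { shareUrl } = await loadOrCreateShareUrl(abortSignal, shareId, linkId);
//   const publicLink = getSharedLink(shareUrl); // builds the user-facing URL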
| 3,154
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useSharesKeys.test.tsx
|
import { SharesKeysStorage } from './useSharesKeys';
describe('useSharesKeys', () => {
let keys: SharesKeysStorage;
beforeEach(() => {
keys = new SharesKeysStorage();
});
it('returns empty passphrase when not set', () => {
// @ts-ignore: We simplify types in tests, so we don't have to construct OpenPGP key.
keys.set('shareId', 'pk', 'sk');
const passphrase = keys.get('missingShareId');
expect(passphrase).toBe(undefined);
});
it('returns the cached passphrase', () => {
// @ts-ignore: We simplify types in tests, so we don't have to construct OpenPGP key.
keys.set('shareId', 'pk', 'sk');
const passphrase = keys.get('shareId');
expect(passphrase).toMatchObject({
privateKey: 'pk',
sessionKey: 'sk',
});
});
});
| 3,155
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useSharesKeys.tsx
|
import { createContext, useContext } from 'react';
import { PrivateKeyReference, SessionKey } from '@proton/crypto';
type SharesKeys = {
[shareId: string]: ShareKeys;
};
export type ShareKeys = {
privateKey: PrivateKeyReference;
sessionKey?: SessionKey;
};
/**
* SharesKeysStorage provides simple storage to cache share keys.
* Ideally, there should be only one instance in the whole app.
*/
export class SharesKeysStorage {
private keys: SharesKeys;
constructor() {
this.keys = {};
}
get(shareId: string): ShareKeys | undefined {
return this.keys[shareId];
}
set(shareId: string, privateKey: PrivateKeyReference, sessionKey?: SessionKey) {
this.keys[shareId] = {
privateKey,
sessionKey,
};
}
}
const SharesKeysContext = createContext<SharesKeysStorage | null>(null);
export function SharesKeysProvider({ children }: { children: React.ReactNode }) {
const value = new SharesKeysStorage();
return <SharesKeysContext.Provider value={value}>{children}</SharesKeysContext.Provider>;
}
export default function useSharesKeys() {
const state = useContext(SharesKeysContext);
if (!state) {
throw new Error('Trying to use uninitialized SharesKeysProvider');
}
return state;
}
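// Illustrative usage sketch (hypothetical, not part of the original module):
//
//   const sharesKeys = useSharesKeys();
//   sharesKeys.set(shareId, privateKey, sessionKey);
//   sharesKeys.get(shareId); // -> { privateKey, sessionKey }
//   sharesKeys.get('unknownShareId'); // -> undefined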
| 3,156
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useSharesState.test.tsx
|
import { act, renderHook } from '@testing-library/react-hooks';
import { Share, ShareState, ShareType } from './interface';
import { useSharesStateProvider } from './useSharesState';
function createTestShare(
shareId: string,
volumeId: string,
flags = {
isLocked: false,
isDefault: false,
isVolumeSoftDeleted: false,
type: ShareType.standard,
state: ShareState.active,
}
): Share {
return {
shareId,
rootLinkId: 'linkId',
volumeId,
creator: 'creator',
...flags,
possibleKeyPackets: [],
};
}
describe('useSharesState', () => {
let hook: {
current: ReturnType<typeof useSharesStateProvider>;
};
const mainShare1 = createTestShare('mainShare1', 'volume1', {
isLocked: true,
isDefault: true,
isVolumeSoftDeleted: false,
type: ShareType.default,
state: ShareState.active,
});
const device1 = createTestShare('device1', 'volume1', {
isLocked: true,
isDefault: false,
isVolumeSoftDeleted: false,
type: ShareType.device,
state: ShareState.active,
});
const device2 = createTestShare('device2', 'volume1', {
isLocked: true,
isDefault: false,
isVolumeSoftDeleted: false,
type: ShareType.device,
state: ShareState.active,
});
const share1 = createTestShare('share1', 'volume1', {
isLocked: true,
isDefault: false,
isVolumeSoftDeleted: false,
type: ShareType.standard,
state: ShareState.active,
});
const mainShare2 = createTestShare('mainShare2', 'volume2', {
isLocked: true,
isDefault: true,
isVolumeSoftDeleted: false,
type: ShareType.default,
state: ShareState.active,
});
const device3 = createTestShare('device3', 'volume2', {
isLocked: true,
isDefault: false,
isVolumeSoftDeleted: false,
type: ShareType.device,
state: ShareState.active,
});
const share2 = createTestShare('share2', 'volume2', {
isLocked: true,
isDefault: false,
isVolumeSoftDeleted: false,
type: ShareType.standard,
state: ShareState.active,
});
const mainShare3 = createTestShare('mainShare3', 'volume3', {
isLocked: false,
isDefault: true,
isVolumeSoftDeleted: false,
type: ShareType.default,
state: ShareState.active,
});
const device4 = createTestShare('device4', 'volume3', {
isLocked: false,
isDefault: false,
isVolumeSoftDeleted: false,
type: ShareType.device,
state: ShareState.active,
});
const mainShare4 = createTestShare('mainShare4', 'volume4', {
isLocked: true,
isDefault: true,
isVolumeSoftDeleted: true,
type: ShareType.default,
state: ShareState.active,
});
const device5 = createTestShare('device5', 'volume4', {
isLocked: true,
isDefault: false,
isVolumeSoftDeleted: true,
type: ShareType.device,
state: ShareState.active,
});
beforeEach(() => {
jest.resetAllMocks();
const { result } = renderHook(() => useSharesStateProvider());
hook = result;
act(() => {
hook.current.setShares([
mainShare1,
device1,
device2,
share1,
mainShare2,
device3,
share2,
mainShare3,
device4,
mainShare4,
device5,
]);
});
});
it('returns only locked undeleted shares with their devices', async () => {
const lockedShares = hook.current.getLockedShares();
expect(lockedShares).toMatchObject([
{
defaultShare: mainShare1,
devices: [device1, device2],
},
{
defaultShare: mainShare2,
devices: [device3],
},
]);
});
});
| 3,157
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useSharesState.tsx
|
import { createContext, useCallback, useContext, useState } from 'react';
import { LockedVolumeForRestore, Share, ShareState, ShareType, ShareWithKey } from './interface';
type SharesState = {
[shareId: string]: Share | ShareWithKey;
};
/**
* useSharesStateProvider provides storage to cache shares.
*/
export function useSharesStateProvider() {
const [state, setState] = useState<SharesState>({});
const [lockedVolumesForRestore, setLockedVolumesForRestore] = useState<LockedVolumeForRestore[]>([]);
const setShares = useCallback((shares: (Share | ShareWithKey)[]) => {
setState((state) => {
shares.forEach((share) => {
state[share.shareId] = share;
});
return { ...state };
});
}, []);
const removeShares = useCallback((shareIds: string[]) => {
setState((state) => {
return Object.fromEntries(Object.entries(state).filter(([shareId]) => !shareIds.includes(shareId)));
});
}, []);
const getShare = useCallback(
(shareId: string): Share | ShareWithKey | undefined => {
return state[shareId];
},
[state]
);
/**
* In the past, a volume only had a single default share, making it
* appropriate to match a share with its volume. However, a volume can
* contain multiple root shares - one default share and any number of
* device or photos shares. A volume has to be unlocked in one request,
* so we need to prepare the default share together with its devices and
* photos for the same volume. In the future it makes sense to move this
* logic fully to the volume section.
*/
const getLockedShares = useCallback((): {
defaultShare: Share | ShareWithKey;
devices: (Share | ShareWithKey)[];
photos: (Share | ShareWithKey)[];
}[] => {
return Object.values(state)
.filter((share) => share.isLocked && share.isDefault && !share.isVolumeSoftDeleted)
.map((defaultShare) => ({
defaultShare,
devices: Object.values(state).filter(
(share) =>
share.isLocked && share.type === ShareType.device && share.volumeId === defaultShare.volumeId
),
photos: Object.values(state).filter(
(share) =>
share.isLocked && share.type === ShareType.photos && share.volumeId === defaultShare.volumeId
),
}));
}, [state]);
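// Example of the grouping above (hypothetical data): given shares
//   a = { shareId: 'a', volumeId: 'v1', isDefault: true, isLocked: true }
//   b = { shareId: 'b', volumeId: 'v1', type: ShareType.device, isLocked: true }
// getLockedShares() returns [{ defaultShare: a, devices: [b], photos: [] }].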
const getDefaultShareId = useCallback((): string | undefined => {
return findDefaultShareId(Object.entries(state).map(([, share]) => share));
}, [state]);
const getDefaultPhotosShareId = useCallback((): string | undefined => {
return findDefaultPhotosShareId(Object.entries(state).map(([, share]) => share));
}, [state]);
const getRestoredPhotosShares = useCallback((): Share[] | ShareWithKey[] | undefined => {
return Object.values(state).filter(
(share) => share.state === ShareState.restored && !share.isLocked && share.type === ShareType.photos
);
}, [state]);
return {
setShares,
removeShares,
getShare,
getLockedShares,
getDefaultShareId,
getDefaultPhotosShareId,
getRestoredPhotosShares,
setLockedVolumesForRestore,
lockedVolumesForRestore,
};
}
export function findDefaultShareId(shares: (Share | ShareWithKey)[]) {
const share = shares.find((share) => share.isDefault && !share.isLocked && !share.isVolumeSoftDeleted);
return share ? share.shareId : undefined;
}
export function findDefaultPhotosShareId(shares: (Share | ShareWithKey)[]) {
const share = shares.find(
(share) => share.state === ShareState.active && !share.isLocked && share.type === ShareType.photos
);
return share ? share.shareId : undefined;
}
const SharesStateContext = createContext<ReturnType<typeof useSharesStateProvider> | null>(null);
export function SharesStateProvider({ children }: { children: React.ReactNode }) {
const value = useSharesStateProvider();
return <SharesStateContext.Provider value={value}>{children}</SharesStateContext.Provider>;
}
export default function useSharesState() {
const state = useContext(SharesStateContext);
if (!state) {
throw new Error('Trying to use uninitialized SharesStateProvider');
}
return state;
}
| 3,158
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useVolume.ts
|
import { queryCreateDriveVolume } from '@proton/shared/lib/api/drive/volume';
import { CreatedDriveVolumeResult } from '@proton/shared/lib/interfaces/drive/volume';
import { generateDriveBootstrap, generateNodeHashKey } from '@proton/shared/lib/keys/driveKeys';
import { useDebouncedRequest } from '../_api';
import { useDriveCrypto } from '../_crypto';
export default function useVolume() {
const debouncedRequest = useDebouncedRequest();
const { getPrimaryAddressKey } = useDriveCrypto();
const createVolume = async (): Promise<{ volumeId: string; shareId: string; linkId: string }> => {
// Volumes should use the primary address key, as we only create
// a new volume when bootstrapping an empty user.
//
// In this scenario, there are no other preferred keys.
const { address, privateKey } = await getPrimaryAddressKey();
const { bootstrap, folderPrivateKey } = await generateDriveBootstrap(privateKey);
const { NodeHashKey: FolderHashKey } = await generateNodeHashKey(folderPrivateKey, folderPrivateKey);
const { Volume } = await debouncedRequest<CreatedDriveVolumeResult>(
queryCreateDriveVolume({
AddressID: address.ID,
VolumeName: 'MainVolume',
ShareName: 'MainShare',
FolderHashKey,
...bootstrap,
})
);
return {
volumeId: Volume.ID,
shareId: Volume.Share.ID,
linkId: Volume.Share.LinkID,
};
};
return {
createVolume,
};
}
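// Illustrative usage sketch (hypothetical, not part of the original module),
// typically run once when bootstrapping a user without a volume:
//
//   const { createVolume } = useVolume();
//   const { volumeId, shareId, linkId } = await createVolume();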
| 3,159
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useLockedVolume/index.ts
|
export { default } from './useLockedVolume';
| 3,160
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useLockedVolume/useLockedVolume.test.tsx
|
import { act, renderHook } from '@testing-library/react-hooks';
import { User } from '@proton/shared/lib/interfaces';
import { getDecryptedUserKeysHelper } from '@proton/shared/lib/keys';
import { getUserKey } from '@proton/shared/test/keys/keyDataHelper';
import { generateAddress, releaseCryptoProxy, setupCryptoProxyForTesting } from '../../../utils/test/crypto';
import { VolumesStateProvider } from '../../_volumes/useVolumesState';
import { LockedVolumeForRestore } from '../interface';
import useSharesState from '../useSharesState';
import useLockedVolume, { useLockedVolumeInner } from './useLockedVolume';
const mockRequest = jest.fn();
jest.mock('../../_api/useDebouncedRequest', () => {
const useDebouncedRequest = () => {
return mockRequest;
};
return useDebouncedRequest;
});
jest.mock('../../_utils/useDebouncedFunction', () => {
const useDebouncedFunction = () => {
return (wrapper: any) => wrapper();
};
return useDebouncedFunction;
});
jest.mock('@proton/components/hooks/usePreventLeave', () => {
const usePreventLeave = () => {
return (wrapper: any) => wrapper();
};
return usePreventLeave;
});
const SHARE_MOCK_1 = {
shareId: 'shareId1',
lockedVolumeId: 'volumeId1',
linkDecryptedPassphrase: 'passphrase',
};
const SHARE_MOCK_2 = {
shareId: 'shareId2',
lockedVolumeId: 'volumeId2',
linkDecryptedPassphrase: 'passphrase',
};
const LOCKED_VOLUME_MOCK_1 = {
lockedVolumeId: 'volumeId1',
defaultShare: SHARE_MOCK_1,
devices: [],
photos: [],
};
const LOCKED_VOLUME_MOCK_2 = {
lockedVolumeId: 'volumeId2',
defaultShare: SHARE_MOCK_2,
devices: [],
photos: [],
};
const mockGetLockedShares = jest.fn();
const mockGetShareWithKey = jest.fn();
const sharesStateMock: ReturnType<typeof useSharesState> = {
getLockedShares: mockGetLockedShares,
removeShares: jest.fn(),
setShares: jest.fn(),
getShare: jest.fn(),
getDefaultShareId: jest.fn(),
setLockedVolumesForRestore: jest.fn(),
lockedVolumesForRestore: [],
getDefaultPhotosShareId: jest.fn(),
getRestoredPhotosShares: jest.fn(),
};
const generateAddressKeys = async () => {
const keyPassword = 'password';
const userKeysFull = await Promise.all([getUserKey('a', keyPassword, 2), getUserKey('b', keyPassword, 2)]);
const UserKeys = userKeysFull.map(({ Key }) => Key);
const User1 = {
Keys: UserKeys.slice(0, 1),
} as unknown as User;
const decryptedUserKeys = await getDecryptedUserKeysHelper(User1, keyPassword);
return [
{
address: await generateAddress(UserKeys, 'email@pm.me'),
keys: decryptedUserKeys,
},
];
};
const useHook = (props: any = {}) => {
return useLockedVolumeInner({
sharesState: sharesStateMock,
getShareWithKey: jest.fn(),
addressesKeys: [],
getDefaultShare: jest.fn(),
getOwnAddressAndPrimaryKeys: jest.fn(),
prepareVolumeForRestore: jest.fn(),
getLinkPrivateKey: jest.fn(),
getLinkHashKey: jest.fn(),
getDeviceByShareId: jest.fn(),
...props,
});
};
describe('useLockedVolume', () => {
const abortSignal = new AbortController().signal;
let hook: {
current: ReturnType<typeof useLockedVolume>;
};
const wrapper = ({ children }: { children: React.ReactNode }) => (
<VolumesStateProvider>{children}</VolumesStateProvider>
);
beforeAll(async () => {
await setupCryptoProxyForTesting();
});
afterAll(async () => {
await releaseCryptoProxy();
});
beforeEach(() => {
jest.resetAllMocks();
mockGetLockedShares.mockImplementation(() => {
return [];
});
});
describe('prepareVolumesForRestore', () => {
it("should return locked volumes if there's no private keys associated with address", async () => {
const { result } = renderHook(() => useHook(), { wrapper });
hook = result;
sharesStateMock.lockedVolumesForRestore = [LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2];
await act(async () => {
const result = await hook.current.prepareVolumesForRestore(abortSignal);
expect(result).toMatchObject([LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2]);
});
});
it("should return locked volumes if there's no locked unprepared shares", async () => {
const addressesKeys = await generateAddressKeys();
const { result } = renderHook(
() =>
useHook({
addressesKeys,
}),
{ wrapper }
);
hook = result;
sharesStateMock.lockedVolumesForRestore = [LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2];
await act(async () => {
const result = await hook.current.prepareVolumesForRestore(abortSignal);
expect(result).toMatchObject([LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2]);
});
});
it("should return locked volumes if there's no new prepared shares", async () => {
mockGetLockedShares.mockImplementation(() => {
return [{ defaultShare: { shareId: 'shareId' }, devices: [], photos: [] }];
});
mockGetShareWithKey.mockImplementation(() => {
return [{ shareId: 'shareId' }];
});
const addressesKeys = await generateAddressKeys();
const { result } = renderHook(
() =>
useHook({
addressesKeys,
getShareWithKey: mockGetShareWithKey,
}),
{ wrapper }
);
hook = result;
sharesStateMock.lockedVolumesForRestore = [LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2];
await act(async () => {
const result = await hook.current.prepareVolumesForRestore(abortSignal);
expect(result).toMatchObject([LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2]);
});
});
it('should return extended volume list with new prepared volumes', async () => {
mockGetLockedShares.mockImplementation(() => {
return [{ defaultShare: { shareId: 'shareId' }, devices: [], photos: [] }];
});
mockGetShareWithKey.mockImplementation(() => {
return [{ shareId: 'shareId' }];
});
const lockedVolumeForRestore = {
lockedVolumeId: 'volumeId',
defaultShare: {
shareId: 'shareId',
linkDecryptedPassphrase: 'passphrase',
},
} as LockedVolumeForRestore;
const addressesKeys = await generateAddressKeys();
const { result } = renderHook(
() =>
useHook({
addressesKeys,
getShareWithKey: mockGetShareWithKey,
prepareVolumeForRestore: async () => lockedVolumeForRestore,
}),
{ wrapper }
);
hook = result;
sharesStateMock.lockedVolumesForRestore = [LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2];
await act(async () => {
const result = await hook.current.prepareVolumesForRestore(abortSignal);
expect(result).toMatchObject([LOCKED_VOLUME_MOCK_1, LOCKED_VOLUME_MOCK_2, lockedVolumeForRestore]);
});
});
});
});
| 3,161
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useLockedVolume/useLockedVolume.ts
|
import { useCallback } from 'react';
import { format } from 'date-fns';
import { c } from 'ttag';
import { useAddressesKeys, usePreventLeave } from '@proton/components';
import { CryptoProxy, PrivateKeyReference } from '@proton/crypto';
import { queryDeleteLockedVolumes, queryRestoreDriveVolume } from '@proton/shared/lib/api/drive/volume';
import { getEncryptedSessionKey } from '@proton/shared/lib/calendar/crypto/encrypt';
import { uint8ArrayToBase64String } from '@proton/shared/lib/helpers/encoding';
import { dateLocale } from '@proton/shared/lib/i18n';
import { Address } from '@proton/shared/lib/interfaces';
import { encryptPassphrase, generateLookupHash, sign } from '@proton/shared/lib/keys/driveKeys';
import isTruthy from '@proton/utils/isTruthy';
import { useDebouncedRequest } from '../../_api';
import { useDriveCrypto } from '../../_crypto';
import { useLink } from '../../_links';
import { GLOBAL_FORBIDDEN_CHARACTERS } from '../../_links/link';
import { useDebouncedFunction } from '../../_utils';
import { LockedDeviceForRestore, LockedPhotosForRestore, LockedVolumeForRestore, ShareWithKey } from './../interface';
import useDefaultShare from './../useDefaultShare';
import useShare from './../useShare';
import useSharesState from './../useSharesState';
import { getPossibleAddressPrivateKeys, prepareVolumeForRestore } from './utils';
type LockedShares = {
defaultShare: ShareWithKey;
devices: ShareWithKey[];
photos: ShareWithKey[];
}[];
/**
* useLockedVolume provides actions to delete or recover files from locked volumes.
*/
export default function useLockedVolume() {
const { getLinkPrivateKey, getLinkHashKey } = useLink();
return useLockedVolumeInner({
sharesState: useSharesState(),
getShareWithKey: useShare().getShareWithKey,
getDefaultShare: useDefaultShare().getDefaultShare,
addressesKeys: useAddressesKeys()[0],
getOwnAddressAndPrimaryKeys: useDriveCrypto().getOwnAddressAndPrimaryKeys,
prepareVolumeForRestore,
getLinkHashKey,
getLinkPrivateKey,
});
}
type LockedVolumesCallbacks = {
sharesState: ReturnType<typeof useSharesState>;
getShareWithKey: ReturnType<typeof useShare>['getShareWithKey'];
addressesKeys: ReturnType<typeof useAddressesKeys>[0];
getDefaultShare: ReturnType<typeof useDefaultShare>['getDefaultShare'];
getOwnAddressAndPrimaryKeys: ReturnType<typeof useDriveCrypto>['getOwnAddressAndPrimaryKeys'];
prepareVolumeForRestore: typeof prepareVolumeForRestore;
getLinkPrivateKey: ReturnType<typeof useLink>['getLinkPrivateKey'];
getLinkHashKey: ReturnType<typeof useLink>['getLinkHashKey'];
};
export function useLockedVolumeInner({
getShareWithKey,
sharesState,
addressesKeys,
getDefaultShare,
getOwnAddressAndPrimaryKeys,
prepareVolumeForRestore,
getLinkPrivateKey,
getLinkHashKey,
}: LockedVolumesCallbacks) {
const { preventLeave } = usePreventLeave();
const debouncedFunction = useDebouncedFunction();
const debouncedRequest = useDebouncedRequest();
const getLoadedLockedShares = useCallback(
async (abortSignal: AbortSignal): Promise<LockedShares> => {
return Promise.all(
sharesState.getLockedShares().map(async ({ defaultShare, devices, photos }) => {
return {
defaultShare: await getShareWithKey(abortSignal, defaultShare.shareId),
devices: await Promise.all(
devices.map((device) => getShareWithKey(abortSignal, device.shareId))
),
photos: await Promise.all(photos.map((photo) => getShareWithKey(abortSignal, photo.shareId))),
};
})
);
},
[sharesState.getLockedShares]
);
const getLockedUnpreparedShares = useCallback(
async (lockedShares: LockedShares) => {
return lockedShares.filter(
({ defaultShare: { volumeId } }) =>
!sharesState.lockedVolumesForRestore.some(({ lockedVolumeId }) => volumeId === lockedVolumeId)
);
},
[sharesState.lockedVolumesForRestore]
);
const getPreparedVolumes = useCallback(
async (lockedUnpreparedShares: LockedShares, addressPrivateKeys: PrivateKeyReference[]) => {
const preparedVolumes = await Promise.all(
lockedUnpreparedShares.map(({ defaultShare, devices, photos }) => {
return debouncedFunction(
async () => prepareVolumeForRestore(defaultShare, devices, photos, addressPrivateKeys),
['prepareVolumeForRestore', defaultShare.volumeId]
);
})
);
return preparedVolumes.filter(isTruthy);
},
[]
);
const prepareVolumesForRestore = useCallback(
async (abortSignal: AbortSignal): Promise<LockedVolumeForRestore[]> => {
const { lockedVolumesForRestore } = sharesState;
const addressPrivateKeys = getPossibleAddressPrivateKeys(addressesKeys);
if (!addressPrivateKeys?.length) {
return lockedVolumesForRestore;
}
const lockedUnpreparedShares = await getLockedUnpreparedShares(await getLoadedLockedShares(abortSignal));
if (!lockedUnpreparedShares.length) {
return lockedVolumesForRestore;
}
const newPreparedVolumes = await getPreparedVolumes(lockedUnpreparedShares, addressPrivateKeys);
if (!newPreparedVolumes.length) {
return lockedVolumesForRestore;
}
const volumes = [...lockedVolumesForRestore, ...newPreparedVolumes];
sharesState.setLockedVolumesForRestore(volumes);
return volumes;
},
[
addressesKeys,
getLockedUnpreparedShares,
getPreparedVolumes,
getLoadedLockedShares,
sharesState.setLockedVolumesForRestore,
sharesState.lockedVolumesForRestore,
]
);
const restoreVolume = async (
parentVolumeID: string,
privateKey: PrivateKeyReference,
hashKey: Uint8Array,
addressKey: PrivateKeyReference,
address: Address,
lockedVolumeId: string,
lockedShareLinkPassphraseRaw: string,
lockedDevices: LockedDeviceForRestore[],
lockedPhotos: LockedPhotosForRestore[]
) => {
if (!hashKey) {
throw new Error('Missing hash key on folder link');
}
const formattedDate = format(new Date(), 'Ppp', { locale: dateLocale }).replaceAll(
RegExp(GLOBAL_FORBIDDEN_CHARACTERS, 'g'),
' '
);
// translator: The date is in locale of user's preference. It's used for folder name and translating the beginning of the string is enough.
const restoreFolderName = c('Info').t`Restored files ${formattedDate}`;
const [
Hash,
{ NodePassphrase, NodePassphraseSignature },
{ message: encryptedName },
devicePassphrases,
photosPassphrases,
] = await Promise.all([
generateLookupHash(restoreFolderName, hashKey),
encryptPassphrase(privateKey, addressKey, lockedShareLinkPassphraseRaw),
CryptoProxy.encryptMessage({
textData: restoreFolderName,
stripTrailingSpaces: true,
encryptionKeys: privateKey,
signingKeys: addressKey,
}),
Promise.all(
lockedDevices.map(async (device) => {
const [sharePassphraseSignature, shareKeyPacket] = await Promise.all([
sign(device.shareDecryptedPassphrase, addressKey),
getEncryptedSessionKey(device.shareSessionKey, addressKey).then(uint8ArrayToBase64String),
]);
return {
sharePassphraseSignature,
shareKeyPacket,
};
})
),
Promise.all(
lockedPhotos.map(async (photo) => {
const [sharePassphraseSignature, shareKeyPacket] = await Promise.all([
sign(photo.shareDecryptedPassphrase, addressKey),
getEncryptedSessionKey(photo.shareSessionKey, addressKey).then(uint8ArrayToBase64String),
]);
return {
sharePassphraseSignature,
shareKeyPacket,
};
})
),
]);
const devicesPayload = lockedDevices.map(({ shareId }, idx) => {
const { shareKeyPacket, sharePassphraseSignature } = devicePassphrases[idx];
return {
LockedShareID: shareId,
ShareKeyPacket: shareKeyPacket,
PassphraseSignature: sharePassphraseSignature,
};
});
const photosPayload = lockedPhotos.map(({ shareId }, idx) => {
const { shareKeyPacket, sharePassphraseSignature } = photosPassphrases[idx];
return {
LockedShareID: shareId,
ShareKeyPacket: shareKeyPacket,
PassphraseSignature: sharePassphraseSignature,
};
});
await debouncedRequest(
queryRestoreDriveVolume(lockedVolumeId, {
Name: encryptedName,
SignatureAddress: address.Email,
Hash,
NodePassphrase,
NodePassphraseSignature,
TargetVolumeID: parentVolumeID,
Devices: devicesPayload,
PhotoShares: photosPayload,
})
);
};
const restoreVolumes = async (abortSignal: AbortSignal) => {
const defaultShare = await getDefaultShare(abortSignal);
const lockedVolumesForRestore = await prepareVolumesForRestore(abortSignal);
if (!defaultShare || !lockedVolumesForRestore.length) {
return;
}
const [privateKey, hashKey, { privateKey: addressKey, address }] = await Promise.all([
getLinkPrivateKey(abortSignal, defaultShare.shareId, defaultShare.rootLinkId),
getLinkHashKey(abortSignal, defaultShare.shareId, defaultShare.rootLinkId),
getOwnAddressAndPrimaryKeys(defaultShare.creator),
]);
// The backend does not support restoring multiple volumes at one time.
// Restoring is an async operation and the user has to trigger it again
// manually later for the next volume.
const restorePromiseList = [lockedVolumesForRestore[0]].map(async (lockedVolume) => {
await restoreVolume(
defaultShare.volumeId,
privateKey,
hashKey,
addressKey,
address,
lockedVolume.lockedVolumeId,
lockedVolume.defaultShare.linkDecryptedPassphrase,
lockedVolume.devices,
lockedVolume.photos
);
sharesState.removeShares([
lockedVolume.defaultShare.shareId,
...lockedVolume.devices.map(({ shareId }) => shareId),
...lockedVolume.photos.map(({ shareId }) => shareId),
]);
});
await preventLeave(Promise.all(restorePromiseList));
sharesState.setLockedVolumesForRestore([]);
};
const deleteLockedVolumes = async () => {
const lockedShares = sharesState.getLockedShares();
const lockedVolumeIds = lockedShares.map(({ defaultShare: { volumeId } }) => volumeId);
await preventLeave(
Promise.all(lockedVolumeIds.map((volumeId) => debouncedRequest(queryDeleteLockedVolumes(volumeId))))
);
const lockedShareIds = lockedShares.map(({ defaultShare: { shareId } }) => shareId);
sharesState.removeShares(lockedShareIds);
};
const lockedSharesCount = sharesState.getLockedShares().length;
return {
isReadyForPreparation: !!lockedSharesCount && !!addressesKeys?.length,
lockedVolumesCount: lockedSharesCount,
hasLockedVolumes: lockedSharesCount,
hasVolumesForRestore: !!sharesState.lockedVolumesForRestore?.length,
deleteLockedVolumes,
prepareVolumesForRestore,
restoreVolumes,
};
}
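// Illustrative usage sketch (hypothetical, not part of the original module):
//
//   const { prepareVolumesForRestore, restoreVolumes } = useLockedVolume();
//   await prepareVolumesForRestore(abortSignal); // decrypts what the address keys allow
//   await restoreVolumes(abortSignal); // restores only the first prepared volume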
| 3,162
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useLockedVolume/utils.test.ts
|
import { DecryptedKey, User } from '@proton/shared/lib/interfaces';
import { getDecryptedUserKeysHelper } from '@proton/shared/lib/keys';
import { getUserKey } from '@proton/shared/test/keys/keyDataHelper';
import { generateAddress, releaseCryptoProxy, setupCryptoProxyForTesting } from '../../../utils/test/crypto';
import { getPossibleAddressPrivateKeys } from './utils';
const DEFAULT_KEYPASSWORD = '1';
jest.setTimeout(20000);
describe('useLockedVolume -- utils', () => {
beforeAll(async () => {
await setupCryptoProxyForTesting();
});
afterAll(async () => {
await releaseCryptoProxy();
});
describe('getPossibleAddressPrivateKeys()', () => {
it('returns an empty array if no matching keys are found', async () => {
const keyPassword = DEFAULT_KEYPASSWORD;
const userKeysFull = await Promise.all([getUserKey('a', keyPassword, 2), getUserKey('b', keyPassword, 2)]);
const UserKeys = userKeysFull.map(({ Key }) => Key);
const decryptedUserKeys: DecryptedKey[] = [];
const addressesKeys = [
{
address: await generateAddress(UserKeys, 'email@pm.me'),
keys: decryptedUserKeys,
},
];
expect(getPossibleAddressPrivateKeys(addressesKeys).length).toBe(0);
});
it('returns only matching decrypted keys', async () => {
const keyPassword = DEFAULT_KEYPASSWORD;
const user1KeysFull = await Promise.all([getUserKey('a', keyPassword, 2), getUserKey('b', keyPassword, 2)]);
const user2KeysFull = await Promise.all([getUserKey('c', keyPassword, 2), getUserKey('d', keyPassword, 2)]);
const User1Keys = user1KeysFull.map(({ Key }) => Key);
const User2Keys = user2KeysFull.map(({ Key }) => Key);
const User1 = {
Keys: User1Keys.slice(0, 1),
} as unknown as User;
const User2 = {
Keys: User2Keys.slice(0, 1),
} as unknown as User;
const decryptedUserKeysUser1 = await getDecryptedUserKeysHelper(User1, keyPassword);
const decryptedUserKeysUser2 = await getDecryptedUserKeysHelper(User2, keyPassword);
const addressesKeysUser1 = [
{
address: await generateAddress(User1Keys, 'email@pm.me'),
keys: decryptedUserKeysUser1,
},
];
const addressesKeysUser2 = [
{
address: await generateAddress(User2Keys, 'email@pm.me'),
keys: decryptedUserKeysUser2,
},
];
expect(getPossibleAddressPrivateKeys(addressesKeysUser1)).toMatchObject(
decryptedUserKeysUser1.map((decryptedKey) => decryptedKey.privateKey)
);
expect(getPossibleAddressPrivateKeys(addressesKeysUser2)).toMatchObject(
decryptedUserKeysUser2.map((decryptedKey) => decryptedKey.privateKey)
);
});
});
});
| 3,163
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_shares/useLockedVolume/utils.ts
|
import { c } from 'ttag';
import { useAddressesKeys } from '@proton/components';
import {
CryptoProxy,
PrivateKeyReference,
SessionKey,
VERIFICATION_STATUS,
getMatchingSigningKey,
} from '@proton/crypto';
import { base64StringToUint8Array } from '@proton/shared/lib/helpers/encoding';
import { DecryptedKey } from '@proton/shared/lib/interfaces';
import { getDecryptedSessionKey } from '@proton/shared/lib/keys/drivePassphrase';
import isTruthy from '@proton/utils/isTruthy';
import mergeUint8Arrays from '@proton/utils/mergeUint8Arrays';
import { LockedDeviceForRestore, LockedShareForRestore, LockedVolumeForRestore, ShareWithKey } from './../interface';
export const getPossibleAddressPrivateKeys = (addressesKeys: ReturnType<typeof useAddressesKeys>[0]) => {
if (!addressesKeys?.length) {
return [];
}
/*
Take all address keys and match them by ID with the `keys` array (the
decrypted keys). The result is an array of decrypted private keys
associated with any of the given address keys.
*/
return addressesKeys
.reduce((result: DecryptedKey[], { address, keys }) => {
return [
...result,
...address.Keys.map((addressKey) => keys.find((key) => key.ID === addressKey.ID)).filter(isTruthy),
];
}, [])
.map((decryptedKey) => decryptedKey.privateKey);
};
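// Example (hypothetical data): for
//   addressesKeys = [{
//       address: { Keys: [{ ID: 'k1' }] },
//       keys: [{ ID: 'k1', privateKey: pk1 }, { ID: 'k2', privateKey: pk2 }],
//   }]
// the result is [pk1] - only decrypted keys whose ID matches one of the
// address keys are kept.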
export async function decryptLockedSharePassphrase(
oldPrivateKey: PrivateKeyReference,
lockedShare: ShareWithKey
): Promise<
| {
shareSessionKey: SessionKey;
shareDecryptedPassphrase: string;
linkDecryptedPassphrase: string;
}
| undefined
> {
if (!lockedShare.possibleKeyPackets) {
return;
}
const keyPacketsAsUint8Array = mergeUint8Arrays(
lockedShare.possibleKeyPackets.map((keyPacket) => base64StringToUint8Array(keyPacket))
);
const shareSessionKey = await getDecryptedSessionKey({
data: keyPacketsAsUnit8Array,
privateKeys: oldPrivateKey,
});
const { data: shareDecryptedPassphrase, verified } = await CryptoProxy.decryptMessage({
armoredMessage: lockedShare.passphrase,
armoredSignature: lockedShare.passphraseSignature,
sessionKeys: shareSessionKey,
verificationKeys: oldPrivateKey,
});
if (verified !== VERIFICATION_STATUS.SIGNED_AND_VALID) {
const error = new Error(c('Error').t`Signature verification failed`);
error.name = 'SignatureError';
throw error;
}
if (!lockedShare.rootLinkRecoveryPassphrase) {
return;
}
const lockedShareKey = await CryptoProxy.importPrivateKey({
armoredKey: lockedShare.key,
passphrase: shareDecryptedPassphrase,
});
const linkSessionKey = await getDecryptedSessionKey({
data: lockedShare.rootLinkRecoveryPassphrase,
privateKeys: lockedShareKey,
});
const { data: linkDecryptedPassphrase } = await CryptoProxy.decryptMessage({
armoredMessage: lockedShare.rootLinkRecoveryPassphrase,
sessionKeys: linkSessionKey,
verificationKeys: lockedShareKey,
});
return {
shareSessionKey,
shareDecryptedPassphrase,
linkDecryptedPassphrase,
};
}
export async function prepareVolumeForRestore(
defaultShare: ShareWithKey,
devices: (ShareWithKey & { deviceName?: string })[],
photos: ShareWithKey[],
addressPrivateKeys: PrivateKeyReference[]
): Promise<LockedVolumeForRestore | undefined> {
const preparedDefaultShare = await prepareShareForRestore(defaultShare, addressPrivateKeys);
if (!preparedDefaultShare) {
return undefined;
}
const preparedDevices = await Promise.all(
devices.map(async (device) => {
const preparedShare = await prepareShareForRestore(device, addressPrivateKeys);
if (!preparedShare) {
return undefined;
}
return preparedShare;
})
);
const preparedPhotos = await Promise.all(
photos.map(async (photo) => {
const preparedShare = await prepareShareForRestore(photo, addressPrivateKeys);
if (!preparedShare) {
return undefined;
}
return preparedShare;
})
);
return {
lockedVolumeId: defaultShare.volumeId,
defaultShare: preparedDefaultShare,
devices: preparedDevices.filter(isTruthy),
photos: preparedPhotos.filter(isTruthy),
};
}
async function prepareShareForRestore(
share: ShareWithKey,
addressPrivateKeys: PrivateKeyReference[]
): Promise<
| (LockedShareForRestore & {
shareSessionKey: LockedDeviceForRestore['shareSessionKey'];
shareDecryptedPassphrase: LockedDeviceForRestore['shareDecryptedPassphrase'];
})
| undefined
> {
try {
const matchingPrivateKey = (await getMatchingSigningKey({
armoredSignature: share.passphraseSignature,
keys: addressPrivateKeys,
})) as PrivateKeyReference | undefined;
if (matchingPrivateKey) {
const result = await decryptLockedSharePassphrase(matchingPrivateKey, share);
if (result) {
return {
shareId: share.shareId,
shareSessionKey: result.shareSessionKey,
shareDecryptedPassphrase: result.shareDecryptedPassphrase,
linkDecryptedPassphrase: result.linkDecryptedPassphrase,
};
}
}
} catch {
return undefined;
}
}
| 3,164
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/ChunkFileReader.ts
|
export default class ChunkFileReader {
private blob: Blob;
private chunkSize: number;
private offset = 0;
constructor(file: Blob, chunkSize: number) {
this.blob = file;
this.chunkSize = chunkSize;
}
isEOF() {
return this.offset >= this.blob.size;
}
async readNextChunk() {
const fileReader = new FileReader();
const blob = this.blob.slice(this.offset, this.offset + this.chunkSize);
return new Promise<Uint8Array>((resolve, reject) => {
fileReader.onload = async (e) => {
if (!e.target || e.target?.error) {
return reject(e.target?.error || new Error('Cannot open file for reading'));
}
const result = new Uint8Array(e.target.result as ArrayBuffer);
this.offset += result.byteLength;
resolve(result);
};
fileReader.readAsArrayBuffer(blob);
});
}
}
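
// Usage sketch (illustrative addition, not part of the original module):
// drain a blob chunk by chunk until EOF. The 1 MiB chunk size is an
// arbitrary example value, not a constant defined anywhere in this code.
async function readAllChunks(blob: Blob): Promise<Uint8Array[]> {
    const reader = new ChunkFileReader(blob, 1024 * 1024);
    const chunks: Uint8Array[] = [];
    while (!reader.isEOF()) {
        // Each call reads at most `chunkSize` bytes and advances the offset.
        chunks.push(await reader.readNextChunk());
    }
    return chunks;
}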
| 3,165
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/architecture.md
|
# Upload Architecture
```mermaid
graph TD
UploadButton
subgraph "TransferManager folder"
TransferManager
end
UploadButton --> useUploadInput --> UploadProvider
TransferManager --> UploadProvider
subgraph "upload folder"
ConflictModal
UploadDragDrop
initUploadFileWorker
useUploadInput
workerController
UploadDragDrop --> UploadProvider
useUploadFile --> initUploadFileWorker
subgraph "UploadProvider folder"
UploadProvider
useUpload
useUploadFile
useUploadFolder
useUploadHelper
useUploadQueue
useUploadControl
useUploadConflicts
UploadProvider --> useUpload
useUpload --> useUploadFile --> useUploadHelper
useUpload --> useUploadFolder --> useUploadHelper
useUpload --> useUploadQueue
useUpload --> useUploadControl --> useUploadQueue
useUpload --> useUploadConflicts --> useUploadControl
useUploadConflicts --> useUploadQueue
end
useUploadConflicts --> ConflictModal
initUploadFileWorker --> workerController --> worker
worker --> workerController --> initUploadFileWorker
subgraph "worker folder"
buffer
encryption
wupload[upload]
worker
encryption --> buffer --> wupload
worker --> encryption
worker --> buffer
worker --> wupload
end
end
useUploadHelper --> useDrive
useUploadFile --> useDrive
useUploadFolder --> useDrive
subgraph "hooks"
useDrive[useDrive and others]
end
```
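
A short reading of the diagram: `UploadButton` and `UploadDragDrop` feed files into `UploadProvider`, whose hooks (`useUpload`, `useUploadFile`, `useUploadFolder`, and the queue, control, and conflict hooks) manage the queue. `useUploadFile` calls `initUploadFileWorker`, which talks to the web worker through `workerController`; inside the worker, `encryption` fills the `buffer` that `upload` drains.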
| 3,166
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/constants.ts
|
/**
 * How many times a failed request is retried before giving up and failing
* the whole upload.
*/
export const MAX_RETRIES_BEFORE_FAIL = 3;
/**
 * MAX_ENCRYPTED_BLOCKS limits the number of encrypted blocks kept in the
 * buffer before asking the API for the next batch of links, unless a low
 * buffer of uploading blocks forces the request sooner.
*/
export const MAX_ENCRYPTED_BLOCKS = 15;
/**
 * MAX_UPLOADING_BLOCKS limits the number of blocks in the upload buffer.
 * It should be a bit bigger than MAX_UPLOAD_JOBS to ensure the next block
 * is always ready when any upload job finishes its current upload. When
 * there are not enough uploading jobs, the worker asks for link creation
 * sooner than it hits the ideal size of MAX_ENCRYPTED_BLOCKS.
*/
export const MAX_UPLOADING_BLOCKS = 10;
/**
 * MAX_BLOCKS_PER_UPLOAD is how many blocks one upload job can have buffered
 * in memory. Used to count the current load against the total limit of
 * ongoing file uploads, MAX_UPLOAD_BLOCKS_LOAD.
*/
export const MAX_BLOCKS_PER_UPLOAD = MAX_ENCRYPTED_BLOCKS + MAX_UPLOADING_BLOCKS;
/**
 * MAX_UPLOAD_BLOCKS_LOAD limits the total number of blocks being uploaded
 * at one time. If the queue contains only big files, only a few of them are
 * allowed at a time to limit the memory requirements. If the queue contains
 * small files, more can be uploaded in parallel. But each upload means an
 * extra worker, and even though browsers support up to hundreds of web
 * workers, each one still spawns a thread.
*/
export const MAX_UPLOAD_BLOCKS_LOAD = 10;
/**
 * MAX_UPLOAD_FOLDER_LOAD limits the total number of folders being created
 * at one time.
*/
export const MAX_UPLOAD_FOLDER_LOAD = 5;
/**
 * How many ongoing uploads there can be. Without HTTP/2, we cannot do more
 * than six parallel requests to one host. With HTTP/2 (which we use), there
 * is theoretically no limit, but we should still set a reasonable limit
 * to avoid overloading the user's device.
*/
export const MAX_UPLOAD_JOBS = 5;
/**
 * WAIT_TIME is used for pauses between checks, such as whether the buffer
 * is still full or whether the upload is paused.
*/
export const WAIT_TIME = 50; // Milliseconds.
/**
 * TOKEN_EXPIRATION_TIME defines after what time the server expires the token.
 * We can optimise and not even ask for a block upload if we know the token
 * is already stale and we should ask for a new one.
*/
export const TOKEN_EXPIRATION_TIME = 3 * 60 * 60 * 1000; // Milliseconds.
/**
 * MAX_TOO_MANY_REQUESTS_WAIT defines how many seconds we are allowed to wait
 * if the server rate-limits the upload. If the server asks to wait longer,
 * we don't wait and fail right away instead.
*/
export const MAX_TOO_MANY_REQUESTS_WAIT = 60 * 60; // Seconds.
/**
* MAX_BLOCK_VERIFICATION_RETRIES defines how many times we will retry
* encrypting a block if it fails verification.
*
* For context, blocks are verified after encryption to check for
* corrupted encrypted data. If this fails, we retry creating the block
* entirely. The main utility is to mitigate bitflip issues.
*/
export const MAX_BLOCK_VERIFICATION_RETRIES = 1;
/**
* Amount of time between heartbeats. These are used to ensure the worker
* is still alive, and not stuck in a bad state.
*/
export const HEARTBEAT_INTERVAL = 30 * 1000; // ms
/**
* Amount of time to wait for a new heartbeat. If no heartbeat is received
* during this interval, we cancel and restart the worker.
*
 * This should be greater than HEARTBEAT_INTERVAL.
*/
export const HEARTBEAT_WAIT_TIME = HEARTBEAT_INTERVAL * 2; // ms
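
// Illustrative sketch (an assumption added for clarity, not part of the
// original module): how a caller might use TOKEN_EXPIRATION_TIME to skip
// asking for a block upload with a stale token, as described above.
// `tokenCreatedAt` is a hypothetical timestamp tracked by the caller.
function isTokenLikelyExpired(tokenCreatedAt: number, now: number = Date.now()): boolean {
    return now - tokenCreatedAt >= TOKEN_EXPIRATION_TIME;
}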
| 3,167
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/index.ts
|
export { UploadProvider, useUploadProvider as useUpload } from './UploadProvider';
export { useFileUploadInput, useFolderUploadInput } from './useUploadInput';
export { mimeTypeFromFile } from './mimeTypeParser/mimeTypeParser';
| 3,168
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/initUploadFileWorker.ts
|
import { traceError } from '@proton/shared/lib/helpers/sentry';
import { TransferCancel } from '../../components/TransferManager/transfer';
import type {
FileKeys,
FileRequestBlock,
PhotoUpload,
ThumbnailRequestBlock,
UploadCallbacks,
UploadFileControls,
UploadFileProgressCallbacks,
} from './interface';
import { getMediaInfo } from './media';
import { mimeTypeFromFile } from './mimeTypeParser/mimeTypeParser';
import { UploadWorkerController } from './workerController';
class TransferRetry extends Error {
constructor(options: { message: string }) {
super(options.message);
this.name = 'TransferRetry';
}
}
export function initUploadFileWorker(
file: File,
isForPhotos: boolean,
{ initialize, createFileRevision, createBlockLinks, getVerificationData, finalize, onError }: UploadCallbacks
): UploadFileControls {
const abortController = new AbortController();
let workerApi: UploadWorkerController;
    // Start detecting the mime type right away so the information is ready
    // once the upload starts, and we can generate the thumbnail as fast as
    // possible without waiting for the revision to be created on the API.
const mimeTypePromise = mimeTypeFromFile(file);
const start = async ({ onInit, onProgress, onNetworkError, onFinalize }: UploadFileProgressCallbacks = {}) => {
        // The worker has a slight overhead of about 40 ms, so let's start
        // generating the thumbnail a bit sooner.
const mediaInfoPromise = getMediaInfo(mimeTypePromise, file, isForPhotos);
return new Promise<void>((resolve, reject) => {
const worker = new Worker(
new URL(
/* webpackChunkName: "drive-worker" */
'./worker/worker.ts',
import.meta.url
)
);
workerApi = new UploadWorkerController(worker, {
keysGenerated: (keys: FileKeys) => {
mimeTypePromise
.then(async (mimeType) => {
return createFileRevision(abortController.signal, mimeType, keys).then(
async (fileRevision) => {
onInit?.(mimeType, fileRevision.fileName);
return Promise.all([
mediaInfoPromise,
getVerificationData(abortController.signal),
]).then(async ([mediaInfo, verificationData]) => {
await workerApi.postStart(
file,
{
mimeType,
isForPhotos,
media: {
width: mediaInfo?.width,
height: mediaInfo?.height,
duration: mediaInfo?.duration,
},
thumbnails: mediaInfo?.thumbnails,
},
fileRevision.address.privateKey,
fileRevision.address.email,
fileRevision.privateKey,
fileRevision.sessionKey,
fileRevision.parentHashKey,
verificationData
);
});
}
);
})
.catch(reject);
},
createBlocks: (fileBlocks: FileRequestBlock[], thumbnailBlocks?: ThumbnailRequestBlock[]) => {
createBlockLinks(abortController.signal, fileBlocks, thumbnailBlocks)
.then(({ fileLinks, thumbnailLinks }) => workerApi.postCreatedBlocks(fileLinks, thumbnailLinks))
.catch(reject);
},
onProgress: (increment: number) => {
onProgress?.(increment);
},
finalize: (signature: string, signatureAddress: string, xattr: string, photo?: PhotoUpload) => {
onFinalize?.();
finalize(signature, signatureAddress, xattr, photo).then(resolve).catch(reject);
},
onNetworkError: (error: string) => {
onNetworkError?.(error);
},
onError: (error: string) => {
reject(new Error(error));
},
notifySentry: (error: Error) => {
traceError(error);
},
onCancel: () => {
reject(new TransferCancel({ message: `Transfer canceled for ${file.name}` }));
},
onHeartbeatTimeout: () => {
reject(new TransferRetry({ message: `Heartbeat timeout` }));
},
});
initialize(abortController.signal)
.then(async ({ addressPrivateKey, parentPrivateKey }) => {
await workerApi.postGenerateKeys(addressPrivateKey, parentPrivateKey);
})
.catch(reject);
});
};
const pause = async () => {
workerApi?.postPause();
};
const resume = async () => {
workerApi?.postResume();
};
const cancel = async () => {
abortController.abort();
workerApi?.cancel();
};
return {
start: (progressCallbacks?: UploadFileProgressCallbacks) =>
start(progressCallbacks)
.catch((err) => {
abortController.abort();
onError?.(err);
throw err;
})
.finally(() => {
workerApi?.postClose();
                // We give the worker some time to `close()` itself, to safely erase the stored private keys.
                // We still forcefully terminate it after a few seconds, in case the worker is unexpectedly stuck
                // in a bad state and couldn't close itself.
setTimeout(() => {
workerApi?.terminate();
}, 5000);
}),
cancel,
pause,
resume,
};
}
| 3,169
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/interface.ts
|
import React from 'react';
import { PrivateKeyReference, SessionKey } from '@proton/crypto';
import { ThumbnailType } from './media';
export type UploadConflictModal = React.FunctionComponent<UploadConflictModalProps>;
export interface UploadConflictModalProps {
name: string;
isFolder?: boolean;
originalIsDraft?: boolean;
originalIsFolder?: boolean;
apply: (strategy: TransferConflictStrategy, all: boolean) => void;
cancelAll: () => void;
}
export interface UploadFileControls {
start: (progressCallbacks?: UploadFileProgressCallbacks) => Promise<void>;
pause: () => void;
resume: () => void;
cancel: () => void;
}
export interface UploadFileProgressCallbacks {
onInit?: (mimeType: string, fileName: string) => void;
onProgress?: (bytes: number) => void;
onNetworkError?: (error: any) => void;
onFinalize?: () => void;
}
export interface UploadFolderControls {
start: () => Promise<{ folderId: string; folderName: string }>;
cancel: () => void;
}
export interface UploadCallbacks {
initialize: (abortSignal: AbortSignal) => Promise<{
addressPrivateKey: PrivateKeyReference;
parentPrivateKey: PrivateKeyReference;
}>;
getVerificationData: (abortSignal: AbortSignal) => Promise<VerificationData>;
createFileRevision: (abortSignal: AbortSignal, mimeType: string, keys: FileKeys) => Promise<InitializedFileMeta>;
createBlockLinks: (
abortSignal: AbortSignal,
fileBlocks: FileRequestBlock[],
thumbnailBlocks?: ThumbnailRequestBlock[]
) => Promise<{ fileLinks: Link[]; thumbnailLinks?: Link[] }>;
finalize: (signature: string, signatureAddress: string, xattr: string, photo?: PhotoUpload) => Promise<void>;
onError?: (error: Error) => void;
}
export type UploadFileList = (UploadFileItem | UploadFolderItem)[];
export type UploadFileItem = { path: string[]; file: File };
export type UploadFolderItem = { path: string[]; folder: string; modificationTime?: Date };
export type FileKeys = {
nodeKey: string;
nodePassphrase: string;
nodePassphraseSignature: string;
contentKeyPacket: string;
contentKeyPacketSignature: string;
privateKey: PrivateKeyReference;
sessionKey: SessionKey;
};
export type InitializedFileMeta = {
fileName: string;
privateKey: PrivateKeyReference;
sessionKey: SessionKey;
parentHashKey: Uint8Array;
address: {
privateKey: PrivateKeyReference;
email: string;
};
};
export type EncryptedBlock = {
index: number;
originalSize: number;
encryptedData: Uint8Array;
hash: Uint8Array;
signature: string;
verificationToken: Uint8Array;
// Thumbnails specific properties
thumbnailType?: never;
};
export type ThumbnailEncryptedBlock = {
index: number;
originalSize: number;
encryptedData: Uint8Array;
hash: Uint8Array;
// Thumbnails specific properties
thumbnailType: ThumbnailType;
};
export type FileRequestBlock = {
index: number;
signature: string;
size: number;
hash: Uint8Array;
verificationToken: Uint8Array;
};
export type ThumbnailRequestBlock = {
size: number;
hash: Uint8Array;
type: ThumbnailType;
};
export type VerificationData = {
verificationCode: Uint8Array;
verifierSessionKey: SessionKey;
};
export type Link = {
index: number;
token: string;
url: string;
};
export type BlockTokenHash = {
index: number;
token: string;
hash: Uint8Array;
};
export type BlockToken = {
index: number;
token: string;
};
export type PhotoUpload = {
encryptedExif?: string;
captureTime: number;
contentHash?: string;
};
export enum TransferConflictStrategy {
Rename = 'rename',
Replace = 'replace',
Skip = 'skip',
}
| 3,170
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/useUploadInput.ts
|
import { ChangeEvent, useEffect, useRef } from 'react';
import { c } from 'ttag';
import { useNotifications } from '@proton/components';
import { logError } from '../../utils/errorHandling';
import { useUploadProvider } from './UploadProvider';
import { UploadFileItem, UploadFileList } from './interface';
export function useFileUploadInput(shareId: string, linkId: string, isForPhotos: boolean = false) {
return useUploadInput(shareId, linkId, false, isForPhotos);
}
export function useFolderUploadInput(shareId: string, linkId: string, isForPhotos: boolean = false) {
return useUploadInput(shareId, linkId, true, isForPhotos);
}
function useUploadInput(shareId: string, linkId: string, forFolders?: boolean, isForPhotos?: boolean) {
const { uploadFiles } = useUploadProvider();
const { createNotification } = useNotifications();
const inputRef = useRef<HTMLInputElement>(null);
useEffect(() => {
if (forFolders && inputRef.current) {
// React types don't allow `webkitdirectory` but it exists and works
inputRef.current.setAttribute('webkitdirectory', 'true');
}
}, [forFolders]);
const getItemsToUpload = (files: FileList): UploadFileList => {
const foldersCreated = new Set<string>();
const filesToUpload: UploadFileList = [];
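        // Example (added for clarity): a file with webkitRelativePath
        // "docs/img/a.png" yields folder items { path: [], folder: 'docs' }
        // and { path: ['docs'], folder: 'img' }, then the file item
        // { path: ['docs', 'img'], file }.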
for (let i = 0; i < files.length; i++) {
const file = files[i];
if (forFolders) {
                // webkitRelativePath might be available only if the
                // webkitdirectory property is set, or not at all. It is
                // important not to use it when not uploading folders, so that
                // at least plain files without structure can be uploaded.
if ('webkitRelativePath' in file) {
const path = ((file as any).webkitRelativePath as string).split('/');
for (let j = 1; j < path.length; j++) {
const folderPath = path.slice(0, j);
const folderPathStr = folderPath.join('/');
if (!foldersCreated.has(folderPathStr)) {
foldersCreated.add(folderPathStr);
filesToUpload.push({ path: folderPath.slice(0, -1), folder: folderPath.slice(-1)[0] });
}
}
filesToUpload.push({ path: path.slice(0, -1), file });
} else {
createNotification({
type: 'error',
text: c('Error').t`Your browser does not support uploading folders`,
});
}
} else {
filesToUpload.push({ path: [], file });
}
}
return filesToUpload;
};
const handleClick = () => {
if (!shareId || !linkId || !inputRef.current) {
return;
}
inputRef.current.value = '';
inputRef.current.click();
};
const handleChange = (e: ChangeEvent<HTMLInputElement>) => {
const { files } = e.target;
if (!shareId || !linkId || !files) {
return;
}
let filesToUpload = getItemsToUpload(files);
if (!forFolders) {
            // macOS has a bug where you can select folders when uploading files in some cases.
filesToUpload = filesToUpload.filter((item) => !!(item as UploadFileItem).file);
}
uploadFiles(shareId, linkId, filesToUpload, isForPhotos).catch(logError);
};
return { inputRef, handleClick, handleChange };
}
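
// Usage sketch (illustrative addition): wiring the returned ref and handlers
// to a hidden file input in a component. Shown as a comment because this is
// a plain .ts module and the exact markup is an assumption:
//
//   const { inputRef, handleClick, handleChange } = useFileUploadInput(shareId, linkId);
//   <input type="file" ref={inputRef} onChange={handleChange} multiple hidden />
//   <button onClick={handleClick}>Upload files</button>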
| 3,171
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/utils.ts
|
export function getErrorString(error?: any, fallback?: string): string {
if (error) {
return error.message || `${error}`;
}
    return fallback || 'Unknown error';
}
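
// Illustrative examples (added for clarity):
//   getErrorString(new Error('boom'))     === 'boom'
//   getErrorString(undefined, 'fallback') === 'fallback'
//   getErrorString()                      === 'Unknown error'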
| 3,172
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/workerController.ts
|
import { CryptoProxy, PrivateKeyReference, SessionKey, serverTime, updateServerTime } from '@proton/crypto';
import { SafeErrorObject, getSafeErrorObject } from '@proton/utils/getSafeErrorObject';
import { HEARTBEAT_INTERVAL, HEARTBEAT_WAIT_TIME } from './constants';
import type {
EncryptedBlock,
FileKeys,
FileRequestBlock,
Link,
PhotoUpload,
ThumbnailEncryptedBlock,
ThumbnailRequestBlock,
VerificationData,
} from './interface';
import type { Media, ThumbnailInfo } from './media';
import { getErrorString } from './utils';
type GenerateKeysMessage = {
command: 'generate_keys';
addressPrivateKey: Uint8Array;
parentPrivateKey: Uint8Array;
serverTime: Date;
};
type StartMessage = {
command: 'start';
file: File;
mimeType: string;
isForPhotos: boolean;
thumbnails?: ThumbnailInfo[];
media?: Media;
addressPrivateKey: Uint8Array;
addressEmail: string;
privateKey: Uint8Array;
sessionKey: SessionKey;
parentHashKey: Uint8Array;
verificationData: VerificationData;
};
type CreatedBlocksMessage = {
command: 'created_blocks';
fileLinks: Link[];
thumbnailLinks?: Link[];
};
type PauseMessage = {
command: 'pause';
};
type ResumeMessage = {
command: 'resume';
};
type CloseMessage = {
command: 'close';
};
/**
* WorkerControllerEvent contains all possible events which can come from
* the main thread to the upload web worker.
*/
type WorkerControllerEvent = {
data: GenerateKeysMessage | StartMessage | CreatedBlocksMessage | PauseMessage | ResumeMessage | CloseMessage;
};
/**
 * WorkerHandlers defines the handlers available in the upload web worker
 * for messages from the main thread defined in WorkerControllerEvent.
*/
interface WorkerHandlers {
generateKeys: (addressPrivateKey: PrivateKeyReference, parentPrivateKey: PrivateKeyReference) => void;
start: (
file: File,
{
mimeType,
isForPhotos,
media,
thumbnails,
}: {
mimeType: string;
isForPhotos: boolean;
thumbnails?: ThumbnailInfo[];
media?: Media;
},
addressPrivateKey: PrivateKeyReference,
addressEmail: string,
privateKey: PrivateKeyReference,
sessionKey: SessionKey,
parentHashKey: Uint8Array,
verificationData: VerificationData
) => void;
createdBlocks: (fileLinks: Link[], thumbnailLinks?: Link[]) => void;
pause: () => void;
resume: () => void;
}
type KeysGeneratedMessage = {
command: 'keys_generated';
nodeKey: string;
nodePassphrase: string;
nodePassphraseSignature: string;
contentKeyPacket: string;
contentKeyPacketSignature: string;
privateKey: Uint8Array;
sessionKey: SessionKey;
};
type CreateBlockMessage = {
command: 'create_blocks';
fileBlocks: FileRequestBlock[];
thumbnailBlocks?: ThumbnailRequestBlock[];
};
type ProgressMessage = {
command: 'progress';
increment: number;
};
type DoneMessage = {
command: 'done';
signature: string;
signatureAddress: string;
xattr: string;
photo?: PhotoUpload;
};
type NetworkErrorMessage = {
command: 'network_error';
error: string;
};
type ErrorMessage = {
command: 'error';
error: string;
};
type NotifySentryMessage = {
command: 'notify_sentry';
error: SafeErrorObject;
};
type HeartbeatMessage = {
command: 'heartbeat';
};
/**
* WorkerEvent contains all possible events which can come from the upload
* web worker to the main thread.
*/
type WorkerEvent = {
data:
| KeysGeneratedMessage
| CreateBlockMessage
| ProgressMessage
| DoneMessage
| NetworkErrorMessage
| ErrorMessage
| NotifySentryMessage
| HeartbeatMessage;
};
/**
 * WorkerControllerHandlers defines the handlers available in the main
 * thread for messages from the upload web worker defined in WorkerEvent.
*/
interface WorkerControllerHandlers {
keysGenerated: (keys: FileKeys) => void;
createBlocks: (fileBlocks: FileRequestBlock[], thumbnailBlocks?: ThumbnailRequestBlock[]) => void;
onProgress: (increment: number) => void;
finalize: (signature: string, signatureAddress: string, xattr: string, photo?: PhotoUpload) => void;
onNetworkError: (error: string) => void;
onError: (error: string) => void;
onHeartbeatTimeout: () => void;
onCancel: () => void;
notifySentry: (error: Error) => void;
}
/**
* UploadWorker provides communication between the main thread and upload web
* worker. The class ensures type safety as much as possible.
* UploadWorker is meant to be used on the side of the web worker.
*/
export class UploadWorker {
worker: Worker;
heartbeatInterval?: NodeJS.Timeout;
constructor(worker: Worker, { generateKeys, start, createdBlocks, pause, resume }: WorkerHandlers) {
        // Before the worker terminates, we want to securely release the
        // crypto proxy. That might need a bit of time, and we allow up to a
        // few seconds before we terminate the worker. While the proxy is
        // being released, crypto calls might fail, so any error should be ignored.
let closing = false;
this.worker = worker;
// Set up the heartbeat. This notifies the main thread that the worker is still alive.
this.heartbeatInterval = setInterval(() => this.postHeartbeat(), HEARTBEAT_INTERVAL);
worker.addEventListener('message', ({ data }: WorkerControllerEvent) => {
switch (data.command) {
case 'generate_keys':
(async (data) => {
// Setup CryptoProxy
// Dynamic import is needed since we want pmcrypto (incl. openpgpjs) to be loaded inside the worker, not in the main thread.
const { Api: CryptoApi } = await import(
/* webpackChunkName: "crypto-worker-api" */ '@proton/crypto/lib/worker/api'
);
CryptoApi.init();
CryptoProxy.setEndpoint(new CryptoApi(), (endpoint) => endpoint.clearKeyStore());
updateServerTime(data.serverTime); // align serverTime in worker with that of the main thread (received from API)
const addressPrivateKey = await CryptoProxy.importPrivateKey({
binaryKey: data.addressPrivateKey,
passphrase: null,
});
const parentPrivateKey = await CryptoProxy.importPrivateKey({
binaryKey: data.parentPrivateKey,
passphrase: null,
});
generateKeys(addressPrivateKey, parentPrivateKey);
})(data).catch((err) => {
this.postError(err);
});
break;
case 'start':
(async (data) => {
const addressPrivateKey = await CryptoProxy.importPrivateKey({
binaryKey: data.addressPrivateKey,
passphrase: null,
});
const privateKey = await CryptoProxy.importPrivateKey({
binaryKey: data.privateKey,
passphrase: null,
});
start(
data.file,
{
mimeType: data.mimeType,
isForPhotos: data.isForPhotos,
thumbnails: data.thumbnails,
media: data.media,
},
addressPrivateKey,
data.addressEmail,
privateKey,
data.sessionKey,
data.parentHashKey,
data.verificationData
);
})(data).catch((err) => {
this.postError(err);
});
break;
case 'created_blocks':
createdBlocks(data.fileLinks, data.thumbnailLinks);
break;
case 'pause':
pause();
break;
case 'resume':
resume();
break;
case 'close':
closing = true;
this.clearHeartbeatInterval();
void CryptoProxy.releaseEndpoint().then(() => self.close());
break;
default:
// Type linters should prevent this error.
throw new Error('Unexpected message');
}
});
worker.addEventListener('error', (event: ErrorEvent) => {
if (closing) {
return;
}
this.postError(getErrorString(event.error, event.message));
});
// @ts-ignore
worker.addEventListener('unhandledrejection', (event: PromiseRejectionEvent) => {
event.preventDefault();
if (closing) {
return;
}
this.postError(event.reason);
});
}
clearHeartbeatInterval() {
if (this.heartbeatInterval) {
clearInterval(this.heartbeatInterval);
}
}
async postKeysGenerated(keys: FileKeys) {
this.worker.postMessage({
command: 'keys_generated',
...keys,
privateKey: await CryptoProxy.exportPrivateKey({
privateKey: keys.privateKey,
passphrase: null,
format: 'binary',
}),
} satisfies KeysGeneratedMessage);
}
postCreateBlocks(fileBlocks: EncryptedBlock[], encryptedThumbnailBlocks?: ThumbnailEncryptedBlock[]) {
this.worker.postMessage({
command: 'create_blocks',
fileBlocks: fileBlocks.map<FileRequestBlock>((block) => ({
index: block.index,
signature: block.signature,
size: block.encryptedData.byteLength,
hash: block.hash,
verificationToken: block.verificationToken,
})),
thumbnailBlocks: encryptedThumbnailBlocks?.map((thumbnailBlock) => ({
size: thumbnailBlock.encryptedData.byteLength,
hash: thumbnailBlock.hash,
type: thumbnailBlock.thumbnailType,
})),
} satisfies CreateBlockMessage);
}
postProgress(increment: number) {
this.worker.postMessage({
command: 'progress',
increment,
} satisfies ProgressMessage);
}
postDone(signature: string, signatureAddress: string, xattr: string, photo?: PhotoUpload) {
this.worker.postMessage({
command: 'done',
signature,
signatureAddress,
xattr,
photo,
} satisfies DoneMessage);
}
postNetworkError(error: string) {
this.worker.postMessage({
command: 'network_error',
error,
} satisfies NetworkErrorMessage);
}
postError(error: string) {
this.worker.postMessage({
command: 'error',
error,
} satisfies ErrorMessage);
}
postNotifySentry(error: Error) {
this.worker.postMessage({
command: 'notify_sentry',
error: getSafeErrorObject(error),
} satisfies NotifySentryMessage);
}
postHeartbeat() {
this.worker.postMessage({
command: 'heartbeat',
} satisfies HeartbeatMessage);
}
}
/**
* UploadWorkerController provides communication between the main thread and
* upload web worker. The class ensures type safety as much as possible.
* UploadWorkerController is meant to be used on the side of the main thread.
*/
export class UploadWorkerController {
worker: Worker;
onCancel: () => void;
heartbeatTimeout?: NodeJS.Timeout;
constructor(
worker: Worker,
{
keysGenerated,
createBlocks,
onProgress,
finalize,
onNetworkError,
onError,
onCancel,
notifySentry,
onHeartbeatTimeout,
}: WorkerControllerHandlers
) {
this.worker = worker;
this.onCancel = onCancel;
worker.addEventListener('message', ({ data }: WorkerEvent) => {
switch (data.command) {
case 'keys_generated':
(async (data) => {
const privateKey = await CryptoProxy.importPrivateKey({
binaryKey: data.privateKey,
passphrase: null,
});
keysGenerated({
nodeKey: data.nodeKey,
nodePassphrase: data.nodePassphrase,
nodePassphraseSignature: data.nodePassphraseSignature,
contentKeyPacket: data.contentKeyPacket,
contentKeyPacketSignature: data.contentKeyPacketSignature,
privateKey,
sessionKey: data.sessionKey,
});
})(data).catch((err) => {
onError(err);
});
break;
case 'create_blocks':
createBlocks(data.fileBlocks, data.thumbnailBlocks);
break;
case 'progress':
onProgress(data.increment);
break;
case 'done':
this.clearHeartbeatTimeout();
finalize(data.signature, data.signatureAddress, data.xattr, data.photo);
break;
case 'network_error':
onNetworkError(data.error);
break;
case 'error':
this.clearHeartbeatTimeout();
onError(data.error);
break;
case 'notify_sentry':
notifySentry(data.error);
break;
case 'heartbeat':
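                    // Each heartbeat re-arms the timeout below; if the next
                    // heartbeat does not arrive within HEARTBEAT_WAIT_TIME,
                    // the worker is considered stuck and is terminated.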
this.clearHeartbeatTimeout();
this.heartbeatTimeout = setTimeout(() => {
notifySentry(new Error('Heartbeat was not received in time'));
onHeartbeatTimeout();
// Since the worker is stuck, we can terminate it
this.worker.terminate();
}, HEARTBEAT_WAIT_TIME);
break;
default:
// Type linters should prevent this error.
throw new Error('Unexpected message');
}
});
worker.addEventListener('error', (event: ErrorEvent) => {
onError(getErrorString(event.error, event.message));
});
}
clearHeartbeatTimeout() {
if (this.heartbeatTimeout) {
clearTimeout(this.heartbeatTimeout);
}
}
terminate() {
this.clearHeartbeatTimeout();
this.worker.terminate();
}
cancel() {
this.onCancel();
}
async postGenerateKeys(addressPrivateKey: PrivateKeyReference, parentPrivateKey: PrivateKeyReference) {
this.worker.postMessage({
command: 'generate_keys',
addressPrivateKey: await CryptoProxy.exportPrivateKey({
privateKey: addressPrivateKey,
passphrase: null,
format: 'binary',
}),
parentPrivateKey: await CryptoProxy.exportPrivateKey({
privateKey: parentPrivateKey,
passphrase: null,
format: 'binary',
}),
serverTime: serverTime(),
} satisfies GenerateKeysMessage);
}
async postStart(
file: File,
{
mimeType,
isForPhotos,
thumbnails,
media,
}: {
mimeType: string;
isForPhotos: boolean;
thumbnails?: ThumbnailInfo[];
media?: Media;
},
addressPrivateKey: PrivateKeyReference,
addressEmail: string,
privateKey: PrivateKeyReference,
sessionKey: SessionKey,
parentHashKey: Uint8Array,
verificationData: VerificationData
) {
this.worker.postMessage({
command: 'start',
file,
mimeType,
isForPhotos: isForPhotos,
thumbnails,
media,
addressPrivateKey: await CryptoProxy.exportPrivateKey({
privateKey: addressPrivateKey,
passphrase: null,
format: 'binary',
}),
addressEmail,
privateKey: await CryptoProxy.exportPrivateKey({
privateKey: privateKey,
passphrase: null,
format: 'binary',
}),
sessionKey,
parentHashKey,
verificationData,
} satisfies StartMessage);
}
postCreatedBlocks(fileLinks: Link[], thumbnailLinks?: Link[]) {
this.worker.postMessage({
command: 'created_blocks',
fileLinks,
thumbnailLinks,
} satisfies CreatedBlocksMessage);
}
postPause() {
this.worker.postMessage({
command: 'pause',
} satisfies PauseMessage);
}
postResume() {
this.worker.postMessage({
command: 'resume',
} satisfies ResumeMessage);
}
postClose() {
this.worker.postMessage({
command: 'close',
} satisfies CloseMessage);
}
}
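
// Message flow sketch (added for clarity, derived from the types above):
//   main --generate_keys--> worker --keys_generated--> main
//   main --start--> worker --create_blocks--> main --created_blocks--> worker
//   worker --progress/heartbeat--> main (repeatedly) ... worker --done--> main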
| 3,173
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/UploadModalContainer.ts
|
import React from 'react';
export interface UploadModalContainer {
fileThresholdModal: React.JSX.Element | null;
conflictModal: React.JSX.Element | null;
}
| 3,174
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/UploadProvider.tsx
|
import { ReactNode, createContext, useContext } from 'react';
import { UploadProviderState } from './UploadProviderState';
import useUpload from './useUpload';
const UploadContext = createContext<UploadProviderState | null>(null);
export const UploadProvider = ({ children }: { children: ReactNode }) => {
const [providerState, { conflictModal, fileThresholdModal }] = useUpload();
return (
<UploadContext.Provider value={{ ...providerState }}>
{children}
{conflictModal}
{fileThresholdModal}
</UploadContext.Provider>
);
};
export const useUploadProvider = (): UploadProviderState => {
const state = useContext(UploadContext);
if (!state) {
throw new Error('Trying to use uninitialized UploadProvider');
}
return state;
};
| 3,175
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/UploadProviderState.ts
|
import { TransferProgresses } from '../../../components/TransferManager/transfer';
import { UploadFileList } from '../interface';
import { FileUpload, FolderUpload, UpdateFilter } from './interface';
export interface UploadProviderState {
uploads: (FileUpload | FolderUpload)[];
hasUploads: boolean;
uploadFiles: (shareId: string, parentId: string, list: UploadFileList, isForPhotos?: boolean) => Promise<void>;
pauseUploads: (idOrFilter: UpdateFilter) => void;
resumeUploads: (idOrFilter: UpdateFilter) => void;
cancelUploads: (idOrFilter: UpdateFilter) => void;
restartUploads: (idOrFilter: UpdateFilter) => void;
removeUploads: (idOrFilter: UpdateFilter) => void;
clearUploads: () => void;
getUploadsProgresses: () => TransferProgresses;
}
| 3,176
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/index.ts
|
export { UploadProvider, useUploadProvider } from './UploadProvider';
| 3,177
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/interface.ts
|
import { c, msgid } from 'ttag';
import { TransferMeta, TransferState } from '../../../components/TransferManager/transfer';
import { TransferConflictStrategy } from '../interface';
interface LinkUpload {
// ID of the upload for referencing (such as pausing and so on).
id: string;
    // IDs of where the link should be uploaded. The share ID is always known,
    // but the parent link ID might be empty if the user uploads a new folder
    // structure which needs to be created first.
shareId: string;
parentId?: string;
startDate: Date;
state: TransferState;
resumeState?: TransferState; // resumeState is set only when state is paused.
error?: Error;
originalIsDraft?: boolean;
    isForPhotos?: boolean; // tells if the upload is for photos
}
export interface FileUpload extends LinkUpload {
file: File;
meta: TransferMeta; // To be compatible with Upload of TransferManager.
originalIsFolder?: boolean;
}
export interface FileUploadReady extends FileUpload {
parentId: string;
}
export interface FolderUpload extends LinkUpload {
name: string;
modificationTime?: Date;
meta: TransferMeta; // To be compatible with Upload of TransferManager.
linkId?: string;
originalIsFolder?: boolean;
files: FileUpload[];
folders: FolderUpload[];
}
export interface FolderUploadReady extends FolderUpload {
parentId: string;
}
export interface UploadQueue {
shareId: string;
linkId: string;
files: FileUpload[];
folders: FolderUpload[];
}
export type UpdateFilter = string | ((params: UpdateCallbackParams) => boolean);
export type UpdateState = TransferState | ((params: UpdateCallbackParams) => TransferState);
export type UpdateCallback = (params: UpdateCallbackParams) => void;
export type UpdateData = {
mimeType?: string;
name?: string;
error?: Error;
folderId?: string;
isNewFolder?: boolean;
originalIsDraft?: boolean;
originalIsFolder?: boolean;
};
export type UpdateCallbackParams = {
id: string;
state: TransferState;
resumeState?: TransferState;
parentId?: string;
file?: File;
};
export type ConflictStrategyHandler = (
abortSignal: AbortSignal,
originalIsDraft?: boolean,
originalIsFolder?: boolean
) => Promise<TransferConflictStrategy>;
export class UploadUserError extends Error {
constructor(message: string) {
super(message);
this.name = 'UploadUserError';
}
}
export class UploadConflictError extends Error {
filename: string;
constructor(filename: string, count: number = 0) {
let message = c('Notification').t`File or folder "${filename}" is already uploading`;
if (count) {
message = c('Notification').ngettext(
                msgid`File or folder "${filename}" and ${count} other is already uploading`,
`File or folder "${filename}" and ${count} others are already uploading`,
count
);
}
super(message);
this.filename = filename;
this.name = 'UploadConflictError';
}
}
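
// Illustrative examples (added for clarity):
//   new UploadConflictError('a.txt')    -> 'File or folder "a.txt" is already uploading'
//   new UploadConflictError('a.txt', 2) -> 'File or folder "a.txt" and 2 others are already uploading'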
| 3,178
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/uploadClientUid.ts
|
import { captureMessage } from '@proton/shared/lib/helpers/sentry';
import { generateProtonWebUID } from '@proton/shared/lib/helpers/uid';
import { sendErrorReport } from '../../../utils/errorHandling';
enum UploadingState {
    // File is being uploaded - do not automatically replace the file.
Uploading = 'uploading',
    // File upload either failed or the user closed the tab before the upload
    // finished - automatic replace is safe.
Failed = 'failed',
}
const KEY_PREFIX = 'upload-client-uid';
/**
 * generateClientUid generates a new client UID and two callbacks.
 * The first is to be called when the file is uploaded - it removes the UID
 * from local storage as it is not needed anymore.
 * The second is to be called when the file failed to upload - it changes
 * the state of the uploading UID in local storage to indicate it is safe
 * to automatically replace the file draft.
 * Note the failed state is also set automatically when the page is unloaded.
*/
export function generateClientUid(clientUid?: string) {
// When the file is being replaced, we want to reuse the same UID
    // so it is properly removed from the local storage after a successful
// upload.
if (!clientUid) {
clientUid = generateProtonWebUID();
}
const key = getStorageKey(clientUid);
    // LocalStorage can fail when setting values, which would otherwise fail
    // the transfer. We can ignore this.
//
// The worst case scenario would be not knowing the state, but it's
// better to at least upload the file than fail here.
try {
localStorage.setItem(key, UploadingState.Uploading);
} catch (e) {
handleLocalStorageError(e);
}
const uploadFailed = () => {
try {
localStorage.setItem(key, UploadingState.Failed);
} catch (e) {
handleLocalStorageError(e);
}
removeEventListener('unload', uploadFailed);
};
const uploadFinished = () => {
localStorage.removeItem(key);
removeEventListener('unload', uploadFailed);
};
    // If the file is not finished and the page is closed, we consider it
    // a failed upload which is safe to automatically replace on the
    // user's next upload attempt.
addEventListener('unload', uploadFailed);
return {
clientUid,
uploadFailed,
uploadFinished,
};
}
/**
* isClientUidAvailable returns true only if the client UID is known
 * by the client and the file failed to upload, that is, we are sure
 * it is safe to automatically replace.
 * If the file is still being uploaded or is not known by the client,
 * the user needs to be notified about it and asked what to do.
*/
export function isClientUidAvailable(clientUid: string): boolean {
const key = getStorageKey(clientUid);
const result = localStorage.getItem(key);
return result === UploadingState.Failed;
}
/**
 * getStorageKey generates the key to be used for local storage.
 * The key should be unique enough not to conflict with anything else.
*/
function getStorageKey(uid: string) {
return `${KEY_PREFIX}-${uid}`;
}
function handleLocalStorageError(err: unknown) {
    // When an upload succeeds, the keys are removed. If the user uploaded
    // many files and all failed, it could fill the whole storage. If we
    // cannot fit more keys anymore, let's simply remove them to keep the
    // local storage functional. The worst case is we ask the user to
    // replace the draft.
if (isQuotaExceededError(err)) {
captureMessage('The local storage is full, deleting old failed upload statuses');
clearAllUploadUIDsFromLocalStorage();
} else {
sendErrorReport(err);
}
}
function isQuotaExceededError(err: unknown): boolean {
return (
err instanceof DOMException &&
// Most browsers
(err.code === 22 ||
err.name === 'QuotaExceededError' ||
// Firefox
err.code === 1014 ||
err.name === 'NS_ERROR_DOM_QUOTA_REACHED')
);
}
function clearAllUploadUIDsFromLocalStorage() {
Object.keys(localStorage).forEach((key) => {
if (key.startsWith(KEY_PREFIX)) {
localStorage.removeItem(key);
}
});
}
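
// Usage sketch (illustrative addition, not part of the original module):
// pairing the returned callbacks around an upload attempt. `doUpload` is a
// hypothetical upload function, not defined anywhere in this code.
async function uploadWithClientUid(doUpload: (clientUid: string) => Promise<void>): Promise<void> {
    const { clientUid, uploadFailed, uploadFinished } = generateClientUid();
    try {
        await doUpload(clientUid);
        uploadFinished(); // Removes the UID from local storage.
    } catch (err) {
        uploadFailed(); // Marks the UID as safe to replace automatically.
        throw err;
    }
}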
| 3,179
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUpload.tsx
|
import { useCallback, useEffect } from 'react';
import { c } from 'ttag';
import { useEventManager, useGetUser, useNotifications, useOnline, usePreventLeave } from '@proton/components';
import { MAX_SAFE_UPLOADING_FILE_COUNT, MAX_SAFE_UPLOADING_FILE_SIZE } from '@proton/shared/lib/drive/constants';
import { HTTP_ERROR_CODES } from '@proton/shared/lib/errors';
import humanSize from '@proton/shared/lib/helpers/humanSize';
import { TransferCancel, TransferState } from '../../../components/TransferManager/transfer';
import { FileThresholdModalType, useFileThresholdModal } from '../../../components/modals/FileThresholdModal';
import { sendErrorReport } from '../../../utils/errorHandling';
import {
isTransferCancelError,
isTransferOngoing,
isTransferPausedByConnection,
isTransferProgress,
isTransferRetry,
} from '../../../utils/transfer';
import { MAX_UPLOAD_BLOCKS_LOAD, MAX_UPLOAD_FOLDER_LOAD } from '../constants';
import { UploadFileItem, UploadFileList } from '../interface';
import { UploadModalContainer } from './UploadModalContainer';
import { UploadProviderState } from './UploadProviderState';
import { UpdateFilter } from './interface';
import useUploadConflict from './useUploadConflict';
import useUploadControl from './useUploadControl';
import useUploadFile from './useUploadFile';
import useUploadFolder from './useUploadFolder';
import useUploadQueue, { convertFilterToFunction } from './useUploadQueue';
export default function useUpload(): [UploadProviderState, UploadModalContainer] {
const onlineStatus = useOnline();
const getUser = useGetUser();
const { call } = useEventManager();
const { createNotification } = useNotifications();
const { preventLeave } = usePreventLeave();
const queue = useUploadQueue();
const control = useUploadControl(queue.fileUploads, queue.updateWithCallback, queue.remove, queue.clear);
const { getFolderConflictHandler, getFileConflictHandler, conflictModal } = useUploadConflict(
queue.fileUploads,
queue.folderUploads,
queue.updateState,
queue.updateWithData,
control.cancelUploads
);
const { initFileUpload } = useUploadFile();
const { initFolderUpload } = useUploadFolder();
const [fileThresholdModal, showFileThresholdModal] = useFileThresholdModal();
const checkHasEnoughSpace = async (files: UploadFileList) => {
const totalFileListSize = files.reduce((sum, item) => sum + ((item as UploadFileItem).file?.size || 0), 0);
const remaining = control.calculateRemainingUploadBytes();
await call(); // Process events to get updated UsedSpace.
const { MaxSpace, UsedSpace } = await getUser();
const hasEnoughSpace = MaxSpace > UsedSpace + remaining + totalFileListSize;
return { hasEnoughSpace, total: totalFileListSize };
};
const showNotEnoughSpaceNotification = (total: number) => {
const formattedTotal = humanSize(total);
createNotification({
text: c('Notification').t`Not enough space to upload ${formattedTotal}`,
type: 'error',
});
};
/**
     * uploadFiles should be considered the main entry point for uploading
     * files in the Drive app. It does all necessary checks, such as available
     * space and the number of files, and adds files to the queue. The user is
     * notified if there is any issue adding files to the queue.
*/
const uploadFiles = async (
shareId: string,
parentId: string,
list: UploadFileList,
isForPhotos: boolean = false
) => {
const { hasEnoughSpace, total } = await checkHasEnoughSpace(list);
if (!hasEnoughSpace) {
showNotEnoughSpaceNotification(total);
return;
}
const fileCount = list.length;
let fileThresholdModalType: FileThresholdModalType | undefined;
if (total >= MAX_SAFE_UPLOADING_FILE_SIZE) {
fileThresholdModalType = 'fileSizeTotal';
}
if (fileCount >= MAX_SAFE_UPLOADING_FILE_COUNT) {
fileThresholdModalType = 'fileNumberTotal';
}
if (fileThresholdModalType) {
await new Promise<void>((resolve, reject) => {
void showFileThresholdModal({
type: fileThresholdModalType,
onSubmit: () => {
resolve();
},
onCancel: () =>
reject(new TransferCancel({ message: `Upload of ${fileCount} files was canceled` })),
});
});
}
await queue.add(shareId, parentId, list, isForPhotos).catch((err: any) => {
const errors = Array.isArray(err) ? err : [err];
errors.forEach((err) => {
if ((err as Error).name === 'UploadUserError' || (err as Error).name === 'UploadConflictError') {
createNotification({
text: err.message,
type: 'error',
});
} else {
createNotification({
text: c('Notification').t`Failed to upload files: ${err}`,
type: 'error',
});
console.error(err);
}
});
});
};
const restartUploads = useCallback(
async (idOrFilter: UpdateFilter) => {
const uploadFileList = queue.fileUploads
.filter(convertFilterToFunction(idOrFilter))
.map(({ file }) => ({ path: [], file }));
const { hasEnoughSpace, total } = await checkHasEnoughSpace(uploadFileList);
if (!hasEnoughSpace) {
showNotEnoughSpaceNotification(total);
return;
}
queue.updateState(idOrFilter, ({ parentId }) => {
return parentId ? TransferState.Pending : TransferState.Initializing;
});
},
[queue.fileUploads, queue.updateState]
);
// Effect to start next folder upload if there is enough capacity to do so.
useEffect(() => {
const { nextFolderUpload } = queue;
if (!nextFolderUpload) {
return;
}
const folderLoad = queue.folderUploads.filter(isTransferProgress).length;
if (folderLoad > MAX_UPLOAD_FOLDER_LOAD) {
return;
}
// Set progress right away to not start the folder more than once.
queue.updateState(nextFolderUpload.id, TransferState.Progress);
const controls = initFolderUpload(
nextFolderUpload.shareId,
nextFolderUpload.parentId,
nextFolderUpload.name,
nextFolderUpload.modificationTime,
getFolderConflictHandler(nextFolderUpload.id)
);
control.add(nextFolderUpload.id, controls);
void preventLeave(
controls
.start()
.then(({ folderId, folderName }) => {
queue.updateWithData(nextFolderUpload.id, TransferState.Done, { folderId, name: folderName });
})
.catch((error) => {
if (isTransferCancelError(error)) {
queue.updateState(nextFolderUpload.id, TransferState.Canceled);
} else {
queue.updateWithData(nextFolderUpload.id, TransferState.Error, { error });
sendErrorReport(error);
}
})
.finally(() => {
control.remove(nextFolderUpload.id);
})
);
}, [queue.nextFolderUpload, queue.folderUploads]);
// Effect to start next file upload if there is enough capacity to do so.
useEffect(() => {
const { nextFileUpload } = queue;
if (!nextFileUpload) {
return;
}
const fileLoad = control.calculateFileUploadLoad();
if (fileLoad > MAX_UPLOAD_BLOCKS_LOAD) {
return;
}
// Set progress right away to not start the file more than once.
queue.updateState(nextFileUpload.id, TransferState.Progress);
const controls = initFileUpload(
nextFileUpload.shareId,
nextFileUpload.parentId,
nextFileUpload.file,
getFileConflictHandler(nextFileUpload.id),
nextFileUpload.isForPhotos
);
control.add(nextFileUpload.id, controls);
void preventLeave(
controls
.start({
onInit: (mimeType: string, fileName: string) => {
// Keep the previous state for cases when the upload is paused.
queue.updateWithData(nextFileUpload.id, ({ state }) => state, { mimeType, name: fileName });
},
onProgress: (increment: number) => {
control.updateProgress(nextFileUpload.id, increment);
},
onNetworkError: (error: any) => {
queue.updateWithData(nextFileUpload.id, TransferState.NetworkError, { error });
},
onFinalize: () => {
queue.updateState(nextFileUpload.id, TransferState.Finalizing);
},
})
.then(() => {
queue.updateState(nextFileUpload.id, TransferState.Done);
})
.catch((error) => {
if (isTransferCancelError(error)) {
queue.updateState(nextFileUpload.id, TransferState.Canceled);
} else if (isTransferRetry(error)) {
queue.updateState(nextFileUpload.id, ({ parentId }) =>
parentId ? TransferState.Pending : TransferState.Initializing
);
} else {
queue.updateWithData(nextFileUpload.id, TransferState.Error, { error });
sendErrorReport(error);
}
                    // If the error is 429 (rate limited), we should not
                    // continue with other uploads in the queue and instead
                    // fail fast; otherwise it just triggers stricter jails
                    // and leads nowhere.
if (error?.status === HTTP_ERROR_CODES.TOO_MANY_REQUESTS) {
control.cancelUploads(isTransferOngoing);
}
})
.finally(() => {
control.remove(nextFileUpload.id);
})
);
}, [
queue.nextFileUpload,
        // calculateFileUploadLoad gives a different result every time, but we
        // don't want to use it as a dependency so that this effect doesn't
        // run too often (on every render). Depending on allUploads is a good
        // compromise.
queue.allUploads,
]);
useEffect(() => {
if (onlineStatus) {
const ids = queue.allUploads.filter(isTransferPausedByConnection).map(({ id }) => id);
control.resumeUploads(({ id }) => ids.includes(id));
}
}, [onlineStatus]);
return [
{
uploads: queue.allUploads,
hasUploads: queue.hasUploads,
uploadFiles,
getUploadsProgresses: control.getProgresses,
pauseUploads: control.pauseUploads,
resumeUploads: control.resumeUploads,
cancelUploads: control.cancelUploads,
restartUploads,
removeUploads: control.removeUploads,
clearUploads: control.clearUploads,
},
{
fileThresholdModal,
conflictModal,
},
];
}
| 3,180
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadConflict.test.tsx
|
import { ReactNode } from 'react';
import { act, renderHook } from '@testing-library/react-hooks';
import { ModalsProvider } from '@proton/components';
import { TransferState } from '../../../components/TransferManager/transfer';
import { mockGlobalFile, testFile } from '../../../utils/test/file';
import { TransferConflictStrategy } from '../interface';
import { FileUpload, FolderUpload } from './interface';
import useUploadConflict from './useUploadConflict';
const mockModal = jest.fn();
const mockShowModal = jest.fn();
jest.mock('../../../components/modals/ConflictModal.tsx', () => ({
useConflictModal: () => [mockModal, mockShowModal],
}));
describe('useUploadConflict', () => {
const mockUpdateState = jest.fn();
const mockUpdateWithData = jest.fn();
const mockCancelUploads = jest.fn();
let abortController: AbortController;
const renderConflict = () => {
const fileUploads: FileUpload[] = ['file1', 'file2'].map((id) => ({
id,
shareId: 'shareId',
startDate: new Date(),
state: TransferState.Conflict,
file: testFile(`${id}.txt`),
meta: {
filename: `${id}.txt`,
mimeType: 'txt',
},
}));
const folderUploads: FolderUpload[] = [];
const wrapper = ({ children }: { children: ReactNode }) => <ModalsProvider>{children}</ModalsProvider>;
const { result } = renderHook(
() => useUploadConflict(fileUploads, folderUploads, mockUpdateState, mockUpdateWithData, mockCancelUploads),
{ wrapper }
);
return result;
};
beforeEach(() => {
mockGlobalFile();
mockModal.mockClear();
mockShowModal.mockClear();
mockUpdateState.mockClear();
mockUpdateWithData.mockClear();
mockCancelUploads.mockClear();
abortController = new AbortController();
});
it('aborts promise returned by file conflict handler', async () => {
const hook = renderConflict();
await act(async () => {
const conflictHandler = hook.current.getFileConflictHandler('file1');
const promise = conflictHandler(abortController.signal);
expect(mockUpdateWithData.mock.calls).toMatchObject([
['file1', 'conflict', { originalIsFolder: undefined }],
]);
abortController.abort();
await expect(promise).rejects.toThrowError('Upload was canceled');
});
});
it('updates with info about original item', async () => {
const hook = renderConflict();
await act(async () => {
const conflictHandler = hook.current.getFileConflictHandler('file1');
const originalIsDraft = true;
const originalIsFolder = false;
const promise = conflictHandler(abortController.signal, originalIsDraft, originalIsFolder);
expect(mockUpdateWithData.mock.calls).toMatchObject([
['file1', 'conflict', { originalIsDraft, originalIsFolder }],
]);
abortController.abort();
await expect(promise).rejects.toThrowError('Upload was canceled');
});
});
it('waits and resolves in conflict strategy for one', async () => {
mockShowModal.mockImplementation(({ apply }) => {
apply(TransferConflictStrategy.Rename, false);
});
const hook = renderConflict();
await act(async () => {
const conflictHandler = hook.current.getFileConflictHandler('file1');
const promise = conflictHandler(abortController.signal);
await expect(promise).resolves.toBe(TransferConflictStrategy.Rename);
expect(mockUpdateState.mock.calls.length).toBe(1);
expect(mockUpdateState.mock.calls[0][0]).toBe('file1');
expect(mockCancelUploads.mock.calls.length).toBe(0);
});
});
it('waits and resolves in conflict strategy for all', async () => {
mockShowModal.mockImplementation(({ apply }) => {
apply(TransferConflictStrategy.Rename, true);
});
const hook = renderConflict();
await act(async () => {
const conflictHandler1 = hook.current.getFileConflictHandler('file1');
const promise1 = conflictHandler1(abortController.signal);
await expect(promise1).resolves.toBe(TransferConflictStrategy.Rename);
expect(mockUpdateState.mock.calls.length).toBe(1);
            expect(mockUpdateState.mock.calls[0][0]).not.toBe('file1'); // It is a dynamic function, checked later.
expect(mockCancelUploads.mock.calls.length).toBe(0);
const conflictHandler2 = hook.current.getFileConflictHandler('file2');
const promise2 = conflictHandler2(abortController.signal);
await expect(promise2).resolves.toBe(TransferConflictStrategy.Rename);
// Only conflicting files are updated for file resolver.
const updateState = mockUpdateState.mock.calls[0][0];
[
[TransferState.Conflict, testFile('a.txt'), true],
[TransferState.Conflict, undefined, false],
[TransferState.Progress, testFile('a.txt'), false],
[TransferState.Progress, undefined, false],
].forEach(([state, file, expectedResult]) => {
expect(updateState({ state, file })).toBe(expectedResult);
});
});
});
it('waits and cancels all uploads', async () => {
mockShowModal.mockImplementation(({ cancelAll }) => {
cancelAll();
});
const hook = renderConflict();
await act(async () => {
const conflictHandler1 = hook.current.getFileConflictHandler('file1');
const promise1 = conflictHandler1(abortController.signal);
await expect(promise1).resolves.toBe(TransferConflictStrategy.Skip);
const conflictHandler2 = hook.current.getFileConflictHandler('file2');
const promise2 = conflictHandler2(abortController.signal);
await expect(promise2).resolves.toBe(TransferConflictStrategy.Skip);
expect(mockUpdateState.mock.calls.length).toBe(0);
expect(mockCancelUploads.mock.calls.length).toBe(1);
});
});
});
| 3,181
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadConflict.tsx
|
import { useCallback, useEffect, useRef } from 'react';
import { TransferCancel, TransferState } from '../../../components/TransferManager/transfer';
import { useConflictModal } from '../../../components/modals/ConflictModal';
import { waitUntil } from '../../../utils/async';
import { isTransferActive, isTransferConflict } from '../../../utils/transfer';
import { TransferConflictStrategy } from '../interface';
import { ConflictStrategyHandler, FileUpload, FolderUpload, UpdateData, UpdateFilter, UpdateState } from './interface';
// The empty string is guaranteed not to conflict with any upload ID or
// folder name, because no upload has an empty ID.
const CONFLICT_STRATEGY_ALL_ID = '';
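// Illustrative shape of a strategy map (keys are upload IDs, plus the
// empty-string key for "apply to all"; the IDs and values here are made up):
//   { '': TransferConflictStrategy.Rename, 'upload-1': TransferConflictStrategy.Skip }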
export default function useUploadConflict(
fileUploads: FileUpload[],
folderUploads: FolderUpload[],
updateState: (filter: UpdateFilter, newState: UpdateState) => void,
updateWithData: (filter: UpdateFilter, newState: UpdateState, data: UpdateData) => void,
cancelUploads: (filter: UpdateFilter) => void
) {
const [conflictModal, showConflictModal] = useConflictModal();
// Only one conflict-strategy modal should be visible at a time.
const isConflictStrategyModalOpen = useRef(false);
// Conflict strategy is set per upload, or under CONFLICT_STRATEGY_ALL_ID
// when the selection applies to all uploads.
// Strategies are cleared once all uploads are finished so that the user
// is asked again (the user could start another upload an hour later).
const fileConflictStrategy = useRef<{ [id: string]: TransferConflictStrategy }>({});
const folderConflictStrategy = useRef<{ [id: string]: TransferConflictStrategy }>({});
useEffect(() => {
// "Apply to all" should be active till the last transfer is active.
// Once all transfers finish, user can start another minutes or hours
// later and that means we should ask again.
const hasNoActiveUpload = ![...fileUploads, ...folderUploads].find(isTransferActive);
if (hasNoActiveUpload) {
fileConflictStrategy.current = {};
folderConflictStrategy.current = {};
}
}, [fileUploads, folderUploads]);
/**
* getConflictHandler returns a handler which either returns the strategy
* right away, or sets the state of the upload to conflict, which opens
* ConflictModal to ask the user what to do next. The handler waits until
* the user selects a strategy; meanwhile no other upload is started, in
* case the user applies the selection to all transfers, which might even
* be to cancel them all.
*/
const getConflictHandler = useCallback(
(
conflictStrategyRef: React.MutableRefObject<{ [id: string]: TransferConflictStrategy }>,
uploadId: string
): ConflictStrategyHandler => {
return (abortSignal, originalIsDraft, originalIsFolder) => {
const getStrategy = (): TransferConflictStrategy | undefined => {
return (
conflictStrategyRef.current[CONFLICT_STRATEGY_ALL_ID] || conflictStrategyRef.current[uploadId]
);
};
const strategy = getStrategy();
if (strategy) {
return Promise.resolve(strategy);
}
updateWithData(uploadId, TransferState.Conflict, { originalIsDraft, originalIsFolder });
return new Promise((resolve, reject) => {
waitUntil(() => !!getStrategy(), abortSignal)
.then(() => {
const strategy = getStrategy() as TransferConflictStrategy;
resolve(strategy);
})
.catch(() => {
reject(new TransferCancel({ message: 'Upload was canceled' }));
});
});
};
},
[updateWithData]
);
const getFileConflictHandler = useCallback(
(uploadId: string) => {
return getConflictHandler(fileConflictStrategy, uploadId);
},
[getConflictHandler]
);
const getFolderConflictHandler = useCallback(
(uploadId: string) => {
return getConflictHandler(folderConflictStrategy, uploadId);
},
[getConflictHandler]
);
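// How a handler is typically consumed (illustrative sketch; the real call
// site lives in the upload flow): the returned promise resolves once the
// user picks a strategy, or rejects with TransferCancel on abort.
//   const handler = getFileConflictHandler(uploadId);
//   const strategy = await handler(abortSignal, originalIsDraft, originalIsFolder);
//   if (strategy === TransferConflictStrategy.Skip) { /* skip this upload */ }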
const openConflictStrategyModal = (
uploadId: string,
conflictStrategyRef: React.MutableRefObject<{ [id: string]: TransferConflictStrategy }>,
params: { name: string; isFolder?: boolean; originalIsDraft?: boolean; originalIsFolder?: boolean }
) => {
isConflictStrategyModalOpen.current = true;
const apply = (strategy: TransferConflictStrategy, all: boolean) => {
isConflictStrategyModalOpen.current = false;
conflictStrategyRef.current[all ? CONFLICT_STRATEGY_ALL_ID : uploadId] = strategy;
if (all) {
updateState(({ state, file }) => {
// Update only folders for the folder conflict strategy,
// and only files for the file conflict strategy.
const isFolder = file === undefined;
if (isFolder !== (params.isFolder || false)) {
return false;
}
return isTransferConflict({ state });
}, TransferState.Progress);
} else {
updateState(uploadId, TransferState.Progress);
}
};
const cancelAll = () => {
isConflictStrategyModalOpen.current = false;
conflictStrategyRef.current[CONFLICT_STRATEGY_ALL_ID] = TransferConflictStrategy.Skip;
cancelUploads(isTransferActive);
};
void showConflictModal({ apply, cancelAll, ...params });
};
// Modals are opened in this one place only, to avoid race conditions and
// to ensure only one modal, either for file or folder, is opened at a time.
useEffect(() => {
if (isConflictStrategyModalOpen.current) {
return;
}
const conflictingFolderUpload = folderUploads.find(isTransferConflict);
if (conflictingFolderUpload) {
openConflictStrategyModal(conflictingFolderUpload.id, folderConflictStrategy, {
name: conflictingFolderUpload.meta.filename,
isFolder: true,
originalIsDraft: conflictingFolderUpload.originalIsDraft,
originalIsFolder: conflictingFolderUpload.originalIsFolder,
});
return;
}
const conflictingFileUpload = fileUploads.find(isTransferConflict);
if (conflictingFileUpload) {
openConflictStrategyModal(conflictingFileUpload.id, fileConflictStrategy, {
name: conflictingFileUpload.meta.filename,
originalIsDraft: conflictingFileUpload.originalIsDraft,
originalIsFolder: conflictingFileUpload.originalIsFolder,
});
}
}, [fileUploads, folderUploads]);
return {
getFolderConflictHandler,
getFileConflictHandler,
conflictModal,
};
}
| 3,182
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadControl.test.ts
|
import { act, renderHook } from '@testing-library/react-hooks';
import { FILE_CHUNK_SIZE } from '@proton/shared/lib/drive/constants';
import { TransferState } from '../../../components/TransferManager/transfer';
import { mockGlobalFile, testFile } from '../../../utils/test/file';
import { MAX_BLOCKS_PER_UPLOAD } from '../constants';
import { UploadFileControls, UploadFolderControls } from '../interface';
import { FileUpload, UpdateFilter } from './interface';
import useUploadControl from './useUploadControl';
function makeFileUpload(id: string, state: TransferState, filename: string, size = 2 * FILE_CHUNK_SIZE): FileUpload {
const file = testFile(filename, size);
return {
id,
shareId: 'shareId',
startDate: new Date(),
state,
file,
meta: {
filename: file.name,
mimeType: file.type,
size: file.size,
},
};
}
describe('useUploadControl', () => {
const mockUpdateWithCallback = jest.fn();
const mockRemoveFromQueue = jest.fn();
const mockClearQueue = jest.fn();
let hook: {
current: {
add: (id: string, uploadControls: UploadFileControls | UploadFolderControls) => void;
remove: (id: string) => void;
updateProgress: (id: string, increment: number) => void;
calculateRemainingUploadBytes: () => number;
calculateFileUploadLoad: () => number;
pauseUploads: (idOrFilter: UpdateFilter) => void;
resumeUploads: (idOrFilter: UpdateFilter) => void;
cancelUploads: (idOrFilter: UpdateFilter) => void;
removeUploads: (idOrFilter: UpdateFilter) => void;
};
};
beforeEach(() => {
mockUpdateWithCallback.mockClear();
mockRemoveFromQueue.mockClear();
mockClearQueue.mockClear();
mockGlobalFile();
const fileUploads: FileUpload[] = [
makeFileUpload('init', TransferState.Initializing, 'init.txt'),
makeFileUpload('pending', TransferState.Pending, 'pending.txt'),
makeFileUpload('progress', TransferState.Progress, 'progress.txt', 2 * FILE_CHUNK_SIZE + 42),
makeFileUpload('empty', TransferState.Progress, 'empty.txt', 0),
makeFileUpload('big', TransferState.Progress, 'big.txt', 100 * FILE_CHUNK_SIZE),
makeFileUpload('done', TransferState.Done, 'done.txt'),
];
const { result } = renderHook(() =>
useUploadControl(fileUploads, mockUpdateWithCallback, mockRemoveFromQueue, mockClearQueue)
);
hook = result;
});
it('calculates remaining upload bytes', () => {
const controls = { start: jest.fn(), cancel: jest.fn() };
act(() => {
hook.current.add('progress', controls);
hook.current.updateProgress('progress', FILE_CHUNK_SIZE);
hook.current.updateProgress('progress', 47);
hook.current.updateProgress('progress', -5);
expect(hook.current.calculateRemainingUploadBytes()).toBe(
// 2 init + 2 pending + 1 progress (+42 extra) + 100 big
105 * FILE_CHUNK_SIZE + 42
);
});
});
it('calculates file upload load', () => {
const controls = { start: jest.fn(), cancel: jest.fn() };
act(() => {
hook.current.add('progress', controls);
hook.current.updateProgress('progress', FILE_CHUNK_SIZE);
expect(hook.current.calculateFileUploadLoad()).toBe(
// progress (3 - 1 done) + empty (always at least one) + big (max MAX_BLOCKS_PER_UPLOAD)
2 + 1 + MAX_BLOCKS_PER_UPLOAD
);
});
});
});
| 3,183
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadControl.ts
|
import { useCallback, useRef } from 'react';
import { FILE_CHUNK_SIZE } from '@proton/shared/lib/drive/constants';
import { TransferProgresses, TransferState } from '../../../components/TransferManager/transfer';
import { isTransferActive, isTransferFinalizing, isTransferPending, isTransferProgress } from '../../../utils/transfer';
import { MAX_BLOCKS_PER_UPLOAD } from '../constants';
import { UploadFileControls, UploadFolderControls } from '../interface';
import { FileUpload, UpdateCallback, UpdateFilter, UpdateState } from './interface';
export default function useUploadControl(
fileUploads: FileUpload[],
updateWithCallback: (idOrFilter: UpdateFilter, newState: UpdateState, callback: UpdateCallback) => void,
removeFromQueue: (idOrFilter: UpdateFilter, callback: UpdateCallback) => void,
clearQueue: () => void
) {
// Controls keep references to ongoing uploads so that they can be
// paused or canceled.
const controls = useRef<{ [id: string]: UploadFileControls | UploadFolderControls }>({});
const progresses = useRef<TransferProgresses>({});
const add = (id: string, uploadControls: UploadFileControls | UploadFolderControls) => {
controls.current[id] = uploadControls;
progresses.current[id] = 0;
};
const remove = (id: string) => {
delete controls.current[id];
delete progresses.current[id];
};
const updateProgress = (id: string, increment: number) => {
// Progress might be updated even when the transfer is already finished
// and its progress entry is gone. In that case it is OK to simply
// ignore the call rather than crash.
if (progresses.current[id] === undefined) {
return;
}
progresses.current[id] += increment;
// Because the increment can be a float, arithmetic can result in
// -0.0000000001, which would then be displayed as -0 after rounding.
if (progresses.current[id] < 0) {
progresses.current[id] = 0;
}
};
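// Example (illustrative): increments may also be negative, e.g. when
// already-counted progress has to be rolled back, so
// updateProgress(id, -FILE_CHUNK_SIZE) subtracts one chunk while the
// clamp above keeps the total at >= 0.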
const getProgresses = () => ({ ...progresses.current });
/**
* calculateRemainingUploadBytes returns, based on the progress of ongoing
* uploads, how much data is still planned to be uploaded, so that the
* available space for the next batch of files can be counted properly.
*/
const calculateRemainingUploadBytes = (): number => {
return fileUploads.reduce((sum, upload) => {
if (!isTransferActive(upload) || !upload.file.size) {
return sum;
}
// uploadedChunksSize counts only fully uploaded blocks. Fully
// uploaded blocks are counted into used space returned by API.
// The algorithm is not precise as file is uploaded in parallel,
// but this is what we can do without introducing complex
// computation. If better precision is needed, we need to keep
// track of each block, not the whole file.
const uploadedChunksSize =
progresses.current[upload.id] - (progresses.current[upload.id] % FILE_CHUNK_SIZE) || 0;
return sum + upload.file.size - uploadedChunksSize;
}, 0);
};
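// Worked example (mirrors the unit test): with active uploads of 2, 2,
// 2 (+42 B), 0 and 100 chunks, and one chunk of the in-progress file fully
// uploaded, the result is (2 + 2 + 1 + 100) * FILE_CHUNK_SIZE + 42.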
/**
* calculateFileUploadLoad returns how many blocks are currently being
* uploaded by all ongoing uploads, taking the real state into account
* using the progresses.
*/
const calculateFileUploadLoad = (): number => {
// Count both in-progress and finalizing transfers as ones still running
// the worker and taking up some load. Without counting the finalizing
// state, and with the API being slow, we could keep around too many workers.
return fileUploads
.filter((transfer) => isTransferProgress(transfer) || isTransferFinalizing(transfer))
.reduce((load, upload) => {
const remainingSize = (upload.file.size || 0) - (progresses.current[upload.id] || 0);
// Even if the file is empty, keep the minimum of blocks to 1,
// otherwise it would start too many threads.
const chunks = Math.max(Math.ceil(remainingSize / FILE_CHUNK_SIZE), 1);
const loadIncrease = Math.min(MAX_BLOCKS_PER_UPLOAD, chunks);
return load + loadIncrease;
}, 0);
};
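// Worked example (mirrors the unit test): one transfer with 2 chunks left,
// one empty file (counted as at least 1), and one large transfer capped at
// MAX_BLOCKS_PER_UPLOAD give a load of 2 + 1 + MAX_BLOCKS_PER_UPLOAD.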
const pauseUploads = useCallback(
(idOrFilter: UpdateFilter) => {
updateWithCallback(idOrFilter, TransferState.Paused, ({ id, state }) => {
if (isTransferProgress({ state }) || isTransferPending({ state })) {
(controls.current[id] as UploadFileControls)?.pause?.();
}
});
},
[updateWithCallback]
);
const resumeUploads = useCallback(
(idOrFilter: UpdateFilter) => {
updateWithCallback(
idOrFilter,
({ resumeState, parentId }) => {
// If the parent folder was created during the pause,
// go back to pending, not initializing state.
if (parentId && resumeState === TransferState.Initializing) {
return TransferState.Pending;
}
return resumeState || TransferState.Progress;
},
({ id }) => {
(controls.current[id] as UploadFileControls)?.resume?.();
}
);
},
[updateWithCallback]
);
const cancelUploads = useCallback(
(idOrFilter: UpdateFilter) => {
updateWithCallback(idOrFilter, TransferState.Canceled, ({ id }) => {
controls.current[id]?.cancel();
});
},
[updateWithCallback]
);
const removeUploads = useCallback(
(idOrFilter: UpdateFilter) => {
// We should never simply remove uploads, but cancel them first, so
// they do not continue in the background without our knowledge.
cancelUploads(idOrFilter);
removeFromQueue(idOrFilter, ({ id }) => remove(id));
},
[removeFromQueue]
);
const clearUploads = useCallback(() => {
Object.entries(controls.current).forEach(([, uploadControls]) => uploadControls.cancel());
controls.current = {};
progresses.current = {};
clearQueue();
}, [clearQueue]);
return {
add,
remove,
updateProgress,
getProgresses,
calculateRemainingUploadBytes,
calculateFileUploadLoad,
pauseUploads,
resumeUploads,
cancelUploads,
removeUploads,
clearUploads,
};
}
| 3,184
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadFile.ts
|
import { c } from 'ttag';
import { CryptoProxy, PrivateKeyReference, SessionKey } from '@proton/crypto';
import {
queryCreateFile,
queryCreateFileRevision,
queryDeleteFileRevision,
queryRequestUpload,
queryUpdateFileRevision,
queryVerificationData,
} from '@proton/shared/lib/api/drive/files';
import { base64StringToUint8Array, uint8ArrayToBase64String } from '@proton/shared/lib/helpers/encoding';
import {
CreateFileResult,
CreateFileRevisionResult,
GetVerificationDataResult,
RequestUploadResult,
} from '@proton/shared/lib/interfaces/drive/file';
import { encryptName, generateLookupHash } from '@proton/shared/lib/keys/driveKeys';
import { TransferCancel } from '../../../components/TransferManager/transfer';
import useQueuedFunction from '../../../hooks/util/useQueuedFunction';
import { logError } from '../../../utils/errorHandling';
import { ValidationError } from '../../../utils/errorHandling/ValidationError';
import { useDebouncedRequest } from '../../_api';
import { useDriveEventManager } from '../../_events';
import { DecryptedLink, useLink, useLinksActions, validateLinkName } from '../../_links';
import { useShare } from '../../_shares';
import { useVolumesState } from '../../_volumes';
import { MAX_UPLOAD_BLOCKS_LOAD } from '../constants';
import { initUploadFileWorker } from '../initUploadFileWorker';
import {
FileKeys,
FileRequestBlock,
PhotoUpload,
ThumbnailRequestBlock,
TransferConflictStrategy,
UploadFileControls,
VerificationData,
} from '../interface';
import { ConflictStrategyHandler, UploadUserError } from './interface';
import { generateClientUid } from './uploadClientUid';
import useUploadHelper from './useUploadHelper';
interface FileRevision {
isNewFile: boolean;
filename: string;
fileID: string;
revisionID: string;
previousRevisionID?: string;
sessionKey: SessionKey;
privateKey: PrivateKeyReference;
// Callbacks to control local client UIDs.
// See useUploadClientUid for more details.
uploadFinished: () => void;
uploadFailed: () => void;
}
export default function useUploadFile() {
const debouncedRequest = useDebouncedRequest();
const queuedFunction = useQueuedFunction();
const { getLinkPrivateKey, getLinkSessionKey, getLinkHashKey } = useLink();
const { trashLinks, deleteChildrenLinks } = useLinksActions();
const { getShareCreatorKeys } = useShare();
const { findAvailableName, getLinkByName } = useUploadHelper();
const driveEventManager = useDriveEventManager();
const volumeState = useVolumesState();
const initFileUpload = (
shareId: string,
parentId: string,
file: File,
getFileConflictStrategy: ConflictStrategyHandler,
isForPhotos: boolean = false
): UploadFileControls => {
let shareKeysCache: Awaited<ReturnType<typeof getShareCreatorKeys>>;
const getShareKeys = async (abortSignal: AbortSignal) => {
if (!shareKeysCache) {
shareKeysCache = await getShareCreatorKeys(abortSignal, shareId);
}
return shareKeysCache;
};
const createFile = async (
abortSignal: AbortSignal,
filename: string,
mimeType: string,
hash: string,
keys: FileKeys,
previousClientUid?: string
): Promise<FileRevision> => {
const error = validateLinkName(file.name);
if (error) {
throw new ValidationError(error);
}
const [addressKeyInfo, parentPrivateKey] = await Promise.all([
getShareKeys(abortSignal),
getLinkPrivateKey(abortSignal, shareId, parentId),
]);
const Name = await encryptName(filename, parentPrivateKey, addressKeyInfo.privateKey);
checkSignal(abortSignal, filename);
const { clientUid, uploadFinished, uploadFailed } = generateClientUid(previousClientUid);
// Do not abort using the signal - the file could be created and we
// wouldn't know its ID to do proper cleanup.
const { File: createdFile } = await debouncedRequest<CreateFileResult>(
queryCreateFile(shareId, {
ContentKeyPacket: keys.contentKeyPacket,
ContentKeyPacketSignature: keys.contentKeyPacketSignature,
Hash: hash,
MIMEType: mimeType,
Name,
NodeKey: keys.nodeKey,
NodePassphrase: keys.nodePassphrase,
NodePassphraseSignature: keys.nodePassphraseSignature,
ParentLinkID: parentId,
SignatureAddress: addressKeyInfo.address.Email,
ClientUID: clientUid,
})
).catch((err) => {
uploadFailed();
throw err;
});
return {
fileID: createdFile.ID,
filename,
isNewFile: true,
privateKey: keys.privateKey,
revisionID: createdFile.RevisionID,
sessionKey: keys.sessionKey,
uploadFinished,
uploadFailed,
};
};
const createRevision = async (abortSignal: AbortSignal, link: DecryptedLink): Promise<FileRevision> => {
const currentActiveRevisionID = link.activeRevision?.id;
if (!currentActiveRevisionID) {
throw new Error(c('Error').t`The original file has missing active revision`);
}
const [privateKey, sessionKey] = await Promise.all([
getLinkPrivateKey(abortSignal, shareId, link.linkId),
getLinkSessionKey(abortSignal, shareId, link.linkId),
]);
if (!sessionKey) {
throw new Error(c('Error').t`The original file has missing session key`);
}
checkSignal(abortSignal, link.name);
const { clientUid, uploadFinished, uploadFailed } = generateClientUid();
// Do not abort using the signal - the revision could be created and we
// wouldn't know its ID to do proper cleanup.
const { Revision } = await debouncedRequest<CreateFileRevisionResult>(
queryCreateFileRevision(shareId, link.linkId, currentActiveRevisionID, clientUid)
).catch((err) => {
if (err.data?.Code === 2500) {
throw new UploadUserError(
c('Error').t`The new revision of original file is not uploaded yet, please try again later`
);
}
throw err;
});
return {
isNewFile: false,
filename: file.name,
fileID: link.linkId,
revisionID: Revision.ID,
previousRevisionID: currentActiveRevisionID,
sessionKey,
privateKey,
uploadFinished,
uploadFailed,
};
};
/**
* replaceFile loads all children in the target folder and finds
* the link which is about to be replaced. If the original link is
* a folder, the whole folder is moved to trash and a new file is
* created. If the original link is a file, a new revision is created.
*/
const replaceFile = async (
abortSignal: AbortSignal,
mimeType: string,
keys: FileKeys
): Promise<FileRevision> => {
const link = await getLinkByName(abortSignal, shareId, parentId, file.name);
// If a collision happened but the link is not present, that means
// the file is still being uploaded.
if (!link) {
throw new UploadUserError(c('Error').t`The original file is not uploaded yet, please try again later`);
}
checkSignal(abortSignal, file.name);
if (!link.isFile) {
const parentHashKey = await getLinkHashKey(abortSignal, shareId, parentId);
if (!parentHashKey) {
throw Error(c('Error').t`The original file has missing hash key`);
}
const hash = await generateLookupHash(file.name, parentHashKey);
await trashLinks(abortSignal, [{ shareId, parentLinkId: parentId, linkId: link.linkId }]);
return createFile(abortSignal, file.name, mimeType, hash, keys);
}
return createRevision(abortSignal, link);
};
/**
* replaceDraft removes the previous link completely and creates a new
* file, because the API does not allow creating a new revision for a
* link without any revision. One day it would be good to keep the draft
* and just finish uploading the missing blocks.
*/
const replaceDraft = async (
abortSignal: AbortSignal,
filename: string,
mimeType: string,
hash: string,
keys: FileKeys,
linkId: string,
clientUid?: string
) => {
await deleteChildrenLinks(abortSignal, shareId, parentId, [linkId]);
return createFile(abortSignal, filename, mimeType, hash, keys, clientUid);
};
const createFileRevision = queuedFunction(
'create_file_revision',
async (abortSignal: AbortSignal, mimeType: string, keys: FileKeys): Promise<FileRevision> => {
if (isForPhotos) {
const parentHashKey = await getLinkHashKey(abortSignal, shareId, parentId);
if (!parentHashKey) {
throw Error('Missing hash key on folder link');
}
const hash = await generateLookupHash(file.name, parentHashKey);
return createFile(abortSignal, file.name, mimeType, hash, keys);
}
const {
filename: newName,
hash,
draftLinkId,
clientUid,
} = await findAvailableName(abortSignal, shareId, parentId, file.name);
checkSignal(abortSignal, file.name);
// Automatically replace file - previous draft was uploaded
// by the same client.
if (draftLinkId && clientUid) {
// Careful: an uploaded duplicate file has a different name, and
// this newName has to be used, not file.name.
// Example: upload A, then upload it again, which adds the number
// A (2) and fails, then upload again to replace the draft with the
// new upload - the name needs to be A (2), not just A.
return replaceDraft(abortSignal, newName, mimeType, hash, keys, draftLinkId, clientUid);
}
// TODO: Remove isForPhotos when we implement the Photos conflict check
if (file.name === newName || isForPhotos) {
return createFile(abortSignal, file.name, mimeType, hash, keys);
}
const link = await getLinkByName(abortSignal, shareId, parentId, file.name);
const originalIsFolder = link ? !link.isFile : false;
const conflictStrategy = await getFileConflictStrategy(abortSignal, !!draftLinkId, originalIsFolder);
if (conflictStrategy === TransferConflictStrategy.Rename) {
return createFile(abortSignal, newName, mimeType, hash, keys);
}
if (conflictStrategy === TransferConflictStrategy.Replace) {
if (draftLinkId) {
return replaceDraft(abortSignal, file.name, mimeType, hash, keys, draftLinkId);
}
return replaceFile(abortSignal, mimeType, keys);
}
if (conflictStrategy === TransferConflictStrategy.Skip) {
throw new TransferCancel({ message: c('Info').t`Transfer skipped for file "${file.name}"` });
}
throw new Error(`Unknown conflict strategy: ${conflictStrategy}`);
},
MAX_UPLOAD_BLOCKS_LOAD
);
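// Decision summary for createFileRevision (illustrative):
//   draft left by this client -> silently replace the draft
//   name is available         -> create a new file
//   name is taken             -> ask the user: Rename / Replace / Skip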
// If the upload was aborted but we already called finalize to commit the
// revision, we cannot delete the revision. The API does not support
// aborting a request, so the request will finish anyway, and calling
// deletion and revision committing at the same time can cause a
// confusing "file not found" error.
// Another option would be to wait for finalize to finish and then delete
// the revision right away, but that's more complex, and this is probably
// the safer option.
let finalizeCalled = false;
// Keep the promise reference so that when the upload is canceled before
// init has finished, the onError handler can wait for the creation and
// get the ID of the created file or revision to do proper clean-up.
let createdFileRevisionPromise: Promise<FileRevision>;
return initUploadFileWorker(file, isForPhotos, {
initialize: async (abortSignal: AbortSignal) => {
const [addressKeyInfo, parentPrivateKey] = await Promise.all([
getShareKeys(abortSignal),
getLinkPrivateKey(abortSignal, shareId, parentId),
]);
return {
addressPrivateKey: addressKeyInfo.privateKey,
parentPrivateKey,
};
},
createFileRevision: async (abortSignal: AbortSignal, mimeType: string, keys: FileKeys) => {
createdFileRevisionPromise = createFileRevision(abortSignal, mimeType, keys);
const [createdFileRevision, addressKeyInfo, parentHashKey] = await Promise.all([
createdFileRevisionPromise,
getShareKeys(abortSignal),
getLinkHashKey(abortSignal, shareId, parentId),
]);
checkSignal(abortSignal, createdFileRevision.filename);
return {
fileName: createdFileRevision.filename,
privateKey: createdFileRevision.privateKey,
sessionKey: createdFileRevision.sessionKey,
parentHashKey,
address: {
privateKey: addressKeyInfo.privateKey,
email: addressKeyInfo.address.Email,
},
};
},
getVerificationData: async (abortSignal: AbortSignal) => {
const createdFileRevision = await createdFileRevisionPromise;
if (!createdFileRevision) {
throw new Error(`Draft for "${file.name}" hasn't been created prior to verifying`);
}
try {
const { VerificationCode, ContentKeyPacket } = await debouncedRequest<GetVerificationDataResult>(
queryVerificationData(shareId, createdFileRevision.fileID, createdFileRevision.revisionID),
abortSignal
);
const verifierSessionKey = await CryptoProxy.decryptSessionKey({
binaryMessage: base64StringToUint8Array(ContentKeyPacket),
decryptionKeys: createdFileRevision.privateKey,
});
if (!verifierSessionKey) {
throw new Error('Verification session key could not be decrypted');
}
return {
verificationCode: base64StringToUint8Array(VerificationCode),
verifierSessionKey,
} satisfies VerificationData;
} catch (e) {
throw new Error('Upload failed: Verification of data failed', {
cause: e,
});
}
},
createBlockLinks: async (
abortSignal: AbortSignal,
fileBlocks: FileRequestBlock[],
thumbnailBlocks?: ThumbnailRequestBlock[]
) => {
const createdFileRevision = await createdFileRevisionPromise;
if (!createdFileRevision) {
throw new Error(`Draft for "${file.name}" hasn't been created prior to uploading`);
}
const addressKeyInfo = await getShareKeys(abortSignal);
const { UploadLinks, ThumbnailLinks } = await debouncedRequest<RequestUploadResult>(
queryRequestUpload({
BlockList: fileBlocks.map((block) => ({
Index: block.index,
Hash: uint8ArrayToBase64String(block.hash),
EncSignature: block.signature,
Size: block.size,
Verifier: {
Token: uint8ArrayToBase64String(block.verificationToken),
},
})),
AddressID: addressKeyInfo.address.ID,
LinkID: createdFileRevision.fileID,
RevisionID: createdFileRevision.revisionID,
ShareID: shareId,
ThumbnailList: thumbnailBlocks?.map((block) => ({
Hash: uint8ArrayToBase64String(block.hash),
Size: block.size,
Type: block.type,
})),
}),
abortSignal
);
return {
fileLinks: UploadLinks.map((link, index) => ({
index: fileBlocks[index].index,
token: link.Token,
url: link.BareURL,
})),
thumbnailLinks: ThumbnailLinks?.map((link, index) => ({
index,
token: link.Token,
url: link.BareURL,
})),
};
},
finalize: queuedFunction(
'upload_finalize',
async (signature: string, signatureAddress: string, xattr: string, photo?: PhotoUpload) => {
const createdFileRevision = await createdFileRevisionPromise;
if (!createdFileRevision) {
throw new Error(`Draft for "${file.name}" hasn't been created prior to uploading`);
}
if (finalizeCalled) {
return;
}
finalizeCalled = true;
await debouncedRequest(
queryUpdateFileRevision(shareId, createdFileRevision.fileID, createdFileRevision.revisionID, {
ManifestSignature: signature,
SignatureAddress: signatureAddress,
XAttr: xattr,
Photo: photo
? {
MainPhotoLinkID: null, // This is for live photos
CaptureTime: photo.captureTime,
Exif: photo.encryptedExif,
ContentHash: photo.contentHash,
}
: undefined,
})
);
createdFileRevision.uploadFinished();
const volumeId = volumeState.findVolumeId(shareId);
if (volumeId) {
await driveEventManager.pollEvents.volumes(volumeId, {
includeCommon: true,
});
}
},
5
),
onError: async (err) => {
if (finalizeCalled && err.name === 'AbortError') {
return;
}
finalizeCalled = true;
// If creation of revision failed, it is already processed by
// this handler. Do not throw it here again.
const createdFileRevision = await createdFileRevisionPromise?.catch(() => undefined);
try {
if (createdFileRevision) {
createdFileRevision.uploadFailed();
if (createdFileRevision.isNewFile) {
// Cleanup should not be able to abort.
await deleteChildrenLinks(new AbortController().signal, shareId, parentId, [
createdFileRevision.fileID,
]);
} else {
await debouncedRequest(
queryDeleteFileRevision(
shareId,
createdFileRevision.fileID,
createdFileRevision.revisionID
)
);
}
}
} catch (err: any) {
logError(err);
}
},
});
};
return {
initFileUpload,
};
}
function checkSignal(abortSignal: AbortSignal, name: string) {
if (abortSignal.aborted) {
throw new TransferCancel({ message: c('Info').t`Transfer canceled for file "${name}"` });
}
}
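// Usage sketch (illustrative; wiring shown only in outline): the upload
// provider creates controls per file and registers them with
// useUploadControl so they can be paused or canceled later:
//   const { initFileUpload } = useUploadFile();
//   const controls = initFileUpload(shareId, parentId, file, conflictHandler);
//   uploadControl.add(uploadId, controls);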
| 3,185
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadFolder.ts
|
import { c } from 'ttag';
import { TransferCancel } from '../../../components/TransferManager/transfer';
import useQueuedFunction from '../../../hooks/util/useQueuedFunction';
import { useLinkActions, useLinksActions } from '../../_links';
import { TransferConflictStrategy, UploadFolderControls } from '../interface';
import { ConflictStrategyHandler } from './interface';
import useUploadHelper from './useUploadHelper';
interface Folder {
isNewFolder: boolean;
folderId: string;
folderName: string;
}
export default function useUploadFolder() {
const queuedFunction = useQueuedFunction();
const { createFolder } = useLinkActions();
const { deleteChildrenLinks } = useLinksActions();
const { findAvailableName, getLinkByName } = useUploadHelper();
const createEmptyFolder = async (
abortSignal: AbortSignal,
shareId: string,
parentId: string,
folderName: string,
modificationTime?: Date
): Promise<Folder> => {
const folderId = await createFolder(abortSignal, shareId, parentId, folderName, modificationTime);
return {
folderId,
isNewFolder: true,
folderName,
};
};
const getFolder = async (
abortSignal: AbortSignal,
shareId: string,
parentId: string,
folderName: string
): Promise<Folder> => {
const link = await getLinkByName(abortSignal, shareId, parentId, folderName);
if (!link) {
throw Error(c('Error').t`The original folder was not found`);
}
if (link.isFile) {
throw Error(c('Error').t`File cannot be merged with folder`);
}
checkSignal(abortSignal, folderName);
return {
folderId: link.linkId,
isNewFolder: false,
folderName,
};
};
const replaceDraft = async (
abortSignal: AbortSignal,
shareId: string,
parentId: string,
linkId: string,
folderName: string,
modificationTime?: Date
) => {
await deleteChildrenLinks(abortSignal, shareId, parentId, [linkId]);
return createEmptyFolder(abortSignal, shareId, parentId, folderName, modificationTime);
};
const prepareFolder = (
abortSignal: AbortSignal,
shareId: string,
parentId: string,
folderName: string,
modificationTime: Date | undefined,
getFolderConflictStrategy: ConflictStrategyHandler
): Promise<Folder> => {
const lowercaseName = folderName.toLowerCase();
return queuedFunction(`upload_empty_folder:${lowercaseName}`, async () => {
const {
filename: newName,
draftLinkId,
clientUid,
} = await findAvailableName(abortSignal, shareId, parentId, folderName);
checkSignal(abortSignal, folderName);
// Automatically replace the folder - the previous draft was uploaded
// by the same client.
if (draftLinkId && clientUid) {
return replaceDraft(abortSignal, shareId, parentId, draftLinkId, newName, modificationTime);
}
if (folderName === newName) {
return createEmptyFolder(abortSignal, shareId, parentId, folderName, modificationTime);
}
const link = await getLinkByName(abortSignal, shareId, parentId, folderName);
const originalIsFolder = link ? !link.isFile : false;
checkSignal(abortSignal, folderName);
const conflictStrategy = await getFolderConflictStrategy(abortSignal, !!draftLinkId, originalIsFolder);
if (conflictStrategy === TransferConflictStrategy.Rename) {
return createEmptyFolder(abortSignal, shareId, parentId, newName, modificationTime);
}
if (conflictStrategy === TransferConflictStrategy.Replace) {
if (draftLinkId) {
return replaceDraft(abortSignal, shareId, parentId, draftLinkId, folderName, modificationTime);
}
return getFolder(abortSignal, shareId, parentId, folderName);
}
if (conflictStrategy === TransferConflictStrategy.Skip) {
throw new TransferCancel({ message: c('Info').t`Transfer skipped for folder "${folderName}"` });
}
throw new Error(`Unknown conflict strategy: ${conflictStrategy}`);
})();
};
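// Decision summary for prepareFolder (illustrative): a draft left by this
// client is replaced silently; a free name creates the folder; a taken
// name asks the user - Rename creates a folder with the adjusted name,
// Replace merges into the existing folder, Skip cancels this folder upload.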
const initFolderUpload = (
shareId: string,
parentId: string,
folderName: string,
modificationTime: Date | undefined,
getFolderConflictStrategy: ConflictStrategyHandler
): UploadFolderControls => {
const abortController = new AbortController();
return {
start: () => {
return prepareFolder(
abortController.signal,
shareId,
parentId,
folderName,
modificationTime,
getFolderConflictStrategy
);
},
cancel: () => {
abortController.abort();
},
};
};
return {
initFolderUpload,
};
}
function checkSignal(abortSignal: AbortSignal, name: string) {
if (abortSignal.aborted) {
throw new TransferCancel({ message: c('Info').t`Transfer canceled for folder "${name}"` });
}
}
| 3,186
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadHelper.ts
|
import { queryCheckAvailableHashes } from '@proton/shared/lib/api/drive/link';
import { HashCheckResult } from '@proton/shared/lib/interfaces/drive/link';
import { generateLookupHash } from '@proton/shared/lib/keys/driveKeys';
import range from '@proton/utils/range';
import { useDebouncedRequest } from '../../_api';
import { adjustName, splitLinkName, useLink, useLinksListing } from '../../_links';
import { isClientUidAvailable } from './uploadClientUid';
const HASH_CHECK_AMOUNT = 10;
export default function useUploadHelper() {
const debouncedRequest = useDebouncedRequest();
const { getLinkHashKey } = useLink();
const { loadChildren, getCachedChildren } = useLinksListing();
const findAvailableName = async (
abortSignal: AbortSignal,
shareId: string,
parentLinkID: string,
filename: string,
suppressErrors = false
) => {
const parentHashKey = await getLinkHashKey(abortSignal, shareId, parentLinkID);
if (!parentHashKey) {
throw Error('Missing hash key on folder link');
}
const [namePart, extension] = splitLinkName(filename);
const hash = await generateLookupHash(filename, parentHashKey);
const findAdjustedName = async (
start = 0
): Promise<{
filename: string;
hash: string;
draftLinkId?: string;
clientUid?: string;
}> => {
const hashesToCheck = await Promise.all(
range(start, start + HASH_CHECK_AMOUNT).map(async (i) => {
if (i === 0) {
return {
filename,
hash,
};
}
const adjustedFileName = adjustName(i, namePart, extension);
return {
filename: adjustedFileName,
hash: await generateLookupHash(adjustedFileName, parentHashKey),
};
})
);
const Hashes = hashesToCheck.map(({ hash }) => hash);
const { AvailableHashes, PendingHashes } = await debouncedRequest<HashCheckResult>(
queryCheckAvailableHashes(shareId, parentLinkID, { Hashes }, suppressErrors),
abortSignal
);
// Check if the pending drafts were created by this client and it is safe
// to automatically replace the draft without user interaction.
const pendingAvailableHashes = PendingHashes.filter(({ ClientUID }) => isClientUidAvailable(ClientUID));
if (pendingAvailableHashes.length) {
const availableName = hashesToCheck.find(({ hash }) => hash === pendingAvailableHashes[0].Hash);
if (availableName) {
return {
...availableName,
draftLinkId: pendingAvailableHashes[0].LinkID,
clientUid: pendingAvailableHashes[0].ClientUID,
};
}
}
if (!AvailableHashes.length) {
return findAdjustedName(start + HASH_CHECK_AMOUNT);
}
const availableName = hashesToCheck.find(({ hash }) => hash === AvailableHashes[0]);
if (!availableName) {
throw new Error('Backend returned unexpected hash');
}
const draftHashes = PendingHashes.filter(({ ClientUID }) => !isClientUidAvailable(ClientUID));
const draftLinkId = draftHashes.find(({ Hash }) => Hash === hash)?.LinkID;
return {
...availableName,
draftLinkId,
};
};
return findAdjustedName();
};
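// Example (illustrative, assuming adjustName produces names like
// "report (1).pdf"): for "report.pdf" the first batch checks the hashes of
// "report.pdf" and "report (1).pdf" ... "report (9).pdf"; if none is
// available, the next batch starts at "report (10).pdf".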
const getLinkByName = async (abortSignal: AbortSignal, shareId: string, parentLinkID: string, name: string) => {
await loadChildren(abortSignal, shareId, parentLinkID);
const { links } = getCachedChildren(abortSignal, shareId, parentLinkID);
return links?.find((link) => link.name === name);
};
return {
findAvailableName,
getLinkByName,
};
}
| 3,187
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadQueue.add.test.ts
|
import { act, renderHook } from '@testing-library/react-hooks';
import { TransferState } from '../../../components/TransferManager/transfer';
import { mockGlobalFile, testFile } from '../../../utils/test/file';
import { UploadFileList } from '../interface';
import { FileUpload, FolderUpload, UploadQueue } from './interface';
import useUploadQueue, { addItemToQueue } from './useUploadQueue';
function createEmptyQueue(): UploadQueue {
return {
shareId: 'shareId',
linkId: 'parentId',
files: [],
folders: [],
};
}
describe('useUploadQueue::add', () => {
let hook: {
current: {
fileUploads: FileUpload[];
folderUploads: FolderUpload[];
add: (shareId: string, parentId: string, fileList: UploadFileList) => void;
};
};
beforeEach(() => {
mockGlobalFile();
const { result } = renderHook(() => useUploadQueue());
hook = result;
});
it('creates new upload queue', () => {
act(() => {
hook.current.add('shareId', 'parentId', [{ path: [], folder: 'folder' }]);
hook.current.add('shareId2', 'parentId2', [{ path: [], folder: 'folder' }]);
});
expect(hook.current.folderUploads).toMatchObject([
{
name: 'folder',
shareId: 'shareId',
parentId: 'parentId',
},
{
name: 'folder',
shareId: 'shareId2',
parentId: 'parentId2',
},
]);
});
it('merges upload queue', () => {
act(() => {
hook.current.add('shareId', 'parentId', [{ path: [], folder: 'folder' }]);
hook.current.add('shareId', 'parentId', [{ path: [], folder: 'folder2' }]);
});
expect(hook.current.folderUploads).toMatchObject([
{
name: 'folder',
shareId: 'shareId',
parentId: 'parentId',
},
{
name: 'folder2',
shareId: 'shareId',
parentId: 'parentId',
},
]);
});
it('throws error when adding file with empty name', () => {
expect(() => {
addItemToQueue('shareId', createEmptyQueue(), { path: [], file: testFile('') });
}).toThrow('File or folder is missing a name');
});
it('throws error when adding folder with empty name', () => {
expect(() => {
addItemToQueue('shareId', createEmptyQueue(), { path: [], folder: '' });
}).toThrow('File or folder is missing a name');
});
it('throws error when adding file to non-existing folder', () => {
expect(() => {
addItemToQueue('shareId', createEmptyQueue(), { path: ['folder'], file: testFile('a.txt') });
}).toThrow('Wrong file or folder structure');
});
it('throws error when adding the same file again', () => {
const queue = createEmptyQueue();
addItemToQueue('shareId', queue, { path: [], file: testFile('a.txt') });
expect(() => {
addItemToQueue('shareId', queue, { path: [], file: testFile('a.txt') });
}).toThrow('File or folder "a.txt" is already uploading');
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
addItemToQueue('shareId', queue, { path: ['folder'], file: testFile('a.txt') });
expect(() => {
addItemToQueue('shareId', queue, { path: ['folder'], file: testFile('a.txt') });
}).toThrow('File or folder "a.txt" is already uploading');
});
it('throws error when adding the same folder again', () => {
const queue = createEmptyQueue();
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
expect(() => {
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
}).toThrow('File or folder "folder" is already uploading');
addItemToQueue('shareId', queue, { path: ['folder'], folder: 'subfolder' });
expect(() => {
addItemToQueue('shareId', queue, { path: ['folder'], folder: 'subfolder' });
}).toThrow('File or folder "subfolder" is already uploading');
});
it('throws error when adding the same folder again with unfinished childs', () => {
const queue = createEmptyQueue();
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
addItemToQueue('shareId', queue, { path: ['folder'], file: testFile('a.txt') });
queue.folders[0].state = TransferState.Done;
expect(() => {
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
}).toThrow('File or folder "folder" is already uploading');
queue.folders[0].files[0].state = TransferState.Done;
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
});
it('adds files to the latest folder', () => {
const queue = createEmptyQueue();
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
queue.folders[0].state = TransferState.Done;
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
addItemToQueue('shareId', queue, { path: ['folder'], file: testFile('b.txt') });
expect(queue.folders[0].files.length).toBe(0);
expect(queue.folders[1].files.length).toBe(1);
expect(queue.folders[1].files[0].meta.filename).toBe('b.txt');
});
it('adds files to already prepared filter with pending state', () => {
const queue = createEmptyQueue();
addItemToQueue('shareId', queue, { path: [], folder: 'folder' });
// The first file, added before the folder is done, is set to the initializing state.
addItemToQueue('shareId', queue, { path: ['folder'], file: testFile('a.txt') });
expect(queue.folders[0].files[0]).toMatchObject({
meta: { filename: 'a.txt' },
state: TransferState.Initializing,
});
// The second file, added after the folder is done, is set to the pending state.
queue.folders[0].state = TransferState.Done;
queue.folders[0].linkId = 'folderId';
addItemToQueue('shareId', queue, { path: ['folder'], file: testFile('b.txt') });
expect(queue.folders[0].files[1]).toMatchObject({
meta: { filename: 'b.txt' },
state: TransferState.Pending,
});
});
});
| 3,188
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadQueue.attributes.test.ts
|
import { act, renderHook } from '@testing-library/react-hooks';
import { TransferState } from '../../../components/TransferManager/transfer';
import { mockGlobalFile, testFile } from '../../../utils/test/file';
import { UploadFileList } from '../interface';
import { FileUpload, FolderUpload } from './interface';
import useUploadQueue from './useUploadQueue';
describe("useUploadQueue' attributes", () => {
let hook: {
current: {
hasUploads: boolean;
fileUploads: FileUpload[];
folderUploads: FolderUpload[];
allUploads: (FileUpload | FolderUpload)[];
add: (shareId: string, parentId: string, fileList: UploadFileList) => void;
};
};
beforeEach(() => {
mockGlobalFile();
const { result } = renderHook(() => useUploadQueue());
hook = result;
});
it('returns empty queue', () => {
expect(hook.current.hasUploads).toBe(false);
expect(hook.current.fileUploads).toMatchObject([]);
expect(hook.current.folderUploads).toMatchObject([]);
expect(hook.current.allUploads).toMatchObject([]);
});
it('returns folder only', () => {
act(() => {
hook.current.add('shareId', 'parentId', [{ path: [], folder: 'folder' }]);
});
const expectedFolder = {
// We don't check ID and startDate.
shareId: 'shareId',
parentId: 'parentId',
state: TransferState.Pending,
name: 'folder',
files: [],
folders: [],
meta: {
filename: 'folder',
size: 0,
mimeType: 'Folder',
},
};
expect(hook.current.hasUploads).toBe(true);
expect(hook.current.fileUploads).toMatchObject([]);
expect(hook.current.folderUploads).toMatchObject([expectedFolder]);
expect(hook.current.allUploads).toMatchObject([expectedFolder]);
});
it('returns file only', () => {
const file = testFile('file.txt');
const dsStore = testFile('.DS_Store');
act(() => {
hook.current.add('shareId', 'parentId', [
{ path: [], file },
{ path: [], file: dsStore }, // .DS_Store files are ignored.
]);
});
const expectedFile = {
// We don't check ID and startDate.
shareId: 'shareId',
parentId: 'parentId',
state: TransferState.Pending,
file,
meta: {
filename: file.name,
mimeType: file.type,
size: file.size,
},
};
expect(hook.current.hasUploads).toBe(true);
expect(hook.current.fileUploads).toMatchObject([expectedFile]);
expect(hook.current.folderUploads).toMatchObject([]);
expect(hook.current.allUploads).toMatchObject([expectedFile]);
});
it('returns both files and folders', () => {
const file = testFile('file.txt');
act(() => {
hook.current.add('shareId', 'parentId', [
{ path: [], folder: 'folder' },
{ path: ['folder'], file },
]);
});
expect(hook.current.hasUploads).toBe(true);
expect(hook.current.fileUploads).toMatchObject([
{
state: TransferState.Initializing,
parentId: undefined,
},
]);
expect(hook.current.folderUploads).toMatchObject([
{
state: TransferState.Pending,
parentId: 'parentId',
files: [{ meta: { filename: 'file.txt' } }],
},
]);
expect(hook.current.allUploads.length).toBe(2);
});
});
| 3,189
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadQueue.remove.test.ts
|
import { act, renderHook } from '@testing-library/react-hooks';
import { mockGlobalFile, testFile } from '../../../utils/test/file';
import { UploadFileList } from '../interface';
import { FileUpload, FolderUpload, UpdateCallback, UpdateFilter } from './interface';
import useUploadQueue from './useUploadQueue';
describe('useUploadQueue::remove', () => {
const mockCallback = jest.fn();
let hook: {
current: {
fileUploads: FileUpload[];
folderUploads: FolderUpload[];
add: (shareId: string, parentId: string, fileList: UploadFileList) => void;
remove: (idOrFilter: UpdateFilter, callback?: UpdateCallback) => void;
};
};
let firstFileId: string;
let firstFolderId: string;
let secondIds: string[];
beforeEach(() => {
mockCallback.mockClear();
mockGlobalFile();
const { result } = renderHook(() => useUploadQueue());
hook = result;
act(() => {
hook.current.add('shareId', 'parentId', [
{ path: [], folder: 'folder1' },
{ path: [], folder: 'folder2' },
{ path: [], folder: 'folder3' },
{ path: [], file: testFile('file1.txt') },
{ path: ['folder1'], file: testFile('file2.txt') },
{ path: ['folder1'], file: testFile('file3.txt') },
{ path: ['folder2'], file: testFile('file4.txt') },
]);
});
firstFileId = hook.current.fileUploads[0].id;
firstFolderId = hook.current.folderUploads[0].id;
secondIds = [hook.current.fileUploads[1].id, hook.current.folderUploads[1].id];
});
it('removes file from the queue using id', () => {
act(() => {
hook.current.remove(firstFileId, mockCallback);
});
expect(mockCallback.mock.calls).toMatchObject([[{ meta: { filename: 'file1.txt' } }]]);
expect(hook.current.folderUploads).toMatchObject([
{ meta: { filename: 'folder1' } },
{ meta: { filename: 'folder2' } },
{ meta: { filename: 'folder3' } },
]);
expect(hook.current.fileUploads).toMatchObject([
{ meta: { filename: 'file2.txt' } },
{ meta: { filename: 'file3.txt' } },
{ meta: { filename: 'file4.txt' } },
]);
});
it('removes folder from the queue using id', () => {
act(() => {
hook.current.remove(firstFolderId, mockCallback);
});
expect(mockCallback.mock.calls).toMatchObject([
[{ meta: { filename: 'folder1' } }],
[{ meta: { filename: 'file2.txt' } }],
[{ meta: { filename: 'file3.txt' } }],
]);
expect(hook.current.folderUploads).toMatchObject([
{ meta: { filename: 'folder2' } },
{ meta: { filename: 'folder3' } },
]);
expect(hook.current.fileUploads).toMatchObject([
{ meta: { filename: 'file1.txt' } },
{ meta: { filename: 'file4.txt' } },
]);
});
it('removes file and folder from the queue using filter', () => {
act(() => {
hook.current.remove(({ id }) => secondIds.includes(id), mockCallback);
});
expect(mockCallback.mock.calls).toMatchObject([
[{ meta: { filename: 'folder2' } }],
[{ meta: { filename: 'file4.txt' } }],
[{ meta: { filename: 'file2.txt' } }],
]);
expect(hook.current.folderUploads).toMatchObject([
{ meta: { filename: 'folder1' } },
{ meta: { filename: 'folder3' } },
]);
expect(hook.current.fileUploads).toMatchObject([
{ meta: { filename: 'file1.txt' } },
{ meta: { filename: 'file3.txt' } },
]);
});
});
| 3,190
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadQueue.ts
|
import { useCallback, useMemo, useState } from 'react';
import { c } from 'ttag';
import { generateUID } from '@proton/components';
import { DS_STORE } from '@proton/shared/lib/drive/constants';
import { TransferState } from '../../../components/TransferManager/transfer';
import {
isTransferConflict,
isTransferFinished,
isTransferInitializing,
isTransferPending,
} from '../../../utils/transfer';
import { UploadFileItem, UploadFileList, UploadFolderItem } from '../interface';
import {
FileUpload,
FileUploadReady,
FolderUpload,
FolderUploadReady,
UpdateCallback,
UpdateCallbackParams,
UpdateData,
UpdateFilter,
UpdateState,
UploadConflictError,
UploadQueue,
UploadUserError,
} from './interface';
export default function useUploadQueue() {
const [queue, setQueue] = useState<UploadQueue[]>([]);
const fileUploads = useMemo((): FileUpload[] => {
const f = ({ files, folders }: { files: FileUpload[]; folders: FolderUpload[] }): FileUpload[] => {
return [...files, ...folders.flatMap(f)];
};
return queue.flatMap(f);
}, [queue]);
const folderUploads = useMemo((): FolderUpload[] => {
const f = ({ folders }: { folders: FolderUpload[] }): FolderUpload[] => {
return [...folders, ...folders.flatMap(f)];
};
return queue.flatMap(f);
}, [queue]);
const allUploads = useMemo((): (FileUpload | FolderUpload)[] => {
return [...fileUploads, ...folderUploads];
}, [fileUploads, folderUploads]);
const hasUploads = useMemo((): boolean => {
return allUploads.length > 0;
}, [allUploads]);
const { nextFileUpload, nextFolderUpload } = useMemo(() => {
let nextFileUpload: FileUploadReady | undefined;
let nextFolderUpload: FolderUploadReady | undefined;
const conflictingUpload = allUploads.some(isTransferConflict);
if (conflictingUpload) {
return { nextFileUpload, nextFolderUpload };
}
nextFileUpload = fileUploads.find((file) => isTransferPending(file) && file.parentId) as FileUploadReady;
nextFolderUpload = folderUploads.find(
(folder) => isTransferPending(folder) && folder.parentId
) as FolderUploadReady;
return { nextFileUpload, nextFolderUpload };
}, [allUploads, fileUploads, folderUploads]);
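// Note: while any upload is in the conflict state, no next upload is
// handed out; the queue effectively pauses until the user resolves the
// conflict (possibly for all transfers at once).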
const add = useCallback(
async (
shareId: string,
parentId: string,
list: UploadFileList,
isForPhotos: boolean = false
): Promise<void> => {
return new Promise((resolve, reject) => {
setQueue((queue) => {
const errors: Error[] = [];
const conflictErrors: UploadConflictError[] = [];
const queueItem = queue.find((item) => item.shareId === shareId && item.linkId === parentId) || {
shareId,
linkId: parentId,
files: [],
folders: [],
};
for (const item of list) {
if ((item as UploadFileItem).file?.name === DS_STORE) {
continue;
}
try {
addItemToQueue(shareId, queueItem, item, isForPhotos);
} catch (err: any) {
if ((err as Error).name === 'UploadConflictError') {
conflictErrors.push(err);
} else {
errors.push(err);
}
}
}
const newQueue = [
...queue.filter((item) => item.shareId !== shareId || item.linkId !== parentId),
queueItem,
];
if (conflictErrors.length > 0) {
errors.push(new UploadConflictError(conflictErrors[0].filename, conflictErrors.length - 1));
}
if (errors.length > 0) {
reject(errors);
} else {
resolve();
}
return newQueue;
});
});
},
[]
);
const update = useCallback(
(
idOrFilter: UpdateFilter,
newStateOrCallback: UpdateState,
{ mimeType, name, error, folderId, originalIsDraft, originalIsFolder }: UpdateData = {},
callback?: UpdateCallback
) => {
const filter = convertFilterToFunction(idOrFilter);
const newStateCallback = convertNewStateToFunction(newStateOrCallback);
const updateFileOrFolder = <T extends FileUpload | FolderUpload>(item: T) => {
callback?.(item);
const newState = newStateCallback(item);
// If pause is set twice, prefer the resumeState that was already set
// before, so the transfer is not locked in the paused state forever.
item.resumeState = newState === TransferState.Paused ? item.resumeState || item.state : undefined;
item.state = newState;
if (mimeType) {
item.meta.mimeType = mimeType;
}
if (name) {
item.meta.filename = name;
}
if (originalIsDraft) {
item.originalIsDraft = originalIsDraft;
}
if (originalIsFolder) {
item.originalIsFolder = originalIsFolder;
}
item.error = error;
};
const updateFile = (file: FileUpload): FileUpload => {
if (filter(file)) {
updateFileOrFolder(file);
}
return file;
};
const updateFolder = (folder: FolderUpload): FolderUpload => {
if (filter(folder)) {
// When a parent folder is canceled, all children would hang
// in the initializing state - therefore we need to recursively
// cancel all children.
if (newStateCallback(folder) === TransferState.Canceled) {
folder = recursiveCancel(folder);
}
updateFileOrFolder(folder);
if (folderId) {
folder.linkId = folderId;
folder.files = folder.files.map((file) => ({
...file,
parentId: folderId,
state: file.state === TransferState.Initializing ? TransferState.Pending : file.state,
}));
folder.folders = folder.folders.map((folder) => ({
...folder,
parentId: folderId,
state: folder.state === TransferState.Initializing ? TransferState.Pending : folder.state,
}));
}
}
folder.files = folder.files.map(updateFile);
folder.folders = folder.folders.map(updateFolder);
// When any child is restarted after its parent folder was canceled,
// the child would hang in the initializing state - therefore we
// also need to restart all canceled parents of that child.
if (folder.state === TransferState.Canceled && hasInitializingUpload(folder)) {
folder.state = folder.parentId ? TransferState.Pending : TransferState.Initializing;
}
return folder;
};
setQueue((queue) => [
...queue.map((item) => {
item.files = item.files.map(updateFile);
item.folders = item.folders.map(updateFolder);
return item;
}),
]);
},
[]
);
const updateState = useCallback(
(idOrFilter: UpdateFilter, newStateOrCallback: UpdateState) => {
update(idOrFilter, newStateOrCallback);
},
[update]
);
const updateWithData = useCallback(
(idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, data: UpdateData = {}) => {
update(idOrFilter, newStateOrCallback, data);
},
[update]
);
const updateWithCallback = useCallback(
(idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, callback: UpdateCallback) => {
update(idOrFilter, newStateOrCallback, {}, callback);
},
[update]
);
const remove = useCallback((idOrFilter: UpdateFilter, callback?: UpdateCallback) => {
const filter = convertFilterToFunction(idOrFilter);
const invertFilter: UpdateFilter = (item) => !filter(item);
setQueue((queue) => {
if (callback) {
const recursiveCallback = (item: FolderUpload) => {
callback(item);
item.files.forEach((value) => callback(value));
item.folders.forEach(recursiveCallback);
};
const doCallback = (item: UploadQueue | FolderUpload) => {
item.files.filter(filter).forEach((value) => callback(value));
item.folders.filter(filter).forEach(recursiveCallback);
item.folders.forEach(doCallback);
};
queue.forEach(doCallback);
}
const doFilter = <T extends UploadQueue | FolderUpload>(item: T): T => {
item.files = item.files.filter(invertFilter);
item.folders = item.folders.filter(invertFilter).map(doFilter);
return item;
};
return [...queue.map(doFilter)];
});
}, []);
const clear = useCallback(() => {
setQueue([]);
}, []);
return {
fileUploads,
folderUploads,
allUploads,
hasUploads,
nextFileUpload,
nextFolderUpload,
add,
updateState,
updateWithData,
updateWithCallback,
remove,
clear,
};
}
export function convertFilterToFunction(filterOrId: UpdateFilter) {
return typeof filterOrId === 'function' ? filterOrId : ({ id }: UpdateCallbackParams) => id === filterOrId;
}
function convertNewStateToFunction(newStateOrCallback: UpdateState) {
return typeof newStateOrCallback === 'function' ? newStateOrCallback : () => newStateOrCallback;
}
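// Illustrative usage (not part of this module; the id and filter below are
// hypothetical): thanks to the two converters above, the update helpers accept
// either a transfer id or a predicate, and either a target state or a
// state-producing callback, so call shapes like these are all valid:
//
//     updateState('some-upload-id', TransferState.Canceled);
//     updateState(({ id }) => someIds.includes(id), TransferState.Canceled);
//     updateState(
//         () => true,
//         ({ state }) => (state === TransferState.Pending ? TransferState.Error : TransferState.Canceled)
//     );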
export function addItemToQueue(
shareId: string,
newQueue: UploadQueue,
item: UploadFileItem | UploadFolderItem,
isForPhotos: boolean = false
) {
const name = (item as UploadFileItem).file ? (item as UploadFileItem).file.name : (item as UploadFolderItem).folder;
if (!name) {
throw new UploadUserError(c('Notification').t`File or folder is missing a name`);
}
const part = findUploadQueueFolder(newQueue, item.path);
if (isNameAlreadyUploading(part, name)) {
throw new UploadConflictError(name);
}
const generalAttributes = {
id: generateUID(),
shareId,
parentId: part.linkId,
state: part.linkId ? TransferState.Pending : TransferState.Initializing,
startDate: new Date(),
isForPhotos,
};
if ((item as UploadFileItem).file) {
const fileItem = item as UploadFileItem;
part.files.push({
...generalAttributes,
file: fileItem.file,
meta: {
filename: name,
size: fileItem.file.size,
mimeType: fileItem.file.type,
},
});
} else {
const folderItem = item as UploadFolderItem;
part.folders.push({
...generalAttributes,
name: folderItem.folder,
modificationTime: folderItem.modificationTime,
files: [],
folders: [],
meta: {
filename: folderItem.folder,
size: 0,
mimeType: 'Folder',
},
});
}
}
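// Illustrative example (hypothetical values): calling
//     addItemToQueue('shareId', queue, { path: ['folder1'], file: someFile })
// pushes a FileUpload under the 'folder1' part of the queue; its state is
// Pending when the parent folder already has a linkId, and Initializing
// while the parent folder is still being created.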
function findUploadQueueFolder(part: UploadQueue | FolderUpload, path: string[]): UploadQueue | FolderUpload {
if (path.length === 0) {
return part;
}
const nextStep = path[0];
const sortedMatchingFolders = part.folders
        // Find all folders with the same name. This can happen when the user
        // uploads a folder and, after it is done, uploads it again.
        .filter(({ name }) => name === nextStep)
        // Sort by date so the latest one is at the beginning of the array.
        // We want to add new uploads to the latest folder, not to one which
        // was already finished before.
.sort((a, b) => b.startDate.getTime() - a.startDate.getTime());
    // Folders can have the same startDate (mostly in unit tests, probably not
    // in the real world), but let's explicitly always prefer an unfinished
    // folder to be extra sure.
const folder = sortedMatchingFolders.find((folder) => !isTransferFinished(folder)) || sortedMatchingFolders[0];
if (folder) {
return findUploadQueueFolder(folder, path.slice(1));
}
throw new Error('Wrong file or folder structure');
}
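// For example, an item with path ['folder1', 'sub'] is resolved by first
// finding 'folder1' among the top-level folders and then 'sub' among its
// children; when several sibling folders share the name, the most recently
// started unfinished one is preferred.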
function isNameAlreadyUploading(part: UploadQueue | FolderUpload, name: string): boolean {
const recursiveIsNotFinished = (upload: FolderUpload): boolean => {
return (
!isTransferFinished(upload) ||
upload.files.some((upload) => !isTransferFinished(upload)) ||
upload.folders.some(recursiveIsNotFinished)
);
};
return (
part.files.filter((upload) => !isTransferFinished(upload)).some(({ file }) => file.name === name) ||
part.folders.filter(recursiveIsNotFinished).some((folder) => folder.name === name)
);
}
function recursiveCancel(folder: FolderUpload): FolderUpload {
return {
...folder,
files: folder.files.map((file) => ({
...file,
state: TransferState.Canceled,
})),
folders: folder.folders
.map((folder) => ({
...folder,
state: TransferState.Canceled,
}))
.map(recursiveCancel),
};
}
function hasInitializingUpload(folder: FolderUpload): boolean {
return (
folder.files.some(isTransferInitializing) ||
folder.folders.some(isTransferInitializing) ||
folder.folders.some(hasInitializingUpload)
);
}
| 3,191
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/UploadProvider/useUploadQueue.update.test.ts
|
import { act, renderHook } from '@testing-library/react-hooks';
import { TransferState } from '../../../components/TransferManager/transfer';
import { mockGlobalFile, testFile } from '../../../utils/test/file';
import { UploadFileList } from '../interface';
import { FileUpload, FolderUpload, UpdateCallback, UpdateData, UpdateFilter, UpdateState } from './interface';
import useUploadQueue from './useUploadQueue';
describe("useUploadQueue's update functions", () => {
let hook: {
current: {
fileUploads: FileUpload[];
folderUploads: FolderUpload[];
add: (shareId: string, parentId: string, fileList: UploadFileList) => void;
updateState: (idOrFilter: UpdateFilter, newStateOrCallback: UpdateState) => void;
updateWithData: (idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, data: UpdateData) => void;
updateWithCallback: (
idOrFilter: UpdateFilter,
newStateOrCallback: UpdateState,
callback: UpdateCallback
) => void;
};
};
let firstFileId: string;
let firstFolderId: string;
let secondFileId: string;
let secondIds: string[];
beforeEach(() => {
mockGlobalFile();
const { result } = renderHook(() => useUploadQueue());
hook = result;
act(() => {
hook.current.add('shareId', 'parentId', [
{ path: [], folder: 'folder1' },
{ path: [], folder: 'folder2' },
{ path: [], folder: 'folder3' },
{ path: [], file: testFile('file1.txt') },
{ path: ['folder1'], file: testFile('file2.txt') },
{ path: ['folder1'], file: testFile('file3.txt') },
{ path: ['folder2'], file: testFile('file4.txt') },
]);
});
firstFileId = hook.current.fileUploads[0].id;
firstFolderId = hook.current.folderUploads[0].id;
secondFileId = hook.current.fileUploads[1].id;
secondIds = [secondFileId, hook.current.folderUploads[1].id];
});
it('updates file state using id', () => {
act(() => {
hook.current.updateState(firstFileId, TransferState.Canceled);
});
expect(hook.current.fileUploads.map(({ state }) => state)).toMatchObject([
TransferState.Canceled,
TransferState.Initializing,
TransferState.Initializing,
TransferState.Initializing,
]);
});
it('updates folder state using id', () => {
act(() => {
hook.current.updateState(firstFolderId, TransferState.Canceled);
});
expect(hook.current.folderUploads.map(({ state }) => state)).toMatchObject([
TransferState.Canceled,
TransferState.Pending,
TransferState.Pending,
]);
});
it('updates file and folder state using filter', () => {
act(() => {
hook.current.updateState(({ id }) => secondIds.includes(id), TransferState.Canceled);
});
expect(hook.current.fileUploads.map(({ state }) => state)).toMatchObject([
TransferState.Pending,
TransferState.Canceled,
TransferState.Initializing,
TransferState.Canceled,
]);
expect(hook.current.folderUploads.map(({ state }) => state)).toMatchObject([
TransferState.Pending,
TransferState.Canceled,
TransferState.Pending,
]);
});
it('updates file and folder by callback', () => {
act(() => {
hook.current.updateState(
() => true,
({ state }) => (state === TransferState.Pending ? TransferState.Error : TransferState.Canceled)
);
});
expect(hook.current.fileUploads.map(({ state }) => state)).toMatchObject([
TransferState.Error,
TransferState.Canceled,
TransferState.Canceled,
TransferState.Canceled,
]);
expect(hook.current.folderUploads.map(({ state }) => state)).toMatchObject([
TransferState.Error,
TransferState.Error,
TransferState.Error,
]);
});
it('updates file state with data', () => {
act(() => {
hook.current.updateWithData(firstFileId, TransferState.Progress, {
name: 'file1 (1).txt',
mimeType: 'txt2',
originalIsDraft: true,
});
});
expect(hook.current.fileUploads[0]).toMatchObject({
state: TransferState.Progress,
meta: {
filename: 'file1 (1).txt',
mimeType: 'txt2',
size: 42,
},
originalIsDraft: true,
});
});
it('updates folder state with data', () => {
act(() => {
hook.current.updateWithData(firstFolderId, TransferState.Progress, {
folderId: 'folderId',
originalIsDraft: true,
});
});
expect(hook.current.folderUploads[0]).toMatchObject({
state: TransferState.Progress,
linkId: 'folderId',
originalIsDraft: true,
});
expect(
hook.current.fileUploads.map(({ parentId, state, meta }) => [meta.filename, state, parentId])
).toMatchObject([
['file1.txt', TransferState.Pending, 'parentId'],
['file2.txt', TransferState.Pending, 'folderId'],
['file3.txt', TransferState.Pending, 'folderId'],
['file4.txt', TransferState.Initializing, undefined],
]);
});
it('updates folder but keeps sub files and folders cancelled', () => {
act(() => {
hook.current.updateState(() => true, TransferState.Canceled);
hook.current.updateWithData(firstFolderId, TransferState.Progress, {
folderId: 'folderId',
});
});
expect(
hook.current.fileUploads.map(({ parentId, state, meta }) => [meta.filename, state, parentId])
).toMatchObject([
['file1.txt', TransferState.Canceled, 'parentId'],
['file2.txt', TransferState.Canceled, 'folderId'],
['file3.txt', TransferState.Canceled, 'folderId'],
['file4.txt', TransferState.Canceled, undefined],
]);
});
it('updates states with error', () => {
        const error = new Error('some failure');
act(() => {
hook.current.updateWithData(({ id }) => secondIds.includes(id), TransferState.Error, {
error,
});
});
expect(hook.current.fileUploads.map(({ state, error, meta }) => [meta.filename, state, error])).toMatchObject([
['file1.txt', TransferState.Pending, undefined],
['file2.txt', TransferState.Error, error],
['file3.txt', TransferState.Initializing, undefined],
['file4.txt', TransferState.Initializing, undefined],
]);
expect(hook.current.folderUploads.map(({ state, error, meta }) => [meta.filename, state, error])).toMatchObject(
[
['folder1', TransferState.Pending, undefined],
['folder2', TransferState.Error, error],
['folder3', TransferState.Pending, undefined],
]
);
});
it('updates state with callback', () => {
const mockCallback = jest.fn();
act(() => {
hook.current.updateWithCallback(
({ state }) => state === TransferState.Pending,
TransferState.Progress,
mockCallback
);
});
expect(mockCallback.mock.calls).toMatchObject([
[{ parentId: 'parentId', meta: { filename: 'file1.txt' } }],
[{ parentId: 'parentId', meta: { filename: 'folder1' } }],
[{ parentId: 'parentId', meta: { filename: 'folder2' } }],
[{ parentId: 'parentId', meta: { filename: 'folder3' } }],
]);
});
it('updates state to cancel for folder recursively to not hang children forever', () => {
act(() => {
hook.current.updateState(firstFolderId, TransferState.Canceled);
});
expect(hook.current.folderUploads.map(({ state, meta }) => [meta.filename, state])).toMatchObject([
['folder1', TransferState.Canceled],
['folder2', TransferState.Pending],
['folder3', TransferState.Pending],
]);
expect(hook.current.fileUploads.map(({ state, meta }) => [meta.filename, state])).toMatchObject([
['file1.txt', TransferState.Pending],
['file2.txt', TransferState.Canceled],
['file3.txt', TransferState.Canceled],
['file4.txt', TransferState.Initializing],
]);
});
it('restarts child also restarts parent folder recursively to not hang forever', () => {
act(() => {
hook.current.updateState(firstFolderId, TransferState.Canceled);
hook.current.updateState(secondFileId, TransferState.Initializing);
});
expect(hook.current.folderUploads.map(({ state, meta }) => [meta.filename, state])).toMatchObject([
['folder1', TransferState.Pending],
['folder2', TransferState.Pending],
['folder3', TransferState.Pending],
]);
expect(hook.current.fileUploads.map(({ state, meta }) => [meta.filename, state])).toMatchObject([
['file1.txt', TransferState.Pending],
['file2.txt', TransferState.Initializing],
['file3.txt', TransferState.Canceled],
['file4.txt', TransferState.Initializing],
]);
});
});
| 3,192
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/media/canvasUtil.ts
|
import { HD_THUMBNAIL_MAX_SIZE, THUMBNAIL_MAX_SIZE, THUMBNAIL_QUALITIES } from '@proton/shared/lib/drive/constants';
import { ThumbnailType } from './interface';
export async function canvasToThumbnail(
canvas: HTMLCanvasElement,
thumbnailType: ThumbnailType = ThumbnailType.PREVIEW
): Promise<ArrayBuffer> {
    // We check the cleartext thumbnail size, but the API limit applies to the
    // encrypted data. Doing the check in the proper place would be too
    // difficult for little gain. The size increase from encryption is under
    // 10 percent, so limiting to 90% of the real limit is reasonable.
const maxSize = thumbnailType === ThumbnailType.HD_PREVIEW ? HD_THUMBNAIL_MAX_SIZE * 0.9 : THUMBNAIL_MAX_SIZE * 0.9;
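    // THUMBNAIL_QUALITIES is assumed to be ordered from the highest JPEG
    // quality downwards, so the first candidate that fits under maxSize is
    // the best-looking thumbnail that is still small enough.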
for (const quality of THUMBNAIL_QUALITIES) {
const data = await canvasToArrayBuffer(canvas, 'image/jpeg', quality);
if (data.byteLength < maxSize) {
return data;
}
}
throw new Error('Cannot create small enough thumbnail');
}
function canvasToArrayBuffer(canvas: HTMLCanvasElement, mime: string, quality: number): Promise<ArrayBuffer> {
return new Promise((resolve, reject) =>
canvas.toBlob(
(d) => {
if (!d) {
reject(new Error('Blob not available'));
return;
}
const r = new FileReader();
r.addEventListener('load', () => {
resolve(r.result as ArrayBuffer);
});
r.addEventListener('error', reject);
r.readAsArrayBuffer(d);
},
mime,
quality
)
);
}
| 3,193
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/media/getMediaInfo.test.ts
|
import { getMediaInfo } from './getMediaInfo';
describe('getMediaInfo', () => {
it('does nothing when mime type is not supported', async () => {
await expect(getMediaInfo(new Promise((resolve) => resolve('png')), new Blob(), true)).resolves.toEqual(
undefined
);
await expect(
getMediaInfo(new Promise((resolve) => resolve('image/jpeeg')), new Blob(), false)
).resolves.toEqual(undefined);
});
});
| 3,194
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/media/getMediaInfo.ts
|
import { isSVG, isSupportedImage, isVideo } from '@proton/shared/lib/helpers/mimetype';
import { traceError } from '@proton/shared/lib/helpers/sentry';
import { imageCannotBeLoadedError, scaleImageFile } from './image';
import { Media, ThumbnailInfo, ThumbnailType } from './interface';
import { scaleSvgFile } from './svg';
import { getVideoInfo } from './video';
export interface ThumbnailGenerator {
(file: Blob, thumbnailTypes: ThumbnailType[] | never, mimeType: string | never): Promise<
(Media & { thumbnails?: ThumbnailInfo[] }) | undefined
>;
}
interface CheckerThumbnailCreatorPair {
checker: (mimeType: string) => boolean;
creator: ThumbnailGenerator;
}
// This is a standardised (via the interface above) list of function pairs for checking the mimeType and creating a thumbnail.
// This way we don't have to write a separate 'if' statement for every type we handle.
// Instead, we look for the first pair whose checker returns true and use its creator to generate the thumbnail.
const CHECKER_CREATOR_LIST: readonly CheckerThumbnailCreatorPair[] = [
{ checker: isVideo, creator: getVideoInfo },
{ checker: isSVG, creator: scaleSvgFile },
{
checker: isSupportedImage,
creator: async (file: Blob, thumbnailTypes: ThumbnailType[], mimeType) =>
scaleImageFile({ file, thumbnailTypes, mimeType }).catch((err) => {
// Corrupted images cannot be loaded which we don't care about.
if (err === imageCannotBeLoadedError) {
return undefined;
}
throw err;
}),
},
] as const;
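// For example, a 'video/mp4' blob matches isVideo and is handled by
// getVideoInfo, while 'image/svg+xml' falls through to the isSVG pair; a
// mime type that matches no checker yields no media info at all.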
export const getMediaInfo = (mimeTypePromise: Promise<string>, file: Blob, isForPhotos: boolean) =>
mimeTypePromise.then(async (mimeType) => {
const mediaInfo = CHECKER_CREATOR_LIST.find(({ checker }) => checker(mimeType))
?.creator(
file,
isForPhotos ? [ThumbnailType.PREVIEW, ThumbnailType.HD_PREVIEW] : [ThumbnailType.PREVIEW],
mimeType
)
.catch((err) => {
traceError(err);
return undefined;
});
return mediaInfo;
});
| 3,195
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/media/image.test.ts
|
import { SupportedMimeTypes, THUMBNAIL_MAX_SIZE } from '@proton/shared/lib/drive/constants';
import { scaleImageFile } from './image';
import { ThumbnailType } from './interface';
describe('scaleImageFile', () => {
beforeEach(() => {
global.URL.createObjectURL = jest.fn(() => 'url');
// Image under test does not handle events.
// @ts-ignore
global.Image = class {
addEventListener(type: string, listener: (value?: any) => void) {
if (type === 'load') {
listener();
}
}
};
// @ts-ignore
global.HTMLCanvasElement.prototype.getContext = jest.fn(() => {
return {
drawImage: jest.fn(),
fillRect: jest.fn(),
};
});
global.HTMLCanvasElement.prototype.toBlob = jest.fn((callback) => {
callback(new Blob(['abc']));
});
});
it('returns the scaled image', async () => {
await expect(scaleImageFile({ file: new Blob(), mimeType: SupportedMimeTypes.jpg })).resolves.toEqual({
width: undefined,
height: undefined,
thumbnails: [
{
thumbnailData: new Uint8Array([97, 98, 99]),
thumbnailType: ThumbnailType.PREVIEW,
},
],
});
});
it('returns multiple scaled image', async () => {
await expect(
scaleImageFile({
file: new Blob(),
mimeType: SupportedMimeTypes.jpg,
thumbnailTypes: [ThumbnailType.PREVIEW, ThumbnailType.HD_PREVIEW],
})
).resolves.toEqual({
width: undefined,
height: undefined,
thumbnails: [
{
thumbnailData: new Uint8Array([97, 98, 99]),
thumbnailType: ThumbnailType.PREVIEW,
},
{
thumbnailData: new Uint8Array([97, 98, 99]),
thumbnailType: ThumbnailType.HD_PREVIEW,
},
],
});
});
it('fails due to problem to load the image', async () => {
// @ts-ignore
global.Image = class {
addEventListener(type: string, listener: (value?: any) => void) {
if (type === 'error') {
listener(new Error('Failed to load image'));
}
}
};
await expect(scaleImageFile({ file: new Blob(), mimeType: SupportedMimeTypes.jpg })).rejects.toEqual(
new Error('Image cannot be loaded')
);
});
it('fails due to no small enough thumbnail', async () => {
global.HTMLCanvasElement.prototype.toBlob = jest.fn((callback) => {
callback(new Blob(['x'.repeat(THUMBNAIL_MAX_SIZE + 1)]));
});
await expect(scaleImageFile({ file: new Blob(), mimeType: SupportedMimeTypes.jpg })).rejects.toEqual(
new Error('Cannot create small enough thumbnail')
);
});
it('fails due to no blob', async () => {
global.HTMLCanvasElement.prototype.toBlob = jest.fn((callback) => {
callback(null);
});
await expect(scaleImageFile({ file: new Blob(), mimeType: SupportedMimeTypes.jpg })).rejects.toEqual(
new Error('Blob not available')
);
});
});
| 3,196
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/media/image.ts
|
import { HD_THUMBNAIL_MAX_SIDE, SupportedMimeTypes } from '@proton/shared/lib/drive/constants';
import { canvasToThumbnail } from './canvasUtil';
import { ThumbnailInfo, ThumbnailType } from './interface';
import { calculateThumbnailSize } from './util';
export const imageCannotBeLoadedError = new Error('Image cannot be loaded');
interface ReturnProps {
width?: number;
height?: number;
thumbnails?: ThumbnailInfo[];
}
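// Note: at the call site below, a `true` result means only the standard
// preview is generated - presumably because a JPEG no wider than
// HD_THUMBNAIL_MAX_SIDE is already small enough to serve as its own HD
// preview, so no separate HD thumbnail is needed.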
const shouldGenerateHDPreview = ({ width, mimeType }: { width: number; mimeType: string }) =>
    mimeType === SupportedMimeTypes.jpg && width && width <= HD_THUMBNAIL_MAX_SIDE;
export function scaleImageFile({
file,
mimeType,
thumbnailTypes = [ThumbnailType.PREVIEW],
}: {
file: Blob;
mimeType: string;
thumbnailTypes?: ThumbnailType[];
}): Promise<ReturnProps> {
return new Promise((resolve, reject) => {
const img = new Image();
img.addEventListener('load', async () => {
const thumbnailTypesToGenerate = shouldGenerateHDPreview({ width: img.width, mimeType })
? [ThumbnailType.PREVIEW]
: thumbnailTypes;
Promise.all(thumbnailTypesToGenerate.map((thumbnailType) => scaleImage(img, thumbnailType)))
.then((thumbnails) => {
resolve({ width: img.width, height: img.height, thumbnails });
})
.catch(reject);
});
        // If the image fails to load, it doesn't provide any error.
        // We need to provide a custom one to state clearly what is happening.
img.addEventListener('error', () => {
reject(imageCannotBeLoadedError);
});
img.src = URL.createObjectURL(file);
});
}
async function scaleImage(
img: HTMLImageElement,
thumbnailType: ThumbnailType = ThumbnailType.PREVIEW
): Promise<ThumbnailInfo> {
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
    // Null is returned only when a wrong context type is used.
if (ctx === null) {
throw new Error('Context is not available');
}
const [width, height] = calculateThumbnailSize(img, thumbnailType);
canvas.width = width;
canvas.height = height;
    // Use a white background by default for transparent images.
ctx.fillStyle = '#FFFFFF';
ctx.fillRect(0, 0, width, height);
ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
return {
thumbnailType,
thumbnailData: new Uint8Array(await canvasToThumbnail(canvas, thumbnailType)),
};
}
| 3,197
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/media/index.ts
|
export * from './interface';
export { getMediaInfo } from './getMediaInfo';
| 3,198
|
0
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads
|
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_uploads/media/interface.ts
|
export interface ThumbnailInfo {
thumbnailData: Uint8Array;
thumbnailType: ThumbnailType;
}
export enum ThumbnailType {
PREVIEW = 1,
HD_PREVIEW = 2,
}
export interface Media {
width?: number;
height?: number;
duration?: number;
}
| 3,199
|