index
int64
0
0
repo_id
stringlengths
16
181
file_path
stringlengths
28
270
content
stringlengths
1
11.6M
__index_level_0__
int64
0
10k
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/containers/SharedLinksContainer.tsx
import { Redirect, Route, RouteComponentProps, Switch } from 'react-router-dom'; import SharedLinksView from '../components/sections/SharedLinks/SharedLinksView'; const SharedLinksContainer = ({ match }: RouteComponentProps) => { return ( <Switch> <Route path={match.url} exact component={SharedLinksView} /> <Redirect to="/" /> </Switch> ); }; export default SharedLinksContainer;
3,000
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/containers/TrashContainer.tsx
import { Redirect, Route, RouteComponentProps, Switch } from 'react-router-dom'; import TrashView from '../components/sections/Trash/TrashView'; const TrashContainer = ({ match }: RouteComponentProps) => { return ( <Switch> <Route path={match.url} exact component={TrashView} /> <Redirect to="/trash" /> </Switch> ); }; export default TrashContainer;
3,001
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/useActiveShare.tsx
import { createContext, useCallback, useContext, useState } from 'react';
import * as React from 'react';

export type DriveFolder = { shareId: string; linkId: string };

interface ActiveShareProviderState {
    activeShareId: string;
    activeFolder: DriveFolder;
    setFolder: (folder: DriveFolder) => void;
    setDefaultRoot: () => void;
}

const DriveFolderContext = createContext<ActiveShareProviderState | null>(null);

interface Props {
    defaultShareRoot: DriveFolder;
    children: React.ReactNode;
}

/**
 * Holds the currently active share ID and folder.
 *
 * A share is the entry point to the file tree; every user has at least one
 * default share, supplied here as `defaultShareRoot`. That share points to a
 * folder link which is treated as the root and is the initial active folder.
 * Containers typically call `setFolder` when URL parameters change, and
 * `setDefaultRoot` when navigating back to root, which restores the default
 * share ID and folder.
 */
export const ActiveShareProvider = ({ defaultShareRoot, children }: Props) => {
    const [current, setCurrent] = useState<DriveFolder>(defaultShareRoot);

    const setFolder = useCallback((folder: DriveFolder) => {
        setCurrent(folder);
    }, []);

    const setDefaultRoot = useCallback(() => {
        setCurrent(defaultShareRoot);
    }, [defaultShareRoot]);

    return (
        <DriveFolderContext.Provider
            value={{
                activeShareId: current.shareId,
                activeFolder: current,
                setFolder,
                setDefaultRoot,
            }}
        >
            {children}
        </DriveFolderContext.Provider>
    );
};

export const useActiveShare = () => {
    const state = useContext(DriveFolderContext);
    if (!state) {
        throw new Error('Trying to use uninitialized ActiveShareProvider');
    }
    return state;
};

export default useActiveShare;
3,002
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/useDebug.tsx
import { useLocalState } from '@proton/components/hooks';

/**
 * Returns true when the `proton-drive-debug` flag is present and truthy
 * in local storage.
 */
export const useDebug = () => {
    const [debugFlag] = useLocalState(false, 'proton-drive-debug');
    return Boolean(debugFlag);
};
3,003
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/useDesktopDownloads.test.tsx
import { expect, jest } from '@jest/globals';
import { renderHook } from '@testing-library/react-hooks';

import { fetchDesktopVersion } from '@proton/shared/lib/apps/desktopVersions';

import { appPlatforms } from '../../utils/appPlatforms';
import useDesktopDownloads from './useDesktopDownloads';

jest.mock('@proton/shared/lib/apps/desktopVersions');
const mockFetchDesktopVersion = jest.mocked(fetchDesktopVersion);

// console.warn is swapped for a mock in beforeEach so warning counts can be
// asserted; the original is restored in afterEach.
const originalConsoleWarn = console.warn;
const mockConsoleWarn = jest.fn();

describe('useDesktopDownloads', () => {
    let hook: ReturnType<typeof renderHook<{}, ReturnType<typeof useDesktopDownloads>>>;

    // Renders the hook and waits for the initial fetch-driven update.
    const render = async () => {
        hook = renderHook(() => useDesktopDownloads());
        await hook.waitForNextUpdate();
    };

    // Checks each known platform: hidden-when-unavailable platforms may be
    // absent; all others must be present with the expected url (defaults to
    // 'url', matching the mock below) and a defined startDownload.
    const assertOK = (
        downloads: ReturnType<typeof useDesktopDownloads>['downloads'],
        expectedUrls?: (string | undefined)[]
    ) => {
        appPlatforms.forEach(({ platform, hideIfUnavailable }) => {
            const index = downloads.findIndex((download) => download.platform === platform);
            const download = downloads[index];
            if (!download) {
                if (hideIfUnavailable) {
                    return;
                }
                throw new Error(`Platform not present: ${platform}`);
            }
            expect(download.url).toBe(expectedUrls ? expectedUrls[index] : 'url');
            expect(download.startDownload).toBeDefined();
        });
    };

    beforeEach(async () => {
        jest.resetAllMocks();
        console.warn = mockConsoleWarn;

        // Some default values for mocks
        mockFetchDesktopVersion.mockResolvedValue({ url: 'url', version: '0.x' });
    });

    afterEach(() => {
        console.warn = originalConsoleWarn;
    });

    it('should return downloads for each platform', async () => {
        await render();
        assertOK(hook.result.current.downloads);
    });

    it('should remove from list ONLY hidden platforms if all fetch calls fail', async () => {
        mockFetchDesktopVersion.mockRejectedValue(new Error('oh no'));
        await render();
        assertOK(hook.result.current.downloads, [undefined, undefined]);
        expect(mockConsoleWarn).toHaveBeenCalledTimes(appPlatforms.length);
    });

    it('should not remove from list platforms not marked for hiding', async () => {
        mockFetchDesktopVersion
            .mockRejectedValueOnce(new Error('oh no'))
            .mockResolvedValueOnce({ url: 'url', version: '0.x' });
        await render();
        assertOK(hook.result.current.downloads, [undefined, 'url']);
        expect(mockConsoleWarn).toHaveBeenCalledTimes(1);
    });
});
3,004
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/useDesktopDownloads.tsx
import { useEffect, useState } from 'react'; import useLoading from '@proton/hooks/useLoading'; import isTruthy from '@proton/utils/isTruthy'; import { PlatformInfo, appPlatforms, fetchDesktopDownloads } from '../../utils/appPlatforms'; type PlatformDownload = PlatformInfo & { url?: string; startDownload?: () => void; }; /** * A hook that will fetch all available desktop downloads on mount, and provide the data. */ export const useDesktopDownloads = () => { const [isLoading, withLoading] = useLoading(); const [downloads, setDownloads] = useState<PlatformDownload[]>([]); useEffect( () => { void withLoading( fetchDesktopDownloads().then((result) => { setDownloads( appPlatforms .map<PlatformDownload | undefined>((platform) => { const url = result[platform.platform]; if (platform.hideIfUnavailable && !url) { return undefined; } return { ...platform, url, startDownload: () => { if (!url) { return; } window.location.href = url; }, }; }) .filter(isTruthy) ); }) ); }, // We specifically pass an empty object as withLoading is not memoized // eslint-disable-next-line react-hooks/exhaustive-deps [] ); return { isLoading, downloads, }; }; export default useDesktopDownloads;
3,005
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/useDriveDragMove.tsx
import { useRef, useState } from 'react';
import * as React from 'react';

import { c } from 'ttag';

import { useGlobalLoader } from '@proton/components';
import { CUSTOM_DATA_FORMAT } from '@proton/shared/lib/drive/constants';
import isTruthy from '@proton/utils/isTruthy';
import noop from '@proton/utils/noop';

import { useSelection } from '../../components/FileBrowser';
import { DragMoveControls } from '../../components/FileBrowser/interface';
import { DriveItem } from '../../components/sections/Drive/Drive';
import { useActions } from '../../store';
import { LinkInfo } from '../../store/_actions/interface';

type DragAndDropItem = DriveItem;

/**
 * Provides drag-and-drop move controls for items in a folder listing:
 * tracking what is being dragged, which row is the active drop target, and
 * executing the move when the drop happens.
 *
 * @param shareId - Share the displayed items belong to.
 * @param contents - Items currently shown in the browser.
 * @param clearSelections - Clears the selection after a successful drop.
 */
export default function useDriveDragMove(shareId: string, contents: DragAndDropItem[], clearSelections: () => void) {
    const { moveLinks } = useActions();
    const withGlobalLoader = useGlobalLoader({ text: c('Info').t`Moving files` });
    const [allDragging, setAllDragging] = useState<DragAndDropItem[]>([]);
    const [activeDropTarget, setActiveDropTarget] = useState<DragAndDropItem>();
    // dragenter/dragleave fire for child elements too; the counter tells us
    // when the pointer has truly left the row.
    const dragEnterCounter = useRef(0);

    const selectionControls = useSelection();

    // Selected items resolved against the current contents, skipping locked ones.
    const selectedItems = React.useMemo(
        () =>
            selectionControls?.selectedItemIds
                .map((selectedItemId) => contents.find(({ id, isLocked }) => !isLocked && selectedItemId === id))
                .filter(isTruthy) || [],
        // Fix: `contents` was missing from the dependency list, so the memo
        // could return stale items after the folder listing changed.
        [selectionControls?.selectedItemIds, contents]
    );

    const getHandleItemDrop = (newParentLinkId: string) => async (e: React.DragEvent) => {
        let toMove: DragAndDropItem[];
        try {
            toMove = JSON.parse(e.dataTransfer.getData(CUSTOM_DATA_FORMAT));
        } catch (err: any) {
            // Data should be set by DecryptedLink when drag starts.
            // If the data transfer was not available or the move was so
            // fast that the data were not set yet, we should ignore the
            // event.
            console.warn('Could not finish move operation due to', err);
            return;
        }
        dragEnterCounter.current = 0;

        clearSelections();
        setActiveDropTarget(undefined);

        const toMoveInfo: LinkInfo[] = toMove.map((item) => ({
            parentLinkId: item.parentLinkId,
            name: item.name,
            isFile: item.isFile,
            linkId: item.id,
            rootShareId: shareId,
        }));

        await withGlobalLoader(
            moveLinks(new AbortController().signal, { shareId, linksToMove: toMoveInfo, newParentLinkId })
        );
    };

    const getDragMoveControls = (item: DragAndDropItem): DragMoveControls => {
        const dragging = allDragging.some(({ id }) => id === item.id);
        // Dragging a selected item drags the whole selection; otherwise only
        // the item itself.
        const setDragging = (isDragging: boolean) =>
            isDragging
                ? setAllDragging(selectedItems.some(({ id }) => id === item.id) ? selectedItems : [item])
                : setAllDragging([]);

        const isActiveDropTarget = activeDropTarget?.id === item.id;
        // Only folders can receive a drop, and never an item being dragged.
        const availableTarget = !item.isFile && allDragging.every(({ id }) => item.id !== id);
        const handleDrop = getHandleItemDrop(item.id);

        const handleDragOver = (e: React.DragEvent<HTMLTableRowElement>) => {
            if (availableTarget) {
                e.dataTransfer.dropEffect = 'move';
                e.preventDefault();
                if (dragEnterCounter.current === 1 && !isActiveDropTarget) {
                    setActiveDropTarget(item);
                }
            }
        };

        const handleDragLeave = () => {
            if (availableTarget) {
                dragEnterCounter.current -= 1;

                if (dragEnterCounter.current <= 0 && isActiveDropTarget) {
                    setActiveDropTarget(undefined);
                }
            }
        };

        const handleDragEnter = () => {
            if (availableTarget) {
                dragEnterCounter.current += 1;
            }
        };

        return {
            handleDragOver,
            handleDrop,
            handleDragLeave,
            handleDragEnter,
            dragging,
            setDragging,
            isActiveDropTarget,
            selectedItems,
        };
    };

    return { getDragMoveControls, getHandleItemDrop };
}

/**
 * Drop-target-only variant for views that never originate drags themselves.
 */
export function useDriveDragMoveTarget(shareId: string) {
    const { getHandleItemDrop } = useDriveDragMove(shareId, [], noop);
    return { getHandleItemDrop };
}
3,006
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/useFolderContainerTitle.tsx
import { DriveSectionRouteProps } from '../../components/sections/Drive/DriveView'; import { useLinkName } from '../../store/_views/utils'; export const useFolderContainerTitle = ({ params, setAppTitle, }: { params: DriveSectionRouteProps; setAppTitle: (title?: string) => void; }) => { const name = useLinkName(params.shareId || '', params.linkId || ''); setAppTitle(name); };
3,007
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/useNavigate.ts
import { useCallback } from 'react';
import { useHistory, useLocation } from 'react-router-dom';

import { generateUID } from '@proton/components';

import { toLinkURLType } from '../../components/sections/helpers';

interface NavigationEvenListener {
    id: string;
    run: () => void;
}

// Module-level so listeners survive across hook instances.
let listeners: NavigationEvenListener[] = [];

/**
 * Navigation helpers for the Drive app. All helpers push to history and then
 * notify registered navigation listeners.
 */
function useNavigate() {
    const history = useHistory();
    const location = useLocation();

    // Pushes the path and fires every registered navigation listener.
    const pushToHistory = (path: string) => {
        history.push(path);
        listeners.forEach((listener) => {
            listener.run();
        });
    };

    const navigateToLink = useCallback(
        (shareId: string, linkId: string, isFile: boolean) => {
            // `r` carries the current path so the target view can offer a way back.
            pushToHistory(`/${shareId}/${toLinkURLType(isFile)}/${linkId}?r=${location.pathname}`);
        },
        [history, location.pathname]
    );

    const navigateToRoot = useCallback(() => {
        pushToHistory(`/`);
    }, [history]);

    const navigateToSharedURLs = useCallback(() => {
        pushToHistory(`/shared-urls`);
    }, [history]);

    const navigateToTrash = useCallback(() => {
        pushToHistory(`/trash`);
    }, [history]);

    // Fix: memoized like its siblings so consumers get a stable reference.
    const navigateToDevices = useCallback(() => {
        pushToHistory('/devices');
    }, [history]);

    // Fix: `searchTerm` previously had an implicit `any` type.
    const navigateToSearch = useCallback(
        (searchTerm: string) => {
            history.push({
                pathname: '/search',
                hash: `q=${searchTerm}`,
            });
        },
        [history]
    );

    // Registers a callback fired on every navigation; returns an ID for removal.
    const addListener = (listener: () => void) => {
        const listenerId = generateUID('drive-navigation-event');
        listeners.push({ id: listenerId, run: listener });

        return listenerId;
    };

    const removeListener = (listenerId: string) => {
        listeners = listeners.filter(({ id }) => id !== listenerId);
    };

    return {
        navigateToLink,
        navigateToRoot,
        navigateToSharedURLs,
        navigateToTrash,
        navigateToSearch,
        addListener,
        removeListener,
        navigateToDevices,
    };
}

export default useNavigate;
3,008
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/drive/usePublicToken.tsx
import { useMemo } from 'react';
import { useLocation } from 'react-router-dom';

/**
 * Extracts the public share token from the URL path (everything after the
 * `/urls/` prefix) and the URL password from the location hash.
 */
export default function usePublicToken() {
    const { pathname, hash } = useLocation();

    const token = useMemo(() => pathname.replace(/\/urls\/?/, ''), [pathname]);
    // Strip the leading '#' to get the raw password value.
    const urlPassword = useMemo(() => hash.replace('#', ''), [hash]);

    return { token, urlPassword };
}
3,009
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/util/useOnScrollEnd.ts
import { MutableRefObject, useEffect } from 'react';

import { useElementRect } from '@proton/components';

// True when the remaining scrollable distance is within clientHeight/offsetRatio
// of the end (or when the element cannot scroll at all).
const isScrollEnd = (target: HTMLElement | null, offsetRatio: number) =>
    target && target.scrollHeight - target.scrollTop <= target.clientHeight / offsetRatio;

/**
 * Invokes `callback` whenever the referenced element is scrolled to (near)
 * its end, and also immediately when it is already at the end or has no
 * scrollbar (e.g. content shorter than the viewport).
 *
 * @param callback - Called on reaching the scroll end; should be stable
 *   (memoized) since it appears in the effect dependencies.
 * @param targetRef - Ref to the scrollable element.
 * @param offsetRatio - Divides clientHeight to compute the trigger distance;
 *   the default of 1 triggers within one viewport of the end.
 * @param deps - Extra dependencies that re-run the "already at end" check.
 */
function useOnScrollEnd(
    callback: () => void,
    targetRef: MutableRefObject<HTMLElement | null>,
    offsetRatio = 1,
    deps: React.DependencyList = []
) {
    // Re-renders when the element's size changes, re-triggering the check below.
    const boundingBox = useElementRect(targetRef);

    useEffect(() => {
        const handleScroll = ({ target }: Event) => {
            if (isScrollEnd(target as HTMLElement | null, offsetRatio)) {
                callback();
            }
        };

        if (targetRef.current) {
            targetRef.current.addEventListener('scroll', handleScroll);
        }

        return () => {
            if (targetRef.current) {
                targetRef.current.removeEventListener('scroll', handleScroll);
            }
        };
        // NOTE(review): targetRef.current in deps is unconventional (mutating a
        // ref does not re-render); presumably relied upon because a render
        // accompanies ref changes here — confirm before changing.
    }, [targetRef.current, callback]);

    useEffect(() => {
        // If initially at the end or no scrollbar execute callback
        if (isScrollEnd(targetRef.current, offsetRatio)) {
            callback();
        }
    }, [callback, boundingBox, targetRef.current, ...deps]);
}

export default useOnScrollEnd;
3,010
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/util/useQueuedFunction.ts
import { useCache } from '@proton/components';

import noop from '@proton/utils/noop';

// Per-key queue state: [number of currently running calls, pending thunks].
type FunctionQueue<R> = [number, (() => Promise<R>)[]];

/**
 * Puts function execution into a queue with a threshold of maximum active functions processing at once
 */
const useQueuedFunction = () => {
    const cache = useCache();

    // Wraps `fn` so that at most `threshold` invocations (per `fnKey`) run
    // concurrently; excess calls are queued and started as earlier ones settle.
    const queuedFunction = <R, A extends any[]>(fnKey: string, fn: (...args: A) => Promise<R>, threshold = 1) => {
        const key = `queuedfn_${fnKey}`;

        if (!cache.has(key)) {
            cache.set(key, [0, []]);
        }

        // Called when a running invocation settles: start the next queued
        // thunk if any, otherwise decrement the running counter.
        const runNextQueued = () => {
            const [processing, queued]: FunctionQueue<R> = cache.get(key);

            if (queued.length) {
                const [next, ...remaining] = queued;
                // Errors are surfaced via the promise handed to the caller in
                // `enqueue`; here we only care about scheduling the next one.
                next().catch(noop).finally(runNextQueued);
                cache.set(key, [processing, remaining]);
            } else {
                cache.set(key, [processing - 1, []]);
            }
        };

        // Starts `fn` immediately and bumps the running counter.
        const run = (...args: A) => {
            const [processing, queued]: FunctionQueue<R> = cache.get(key);
            cache.set(key, [processing + 1, queued]);

            const promise = fn(...args);
            promise.catch(noop).finally(runNextQueued);

            return promise;
        };

        // Defers `fn` by pushing a thunk; the returned promise resolves with
        // fn's result once the thunk is eventually run.
        const enqueue = (...args: A) =>
            new Promise<R>((resolve) => {
                const [processing, queued]: FunctionQueue<R> = cache.get(key);

                cache.set(key, [
                    processing,
                    [
                        ...queued,
                        () => {
                            const promise = fn(...args);
                            resolve(promise);
                            return promise;
                        },
                    ],
                ]);
            });

        // Run immediately while under the threshold, otherwise queue.
        return (...args: A) => {
            const [processing]: FunctionQueue<R> = cache.get(key);
            return processing < threshold ? run(...args) : enqueue(...args);
        };
    };

    return queuedFunction;
};

export default useQueuedFunction;
3,011
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/hooks/util/useShiftKey.ts
import { useEffect, useRef } from 'react';

/**
 * Tracks whether the Shift key is currently held down.
 * Returns a getter function so callers can read the live value without
 * triggering re-renders.
 */
export const useShiftKey = () => {
    const isShiftHeld = useRef(false);

    useEffect(() => {
        const handleKeyDown = (event: KeyboardEvent) => {
            if (event.key === 'Shift') {
                isShiftHeld.current = true;
            }
        };
        const handleKeyUp = (event: KeyboardEvent) => {
            if (event.key === 'Shift') {
                isShiftHeld.current = false;
            }
        };

        window.addEventListener('keydown', handleKeyDown);
        window.addEventListener('keyup', handleKeyUp);

        return () => {
            window.removeEventListener('keydown', handleKeyDown);
            window.removeEventListener('keyup', handleKeyUp);
        };
    }, []);

    return () => isShiftHeld.current;
};
3,012
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/DriveProvider.tsx
import { ReactNode } from 'react';

import { PublicSessionProvider } from './_api';
import { DevicesProvider } from './_devices';
import { DownloadsProvider, PublicDownloadsProvider } from './_downloads';
import { DriveEventManagerProvider } from './_events';
import { LinksProvider, PublicLinksProvider } from './_links';
import { PhotosProvider } from './_photos';
import { SearchProvider } from './_search';
import { SharesProvider } from './_shares';
import { UploadProvider } from './_uploads';
import { VolumesProvider } from './_volumes';

interface DriveProviderProps {
    children: ReactNode;
}

// Composes every store provider needed by the authenticated Drive app.
// The nesting order matters: inner providers may consume outer contexts
// (e.g. links depend on shares, which depend on volumes and events).
export function DriveProvider({ children }: DriveProviderProps) {
    return (
        <DriveEventManagerProvider>
            <VolumesProvider>
                <SharesProvider>
                    <LinksProvider>
                        <DevicesProvider>
                            <DownloadsProvider>
                                <UploadProvider>
                                    <SearchProvider>
                                        <PhotosProvider>{children}</PhotosProvider>
                                    </SearchProvider>
                                </UploadProvider>
                            </DownloadsProvider>
                        </DevicesProvider>
                    </LinksProvider>
                </SharesProvider>
            </VolumesProvider>
        </DriveEventManagerProvider>
    );
}

interface PublicDriveProviderProps {
    children: ReactNode;
}

// Reduced provider stack for the unauthenticated (public share link) app:
// only session, shares, links, and downloads are available.
export function PublicDriveProvider({ children }: PublicDriveProviderProps) {
    return (
        <PublicSessionProvider>
            <SharesProvider>
                <PublicLinksProvider>
                    <PublicDownloadsProvider>{children}</PublicDownloadsProvider>
                </PublicLinksProvider>
            </SharesProvider>
        </PublicSessionProvider>
    );
}
3,013
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/architecture.md
# Drive Architecture The main Drive logic, where all the magic happens. The app has several states which are automatically updated using an event loop and all possible actions available. The idea is to keep the implementation details hidden from the outside and export friendly interfaces to be used in simple enough React components. Therefore, prefer to use what is re-exported in the top module only, and add new exports wisely. The central part is the link for sure. `useLink` provides methods to get decrypted links or their keys. If the link is not in the cache yet, it will fetch it from API, decrypt it and store it in the cache. Using links directly should not be needed and should be a sign of bad design. At least, it might not be an efficient way to get data. For that, there is `useLinksListing` which queries APIs per page and ensures the needed links to be presented to the user are ready. This hook has its own state and complex algorithm to properly get all links from API even if conditions in between changes. To do anything with links, such as rename link or move links elsewhere, there are two hooks that should be used: `useLinkActions` and `useLinksActions`. Basically, it could be one, but it would be a pretty long one, and this seems like a nice clear separation. `useLinksState` or `useLinksKeys` are a very internal implementation detail that should not be used outside of the links folder. But the link cannot work without its share. Every user has some default share with all files, which points to the root link. Any link in that share can have its own share to be used for sharing with other members or for sharing using the share URL. That's the reason why the graph looks a bit like recursive dependencies. `useShare` provides the least possible implementation for shares which doesn't need any information about the link, that is fetching and decrypting shares with storing the state similarly like links do (`useSharesState` and `useSharesKeys`). 
Sharing options (`useShareActions` or `useShareUrl`) then can depend on both `useLink` and `useShare` and combine logic into sharing capability. Similarly, `useLockedVolume` can combine both to provide ways to restore locked files. So far, this is still quite hard to use. You need to know a lot before doing any action, not to mention the need to handle all possible errors. Here comes `views` folder with its hooks providing way nicer interfaces for React components. We can see some parallels with database views. It is not storing any extra information; it is just a bit different view to make stuff easier. Here we have, for example, `useFolderView`, which can provide all necessary logic for `FileBrowser`, such as loading, sorting, or selection; or `useFileView`, which can handle preview with navigation; or `useTransfersView` to combine upload and download transfers with global actions for easier use in `TransferManager`. The last important part is `useActions` from `actions` folder which wraps actions from `useLinkActions` and `useLinksActions` and `useShareUrl`, and adds error handling with creating notification to user. Details about upload and download code are presented here as simple one box, but actually, it is very complex, and to learn more about it, read on in own architecture files in uploads and download folders. 
```mermaid graph TD subgraph "drive folder" subgraph "events folder" eventManager end subgraph "shares folder" useShare useDefaultShare useShareActions useSharesKeys useSharesState useShareUrl useVolume useLockedVolume useShare --> useSharesState useShare --> useSharesKeys useDefaultShare --> useShare useDefaultShare --> useVolume useDefaultShare --> useSharesState useShareUrl --> useShare useShareUrl --> useShareActions useShareUrl -- poll --> eventManager useLockedVolume --> useShare useLockedVolume --> useDefaultShare useLockedVolume --> useSharesState end subgraph "links folder" useLink useLinks useLinkActions useLinksActions useLinksKeys useLinksListing useLinksState useLink --> useLinksState useLink --> useLinksKeys useLinks --> useLink useLinkActions --> useLink useLinkActions -- poll --> eventManager useLinksActions --> useLink useLinksActions --> useLinks useLinksActions -- poll --> eventManager useLinksListing --> useLinks useLinksListing --> useLinksState useLinksState -- register callback --> eventManager end useShareUrl --> useLink useShareActions --> useLink useLockedVolume --> useLink useLink --> useShare subgraph "views folder" useFolderView["use[Folder|SharedLinks|Trash]View"] useFileView useTransfersView subgraph "views utils folder" useSelection useSorting end useFolderView --> useSelection useFolderView --> useSorting end useFolderView --> useLinksListing useFileView --> useLink subgraph "actions folder" useActions end useActions --> useLinkActions useActions --> useLinksActions useActions --> useShareUrl subgraph "downloads folder" useDownload end subgraph "uploads folder" useUpload end useTransfersView --> useDownload useTransfersView --> useUpload useDownload --> useLinksListing useUpload --> useLinkActions useUpload --> useLinksActions end subgraph "components folder" FileBrowser FileBrowser --> GridView FileBrowser --> ListView FileBrowser --> Toolbar Toolbar["Toolbar / ContextMenu"] TransferManager end Toolbar --> useActions subgraph 
"containers folder" MainContainer PreviewContainer FolderContainer["[Folder|SharedLinks|Trash]Container"] MainContainer --> FolderContainer MainContainer --> PreviewContainer MainContainer -- set active share --> eventManager end FolderContainer --> FileBrowser FolderContainer --> useFolderView PreviewContainer --> useFileView TransferManager --> useTransfersView ```
3,014
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/index.ts
// Public surface of the Drive store. Components should import from here
// rather than reaching into the private `_*` modules directly.
export { DriveProvider, PublicDriveProvider } from './DriveProvider';
export { useActions } from './_actions';
export { usePublicAuth } from './_api';
export { useDriveEventManager } from './_events';
export { validateLinkNameField, formatLinkName, splitLinkName } from './_links';
export { useRevisions } from './_revisions';
export { useUserSettings, UserSettingsProvider } from './_settings';
export { useDefaultShare, usePublicShare, useLockedVolume, useShareUrl } from './_shares';
// TODO: Check with Michal if it's okay to import mimeTypeFromFile here
export { useUpload, useFileUploadInput, useFolderUploadInput, mimeTypeFromFile } from './_uploads';
export * from './_uploads/interface';
export { useDownloadProvider as useDownload, useThumbnailsDownload } from './_downloads';
export * from './_downloads/interface';
export * from './_links/interface';
export * from './_shares/interface';
export * from './_devices/interface';
export * from './_revisions/interface';
export * from './_views';
export { useSearchLibrary } from './_search';
export { usePhotos, usePhotosFeatureFlag, usePhotosRecovery } from './_photos';
export * from './_photos/interface';
3,015
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_actions/index.ts
// Barrel file for the actions module; exposes the main actions hook.
export { default as useActions } from './useActions';
3,016
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_actions/interface.ts
/**
 * Minimal description of a link passed to action helpers (e.g. move
 * operations) and their notifications.
 */
export type LinkInfo = {
    // Link ID of the parent folder.
    parentLinkId: string;
    // Link ID of the item itself.
    linkId: string;
    // ID of the share the link belongs to.
    rootShareId: string;
    // Display name of the link.
    name: string;
    // True for files, false for folders.
    isFile: boolean;
};
3,017
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_actions/useActions.tsx
import { c, msgid } from 'ttag'; import { useConfirmActionModal, useNotifications } from '@proton/components'; import { VERIFICATION_STATUS } from '@proton/crypto'; import { getIsConnectionIssue } from '@proton/shared/lib/api/helpers/apiErrorHelper'; import { isSafari, textToClipboard } from '@proton/shared/lib/helpers/browser'; import isTruthy from '@proton/utils/isTruthy'; import { sendErrorReport } from '../../utils/errorHandling'; import { ValidationError } from '../../utils/errorHandling/ValidationError'; import useDevicesActions from '../_devices/useDevicesActions'; import { useDownload } from '../_downloads'; import { useLinkActions, useLinksActions } from '../_links'; import { usePhotos } from '../_photos'; import { useShareUrl } from '../_shares'; import useUploadFile from '../_uploads/UploadProvider/useUploadFile'; import { TransferConflictStrategy } from '../_uploads/interface'; import { useErrorHandler } from '../_utils'; import { LinkInfo } from './interface'; import useListNotifications from './useListNotifications'; /** * useActions provides actions over links and its results is reported back * to user using notifications. 
* * {@return {confirmModal}} Only needed for deletePermanently/emptyTrash/stopSharingLinks */ export default function useActions() { const { showErrorNotification } = useErrorHandler(); const [confirmModal, showConfirmModal] = useConfirmActionModal(); const { createNotification } = useNotifications(); const { createMovedItemsNotifications, createTrashedItemsNotifications, createRestoredItemsNotifications, createDeletedItemsNotifications, createDeletedSharedLinksNotifications, } = useListNotifications(); const { checkFirstBlockSignature } = useDownload(); const { initFileUpload } = useUploadFile(); const link = useLinkActions(); const links = useLinksActions(); const shareUrl = useShareUrl(); const devicesActions = useDevicesActions(); const { removePhotosFromCache } = usePhotos(); const createFolder = async ( abortSignal: AbortSignal, shareId: string, parentLinkId: string, name: string ): Promise<string> => { return link .createFolder(abortSignal, shareId, parentLinkId, name) .then((id: string) => { createNotification({ text: <span className="text-pre-wrap">{c('Notification').t`"${name}" created successfully`}</span>, }); return id; }) .catch((e) => { showErrorNotification( e, <span className="text-pre-wrap">{c('Notification').t`"${name}" failed to be created`}</span> ); throw e; }); }; const createFile = async (shareId: string, parentLinkId: string, name: string) => { const file = new File([], name, { type: 'text/plain' }); const controls = initFileUpload(shareId, parentLinkId, file, async () => { throw new ValidationError(c('Error').t`"${name}" already exists`); }); await controls .start() .then(() => { createNotification({ text: <span className="text-pre-wrap">{c('Notification').t`"${name}" created successfully`}</span>, }); }) .catch((e) => { showErrorNotification( e, <span className="text-pre-wrap">{c('Notification').t`"${name}" failed to be created`}</span> ); throw e; }); }; const saveFile = async ( shareId: string, parentLinkId: string, name: string, 
mimeType: string, content: Uint8Array[] ) => { // saveFile is using file upload using name with replace strategy as // default. That's not the best way - better would be to use link ID // and also verify revision ID that file was not touched in meantime // by other client. But this is enough for first version to play with // the feature and see what all needs to be changed and implemented. const file = new File(content, name, { type: mimeType }); const controls = initFileUpload(shareId, parentLinkId, file, async () => TransferConflictStrategy.Replace); await controls .start() .then(() => { createNotification({ text: <span className="text-pre-wrap">{c('Notification').t`"${name}" saved successfully`}</span>, }); }) .catch((e) => { showErrorNotification( e, <span className="text-pre-wrap">{c('Notification').t`"${name}" failed to be saved`}</span> ); throw e; }); }; const renameLink = async (abortSignal: AbortSignal, shareId: string, linkId: string, newName: string) => { // translator: ${newName} is for a folder or file name. const successNotificationText = c('Notification').t`"${newName}" renamed successfully`; // translator: ${newName} is for a folder or file name. const failNotificationText = c('Notification').t`"${newName}" failed to be renamed`; return link .renameLink(abortSignal, shareId, linkId, newName) .then(() => { createNotification({ text: <span className="text-pre-wrap">{successNotificationText}</span>, }); }) .catch((e) => { showErrorNotification(e, <span className="text-pre-wrap">{failNotificationText}</span>); throw e; }); }; const checkLinkSignatures = async (abortSignal: AbortSignal, shareId: string, linkId: string) => { const [metaSignatureIssues, blockSignatureIssue] = await Promise.all([ link.checkLinkMetaSignatures(abortSignal, shareId, linkId), // To avoid the need to download the whole file we assume that // either all blocks fail, or none, at least in most cases. So it // should be enough to check only the first block. 
During download // we check every single block, so user is still protected. checkFirstBlockSignature(abortSignal, shareId, linkId), ]).catch((e) => { // Only network error can be thrown here to indicate the signature // couldn't be checked and user should try again. Any other case // such as a very bad data should be represented as missing // signature (technically the signature is not there - some other // malformed data is). if (getIsConnectionIssue(e)) { throw e; } sendErrorReport(e); return [ { passphrase: VERIFICATION_STATUS.NOT_SIGNED, name: VERIFICATION_STATUS.NOT_SIGNED, xattrs: VERIFICATION_STATUS.NOT_SIGNED, }, { contentKeyPacket: VERIFICATION_STATUS.NOT_SIGNED, blocks: VERIFICATION_STATUS.NOT_SIGNED, thumbnail: VERIFICATION_STATUS.NOT_SIGNED, }, ]; }); if (!metaSignatureIssues && !blockSignatureIssue) { return; } return { ...metaSignatureIssues, ...blockSignatureIssue, }; }; const moveLinks = async ( abortSignal: AbortSignal, { shareId, linksToMove, newParentLinkId, newShareId, }: { shareId: string; linksToMove: LinkInfo[]; newParentLinkId: string; newShareId?: string; } ) => { if (!linksToMove.length) { return; } const linkIds = linksToMove.map(({ linkId }) => linkId); const result = await links.moveLinks(abortSignal, { shareId, linkIds, newParentLinkId, newShareId }); // This is a bit ugly, but the photo linkId cache is not connected // very well to the rest of our state. 
removePhotosFromCache(result.successes); const undoAction = async () => { const linkIdsPerParentId = Object.entries(result.originalParentIds).reduce( (acc, [linkId, originalParentId]) => { (acc[originalParentId] ||= []).push(linkId); return acc; }, {} as { [parentLinkId: string]: string[] } ); const undoResult = aggregateResults( await Promise.all( Object.entries(linkIdsPerParentId).map(async ([parentLinkId, toMoveBackIds]) => { return links.moveLinks(abortSignal, { shareId, linkIds: toMoveBackIds, newParentLinkId: parentLinkId, newShareId, }); }) ) ); createMovedItemsNotifications(linksToMove, undoResult.successes, undoResult.failures); }; createMovedItemsNotifications(linksToMove, result.successes, result.failures, undoAction); }; const trashLinks = async (abortSignal: AbortSignal, linksToTrash: LinkInfo[]) => { if (!linksToTrash.length) { return; } const result = await links.trashLinks( abortSignal, linksToTrash.map(({ linkId, rootShareId, parentLinkId }) => ({ linkId, shareId: rootShareId, parentLinkId, })) ); // This is a bit ugly, but the photo linkId cache is not connected // very well to the rest of our state. 
removePhotosFromCache(result.successes); const undoAction = async () => { const linksToUndo = result.successes .map((linkId) => linksToTrash.find((link) => link.linkId === linkId)) .filter(isTruthy) .map((link) => ({ linkId: link.linkId, shareId: link.rootShareId })); const undoResult = await links.restoreLinks(abortSignal, linksToUndo); createRestoredItemsNotifications(linksToTrash, undoResult.successes, undoResult.failures); }; createTrashedItemsNotifications(linksToTrash, result.successes, result.failures, undoAction); }; const restoreLinks = async (abortSignal: AbortSignal, linksToRestore: LinkInfo[]) => { if (!linksToRestore.length) { return; } const result = await links.restoreLinks( abortSignal, linksToRestore.map(({ linkId, rootShareId }) => ({ linkId, shareId: rootShareId })) ); const undoAction = async () => { const linksToTrash = result.successes .map((linkId) => linksToRestore.find((link) => link.linkId === linkId)) .filter(isTruthy); await trashLinks(abortSignal, linksToTrash); }; createRestoredItemsNotifications(linksToRestore, result.successes, result.failures, undoAction); }; const deletePermanently = async (abortSignal: AbortSignal, linksToDelete: LinkInfo[]) => { if (linksToDelete.length === 0) { return; } const itemName = linksToDelete[0].name; const title = c('Title').t`Delete permanently`; const confirm = c('Action').t`Delete permanently`; const message = linksToDelete.length === 1 ? 
c('Info').t`Are you sure you want to permanently delete "${itemName}" from trash?` : c('Info').t`Are you sure you want to permanently delete selected items from trash?`; void showConfirmModal({ title, submitText: confirm, message, onSubmit: async () => { const result = await links.deleteTrashedLinks( abortSignal, linksToDelete.map(({ linkId, rootShareId }) => ({ linkId, shareId: rootShareId })) ); createDeletedItemsNotifications(linksToDelete, result.successes, result.failures); }, }); }; const emptyTrash = async (abortSignal: AbortSignal) => { const title = c('Title').t`Empty trash`; const confirm = c('Action').t`Empty trash`; const message = c('Info').t`Are you sure you want to empty trash and permanently delete all the items?`; void showConfirmModal({ title, submitText: confirm, message, onSubmit: async () => { await links .emptyTrash(abortSignal) .then(() => { const notificationText = c('Notification') .t`All items will soon be permanently deleted from trash`; createNotification({ text: notificationText }); }) .catch((err: any) => { showErrorNotification(err, c('Notification').t`Trash failed to be emptied`); }); }, }); }; const stopSharingLinks = (abortSignal: AbortSignal, linksToStopSharing: LinkInfo[]) => { if (!linksToStopSharing.length) { return; } void showConfirmModal({ title: c('Title').t`Stop sharing`, submitText: c('Title').t`Stop sharing`, message: c('Info').ngettext( msgid`This will delete the link and remove access to your file or folder for anyone with the link.`, `This will delete the links and remove access to your files or folders for anyone with the links.`, linksToStopSharing.length ), onSubmit: async () => { const result = await shareUrl.deleteShareUrls( abortSignal, linksToStopSharing.map(({ linkId, rootShareId }) => ({ linkId, shareId: rootShareId })) ); createDeletedSharedLinksNotifications(linksToStopSharing, result.successes, result.failures); }, }); }; // Safari does not allow copy to clipboard outside of the event // (e.g., click). 
No await or anything does not do the trick. // Clipboard API also doesn't work. Therefore we cannot have this // feature on Safari at this moment. const copyShareLinkToClipboard = isSafari() ? undefined : async (abortSignal: AbortSignal, shareId: string, linkId: string) => { return shareUrl .loadShareUrlLink(abortSignal, shareId, linkId) .then((url) => { if (url) { textToClipboard(url); createNotification({ text: c('Info').t`Link copied to clipboard`, }); } }) .catch((err: any) => { showErrorNotification(err, c('Notification').t`Cannot load link`); }); }; const removeDevice = (deviceId: string, abortSignal: AbortSignal) => { return devicesActions .remove(deviceId, abortSignal) .then(() => { const notificationText = c('Notification').t`Device removed`; createNotification({ text: notificationText }); }) .catch((err) => { showErrorNotification(err, c('Notification').t`Device failed to be removed`); sendErrorReport(err); }); }; const renameDevice = async ( params: { shareId: string; linkId: string; deviceId: string; newName: string; haveLegacyName: boolean }, abortSignal?: AbortSignal ) => { await Promise.all([ await link.renameLink(new AbortController().signal, params.shareId, params.linkId, params.newName), await devicesActions.rename(params, abortSignal), ]) .then(() => { const notificationText = c('Notification').t`Device renamed`; createNotification({ text: notificationText }); }) .catch((err) => { showErrorNotification(err, c('Notification').t`Device failed to be renamed`); sendErrorReport(err); }); }; return { createFolder, createFile, saveFile, renameLink, checkLinkSignatures, moveLinks, trashLinks, restoreLinks, deletePermanently, emptyTrash, stopSharingLinks, copyShareLinkToClipboard, removeDevice, renameDevice, confirmModal, }; } function aggregateResults(results: { successes: string[]; failures: { [linkId: string]: any } }[]) { return results.reduce( (acc, val) => { return { successes: [...acc.successes, ...val.successes], failures: { ...acc.failures, 
...val.failures }, }; }, { successes: [], failures: {} } ); }
3,018
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_actions/useListNotifications.tsx
import { c, msgid } from 'ttag';

import { NotificationButton, useNotifications } from '@proton/components';

import { useErrorHandler } from '../_utils';
import { LinkInfo } from './interface';

/**
 * useListNotifications builds the success/failure notifications shown after
 * batch operations on links (move, trash, restore, delete, unshare).
 *
 * Each creator takes the full list of affected links, the IDs that
 * succeeded, a map of linkId -> error for the failures, and optionally an
 * undo callback rendered as an "Undo" button inside the success toast.
 */
export default function useListNotifications() {
    const { createNotification } = useNotifications();
    const { showAggregatedErrorNotification } = useErrorHandler();

    // Shows one success toast: a named single-item message when exactly one
    // item succeeded (and its name is known), otherwise a counted message.
    const createSuccessMessage = (
        linkInfos: LinkInfo[],
        linkIds: string[],
        oneItemMessage: (name: string) => string,
        manyItemsMessage: (numberOfItems: number) => string,
        undoAction?: () => Promise<void>
    ) => {
        if (!linkIds.length) {
            return;
        }

        const firstItemName = linkInfos.find((link) => link.linkId === linkIds[0])?.name;
        const message =
            firstItemName && linkIds.length === 1 ? oneItemMessage(firstItemName) : manyItemsMessage(linkIds.length);

        createNotification({
            type: 'success',
            text: (
                <>
                    <span>{message}</span>
                    {undoAction && (
                        <>
                            <NotificationButton onClick={() => undoAction()}>{c('Action').t`Undo`}</NotificationButton>
                        </>
                    )}
                </>
            ),
        });
    };

    // Aggregates per-link failures into one error toast, again preferring a
    // named single-item message when only one link failed.
    const createFailureMessage = (
        linkInfos: LinkInfo[],
        failures: { [linkId: string]: any },
        oneItemMessage: (name: string) => string,
        manyItemsMessage: (numberOfItems: number) => string
    ) => {
        showAggregatedErrorNotification(Object.values(failures), (errors) => {
            const firstItemId = Object.keys(failures)[0];
            const firstItemName = linkInfos.find((link) => link.linkId === firstItemId)?.name;
            return firstItemName && errors.length === 1
                ? oneItemMessage(firstItemName)
                : manyItemsMessage(errors.length);
        });
    };

    // Notifications for moving items to another folder.
    const createMovedItemsNotifications = (
        linkInfos: LinkInfo[],
        ok: string[],
        failures: { [linkId: string]: any },
        undoAction?: () => Promise<void>
    ) => {
        createSuccessMessage(
            linkInfos,
            ok,
            (name: string) => c('Notification').t`"${name}" successfully moved`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item successfully moved`,
                    `${numberOfItems} items successfully moved`,
                    numberOfItems
                ),
            undoAction
        );
        createFailureMessage(
            linkInfos,
            failures,
            (name: string) => c('Notification').t`"${name}" failed to be moved`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item failed to be moved`,
                    `${numberOfItems} items failed to be moved`,
                    numberOfItems
                )
        );
    };

    // Notifications for moving items to trash.
    const createTrashedItemsNotifications = (
        linkInfos: LinkInfo[],
        ok: string[],
        failures: { [linkId: string]: any },
        undoAction?: () => Promise<void>
    ) => {
        createSuccessMessage(
            linkInfos,
            ok,
            (name: string) => c('Notification').t`"${name}" moved to trash`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item moved to trash`,
                    `${numberOfItems} items moved to trash`,
                    numberOfItems
                ),
            undoAction
        );
        createFailureMessage(
            linkInfos,
            failures,
            (name: string) => c('Notification').t`"${name}" failed to be moved to trash`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item failed to be moved to trash`,
                    `${numberOfItems} items failed to be moved to trash`,
                    numberOfItems
                )
        );
    };

    // Notifications for restoring items from trash.
    const createRestoredItemsNotifications = (
        linkInfos: LinkInfo[],
        ok: string[],
        failures: { [linkId: string]: any },
        undoAction?: () => Promise<void>
    ) => {
        createSuccessMessage(
            linkInfos,
            ok,
            (name: string) => c('Notification').t`"${name}" restored from trash`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item restored from trash`,
                    `${numberOfItems} items restored from trash`,
                    numberOfItems
                ),
            undoAction
        );
        createFailureMessage(
            linkInfos,
            failures,
            (name: string) => c('Notification').t`"${name}" failed to be restored from trash`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item failed to be restored from trash`,
                    `${numberOfItems} items failed to be restored from trash`,
                    numberOfItems
                )
        );
    };

    // Notifications for permanent deletion from trash. No undo possible.
    const createDeletedItemsNotifications = (
        linkInfos: LinkInfo[],
        ok: string[],
        failures: { [linkId: string]: any }
    ) => {
        createSuccessMessage(
            linkInfos,
            ok,
            (name: string) => c('Notification').t`"${name}" deleted permanently from trash`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item deleted permanently from trash`,
                    `${numberOfItems} items deleted permanently from trash`,
                    numberOfItems
                )
        );
        createFailureMessage(
            linkInfos,
            failures,
            (name: string) => c('Notification').t`"${name}" failed to be deleted permanently from trash`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} item failed to be deleted permanently from trash`,
                    `${numberOfItems} items failed to be deleted permanently from trash`,
                    numberOfItems
                )
        );
    };

    // Notifications for deleting public share links. No undo possible.
    const createDeletedSharedLinksNotifications = (
        linkInfos: LinkInfo[],
        ok: string[],
        failures: { [linkId: string]: any }
    ) => {
        createSuccessMessage(
            linkInfos,
            ok,
            (name: string) => c('Notification').t`The link to "${name}" was deleted`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} link to your item was deleted`,
                    `${numberOfItems} links to your items were deleted`,
                    numberOfItems
                )
        );
        createFailureMessage(
            linkInfos,
            failures,
            (name: string) => c('Notification').t`The link to "${name}" failed to be deleted`,
            (numberOfItems: number) =>
                c('Notification').ngettext(
                    msgid`${numberOfItems} link to your item failed to be deleted`,
                    `${numberOfItems} links to your items failed to be deleted`,
                    numberOfItems
                )
        );
    };

    return {
        createMovedItemsNotifications,
        createTrashedItemsNotifications,
        createRestoredItemsNotifications,
        createDeletedItemsNotifications,
        createDeletedSharedLinksNotifications,
    };
}
3,019
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_api/index.ts
// Barrel file for the API layer: debounced requests, public-link auth and
// session handling, plus the payload->model transformers.
export { default as useDebouncedRequest } from './useDebouncedRequest';
export { default as usePublicAuth } from './usePublicAuth';
export { default as usePublicSession, PublicSessionProvider } from './usePublicSession';
export * from './transformers';
3,020
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_api/transformers.ts
import { EVENT_TYPES } from '@proton/shared/lib/drive/constants';
import { isMainShare } from '@proton/shared/lib/drive/utils/share';
import type { DevicePayload } from '@proton/shared/lib/interfaces/drive/device';
import type { DriveEventsResult } from '@proton/shared/lib/interfaces/drive/events';
import { DriveFileRevisionPayload } from '@proton/shared/lib/interfaces/drive/file';
import { LinkMeta, LinkType, SharedUrlInfo } from '@proton/shared/lib/interfaces/drive/link';
import type { Photo as PhotoPayload } from '@proton/shared/lib/interfaces/drive/photos';
import type { ShareMeta, ShareMetaShort } from '@proton/shared/lib/interfaces/drive/share';
import type { ShareURL as ShareURLPayload } from '@proton/shared/lib/interfaces/drive/sharing';

import type { Device } from '../_devices';
import type { DriveEvents } from '../_events';
import type { EncryptedLink } from '../_links';
import type { Photo } from '../_photos';
import type { DriveFileRevision } from '../_revisions';
import { hasCustomPassword, hasGeneratedPasswordIncluded } from '../_shares';
import type { Share, ShareURL, ShareWithKey } from '../_shares';

// This module translates raw API payloads (PascalCase fields) into the
// camelCase model objects used by the store layer.

// LinkMetaWithShareURL is used when loading shared links.
// We need this to load information about number of accesses.
type LinkMetaWithShareURL = LinkMeta & {
    ShareUrls: (SharedUrlInfo & {
        ShareURL?: ShareURLPayload;
    })[];
};

/**
 * Converts a link payload into the EncryptedLink model.
 *
 * @param shareId - Context share through which the link was loaded; stored
 * as `rootShareId` on the result.
 */
export function linkMetaToEncryptedLink(link: LinkMetaWithShareURL, shareId: string): EncryptedLink {
    return {
        linkId: link.LinkID,
        parentLinkId: link.ParentLinkID,
        // API recognises only file and folder at this moment. In the future,
        // it might include hard- and soft-links, but still, for our case we
        // will differenciate only between files and folders, so we can convert
        // to simple boolean property.
        isFile: link.Type === LinkType.FILE,
        name: link.Name,
        nameSignatureAddress: link.NameSignatureEmail,
        mimeType: link.MIMEType,
        size: link.Size,
        hash: link.Hash,
        activeRevision: link.FileProperties?.ActiveRevision
            ? {
                  id: link.FileProperties.ActiveRevision.ID,
                  size: link.FileProperties.ActiveRevision.Size,
                  signatureAddress: link.FileProperties.ActiveRevision.SignatureAddress,
                  thumbnail: link.FileProperties.ActiveRevision.ThumbnailURLInfo
                      ? {
                            bareUrl: link.FileProperties.ActiveRevision.ThumbnailURLInfo.BareURL,
                            token: link.FileProperties.ActiveRevision.ThumbnailURLInfo.Token,
                        }
                      : undefined,
                  photo: link.FileProperties.ActiveRevision.Photo
                      ? {
                            linkId: link.FileProperties.ActiveRevision.Photo.LinkID,
                            captureTime: link.FileProperties.ActiveRevision.Photo.CaptureTime,
                            contentHash: link.FileProperties.ActiveRevision.Photo.ContentHash ?? undefined,
                            mainPhotoLinkId: link.FileProperties.ActiveRevision.Photo.MainPhotoLinkID ?? undefined,
                            hash: link.FileProperties.ActiveRevision.Photo.Hash ?? undefined,
                        }
                      : undefined,
              }
            : undefined,
        createTime: link.CreateTime,
        metaDataModifyTime: link.ModifyTime,
        trashed: link.Trashed,
        hasThumbnail: link.FileProperties?.ActiveRevision?.Thumbnail === 1,
        isShared: !!link.Shared,
        shareId: link.ShareIDs?.length > 0 ? link.ShareIDs[0] : undefined,
        rootShareId: shareId,
        // Only the first share URL is surfaced; links carry at most one in
        // practice (see LinkMetaWithShareURL above).
        shareUrl:
            link.ShareUrls?.length > 0
                ? {
                      id: link.ShareUrls[0].ShareUrlID,
                      token: link.ShareUrls[0].Token,
                      isExpired: link.UrlsExpired,
                      createTime: link.ShareUrls[0].CreateTime,
                      expireTime: link.ShareUrls[0].ExpireTime,
                      numAccesses: link.ShareUrls[0].NumAccesses,
                  }
                : undefined,
        nodeKey: link.NodeKey,
        nodePassphrase: link.NodePassphrase,
        nodePassphraseSignature: link.NodePassphraseSignature,
        nodeHashKey: link.FolderProperties?.NodeHashKey,
        contentKeyPacket: link.FileProperties?.ContentKeyPacket,
        contentKeyPacketSignature: link.FileProperties?.ContentKeyPacketSignature,
        signatureAddress: link.SignatureAddress,
        xAttr: link.XAttr,
    };
}

/**
 * Converts a short share payload into the Share model.
 */
export function shareMetaShortToShare(share: ShareMetaShort): Share {
    return {
        shareId: share.ShareID,
        rootLinkId: share.LinkID,
        volumeId: share.VolumeID,
        creator: share.Creator,
        isLocked: share.Locked,
        isDefault: isMainShare(share),
        isVolumeSoftDeleted: share.VolumeSoftDeleted,
        possibleKeyPackets: (share.PossibleKeyPackets || []).map(({ KeyPacket }) => KeyPacket),
        type: share.Type,
        state: share.State,
    };
}

/**
 * Converts a full share payload (including key material) into ShareWithKey.
 */
export function shareMetaToShareWithKey(share: ShareMeta): ShareWithKey {
    return {
        ...shareMetaShortToShare(share),
        key: share.Key,
        passphrase: share.Passphrase,
        passphraseSignature: share.PassphraseSignature,
        addressId: share.AddressID,
        rootLinkRecoveryPassphrase: share.RootLinkRecoveryPassphrase,
    };
}

/**
 * Converts an events-endpoint response into the DriveEvents model.
 */
export function driveEventsResultToDriveEvents({ EventID, Events, Refresh }: DriveEventsResult): DriveEvents {
    return {
        eventId: EventID,
        events: Events.map((event) => ({
            eventType: event.EventType,
            // ContextShareID is guaranteed to be on the event for all types
            // besides delete (after link is deleted, it is not possible to
            // find the share it was part of). For delete operation, it is
            // fine to keep rootShareId empty as its only for deleting data
            // from cache. In future, once the cache is volume oriented, it
            // will not be a problem, because we will always know proper
            // volume ID.
            encryptedLink:
                event.EventType === EVENT_TYPES.DELETE
                    ? linkMetaToEncryptedLink(event.Link, '')
                    : linkMetaToEncryptedLink(event.Link, event.ContextShareID),
            originShareId: event.EventType === EVENT_TYPES.DELETE ? undefined : event.FromContextShareID,
        })),
        refresh: Refresh !== 0,
    };
}

/**
 * Converts a device payload (device + its backing share) into the Device
 * model. `haveLegacyName` is true when the share still carries a name.
 */
export const deviceInfoToDevices = (info: DevicePayload): Device => {
    return {
        id: info.Device.DeviceID,
        volumeId: info.Device.VolumeID,
        shareId: info.Share.ShareID,
        name: info.Share.Name,
        modificationTime: info.Device.ModifyTime,
        linkId: info.Share.LinkID,
        haveLegacyName: !!info.Share.Name,
    };
};

/**
 * Converts a share-URL payload into the ShareURL model; password flags are
 * derived from the payload's Flags bitfield.
 */
export const shareUrlPayloadToShareUrl = (shareUrl: ShareURLPayload): ShareURL => {
    return {
        shareId: shareUrl.ShareID,
        shareUrlId: shareUrl.ShareURLID,
        expirationTime: shareUrl.ExpirationTime,
        creatorEmail: shareUrl.CreatorEmail,
        password: shareUrl.Password,
        flags: shareUrl.Flags,
        token: shareUrl.Token,
        publicUrl: shareUrl.PublicUrl,
        sharePassphraseKeyPacket: shareUrl.SharePassphraseKeyPacket,
        sharePasswordSalt: shareUrl.SharePasswordSalt,
        hasCustomPassword: hasCustomPassword({ flags: shareUrl.Flags }),
        hasGeneratedPasswordIncluded: hasGeneratedPasswordIncluded({ flags: shareUrl.Flags }),
        numAccesses: shareUrl.NumAccesses,
        urlPasswordSalt: shareUrl.UrlPasswordSalt,
        srpVerifier: shareUrl.SRPVerifier,
        srpModulusID: shareUrl.SRPModulusID,
        maxAccesses: shareUrl.MaxAccesses,
        permissions: shareUrl.Permissions,
    };
};

/**
 * Converts a photo payload into the Photo model, normalizing nulls to
 * undefined.
 */
export const photoPayloadToPhotos = (photo: PhotoPayload): Photo => {
    return {
        linkId: photo.LinkID,
        captureTime: photo.CaptureTime,
        mainPhotoLinkId: photo.MainPhotoLinkID ?? undefined,
        hash: photo.Hash ?? undefined,
        contentHash: photo.ContentHash ?? undefined,
    };
};

/**
 * Converts a revision payload into the DriveFileRevision model.
 */
export const revisionPayloadToRevision = (revision: DriveFileRevisionPayload): DriveFileRevision => {
    return {
        id: revision.ID,
        createTime: revision.CreateTime,
        size: revision.Size,
        state: revision.State,
        manifestSignature: revision.ManifestSignature,
        signatureAddress: revision.SignatureAddress,
        signatureEmail: revision.SignatureEmail,
        // NOTE(review): 'blocs' looks like a typo for 'blocks', but it must
        // match the DriveFileRevision interface declared in _revisions —
        // confirm there before renaming.
        blocs: revision.Blocks,
        thumbnails: revision.Thumbnails,
        xAttr: revision.XAttr,
    };
};
3,021
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_api/useDebouncedRequest.ts
import { useApi } from '@proton/components';

import { useDebouncedFunction } from '../_utils';

/**
 * useDebouncedRequest returns an API caller that deduplicates identical
 * in-flight requests: two calls with the same arguments share one network
 * request, keyed by the argument object.
 */
export default function useDebouncedRequest() {
    const api = useApi();
    const debounce = useDebouncedFunction();

    const debouncedRequest = <T>(args: object, abortSignal?: AbortSignal): Promise<T> => {
        // The request itself receives a signal managed by the debouncer,
        // which aborts only once every interested caller has aborted.
        const doRequest = (signal: AbortSignal) => api<T>({ signal, ...args });
        return debounce(doRequest, args, abortSignal);
    };

    return debouncedRequest;
}
3,022
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_api/usePublicAuth.ts
import { useEffect, useState } from 'react';

import { c } from 'ttag';

import { useNotifications } from '@proton/components';
import { useLoading } from '@proton/hooks';
import { getApiError } from '@proton/shared/lib/api/helpers/apiErrorHelper';
import { HTTP_STATUS_CODE } from '@proton/shared/lib/constants';
import { RESPONSE_CODE } from '@proton/shared/lib/drive/constants';

import { sendErrorReport } from '../../utils/errorHandling';
import { ERROR_CODE_INVALID_SRP_PARAMS, default as usePublicSession } from './usePublicSession';

/**
 * usePublicAuth automatically starts SRP handshake and if not password is
 * needed, it also continues automatically with initiating session.
 * In case custom password is set, it will be set in `isPasswordNeeded` and
 * then `submitPassword` callback should be used.
 */
export default function usePublicAuth(token: string, urlPassword: string) {
    const { createNotification } = useNotifications();
    const { hasSession, initHandshake, initSession } = usePublicSession();

    // isLoading covers the whole automatic handshake + session flow below.
    const [isLoading, withLoading] = useLoading(true);
    // Fatal, user-facing error message; once set, the link cannot be opened.
    const [error, setError] = useState<string | undefined>();
    const [isPasswordNeeded, setIsPasswordNeeded] = useState(false);

    /**
     * handleInitialLoadError processes error from initializing handshake
     * or session. It provides custom message in case of not existing link,
     * otherwise it uses the message from API. Any non-structured error is
     * converted to general message about failure and is reported to Sentry.
     */
    const handleInitialLoadError = (error: any) => {
        const apiError = getApiError(error);
        if (apiError.status === HTTP_STATUS_CODE.NOT_FOUND || apiError.code === RESPONSE_CODE.NOT_FOUND) {
            setError(c('Title').t`The link either does not exist or has expired`);
            return;
        }
        // Code 2026 can happen for different reasons.
        // During initHandshake, it can happen when "volume is not available"
        // or "file has reached the download limit".
        // During initSession, it can mean the generated password is wrong or
        // custom password is needed. These cases are handled manually in
        // useEffect below and doesn't have to be taken care of here as here
        // would not be possible to distinguish the situation anyway.
        if (apiError.code === ERROR_CODE_INVALID_SRP_PARAMS) {
            setError(c('Title').t`The link expired`);
            return;
        }
        // Any other message from API, for example "Volume is not available".
        if (apiError.message) {
            setError(apiError.message);
            return;
        }
        // Unstructured error: show a generic message and report to Sentry.
        setError(c('Title').t`Cannot load link`);
        sendErrorReport(error);
    };

    // Automatic flow: handshake, then (if no custom password) session init.
    // An SRP failure during session init means a custom password is needed.
    useEffect(() => {
        if (hasSession) {
            return;
        }

        void withLoading(
            initHandshake(token)
                .then(({ handshakeInfo, hasCustomPassword }) => {
                    if (hasCustomPassword) {
                        setIsPasswordNeeded(true);
                        return;
                    }
                    return initSession(token, urlPassword, handshakeInfo).catch((error) => {
                        const apiError = getApiError(error);
                        if (apiError.code === ERROR_CODE_INVALID_SRP_PARAMS) {
                            setIsPasswordNeeded(true);
                            return;
                        }
                        throw error;
                    });
                })
                .catch((error) => {
                    handleInitialLoadError(error);
                })
        );
    }, [hasSession]);

    // Manual flow: re-runs the handshake with the user-supplied custom
    // password. When the link also embeds a generated password in the URL,
    // both are concatenated. A wrong password surfaces as a notification so
    // the user can retry.
    const submitPassword = async (customPassword: string) => {
        await initHandshake(token)
            .then(async ({ handshakeInfo, hasGeneratedPasswordIncluded }) => {
                const password = hasGeneratedPasswordIncluded ? urlPassword + customPassword : customPassword;
                return initSession(token, password, handshakeInfo)
                    .then(() => setIsPasswordNeeded(false))
                    .catch((error) => {
                        const apiError = getApiError(error);
                        if (apiError.code === ERROR_CODE_INVALID_SRP_PARAMS) {
                            createNotification({
                                type: 'error',
                                text: c('Error').t`Incorrect password. Please try again.`,
                            });
                            return;
                        }
                        throw error;
                    });
            })
            .catch((error) => {
                handleInitialLoadError(error);
            });
    };

    return {
        isLoading,
        error,
        isPasswordNeeded,
        submitPassword,
    };
}
3,023
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_api/usePublicSession.tsx
import { createContext, useContext, useRef, useState } from 'react';

import { useApi } from '@proton/components';
import { queryInitSRPHandshake, queryShareURLAuth } from '@proton/shared/lib/api/drive/sharing';
import { getApiError } from '@proton/shared/lib/api/helpers/apiErrorHelper';
import { HTTP_ERROR_CODES } from '@proton/shared/lib/errors';
import { withAuthHeaders } from '@proton/shared/lib/fetch/headers';
import { SRPHandshakeInfo } from '@proton/shared/lib/interfaces/drive/sharing';
import { srpAuth } from '@proton/shared/lib/srp';

import retryOnError from '../../utils/retryOnError';
import { hasCustomPassword, hasGeneratedPasswordIncluded } from '../_shares';
import useDebouncedRequest from './useDebouncedRequest';

// API error code returned when SRP proof verification fails (e.g. wrong
// password). Exported so callers can distinguish this case.
export const ERROR_CODE_INVALID_SRP_PARAMS = 2026;

// How many times an authenticated request is retried after re-auth.
const AUTH_RETRY_COUNT = 2;

// Credentials and tokens of the currently unlocked public session.
interface SessionInfo {
    token: string;
    password: string;
    accessToken: string;
    sessionUid: string;
}

/**
 * usePublicSession maintains authentication of public session for shared links.
 * To properly authenticate, user should first init SRP handshake, followed by
 * unlocking session with password. Then any allowed request for this session
 * can be made. In case the access token expires, request will automatically
 * reauth with the same token and password and retry.
 */
function usePublicSessionProvider() {
    const api = useApi();
    const debouncedRequest = useDebouncedRequest();
    const [hasSession, setHasSession] = useState(false);
    // Kept in a ref (not state) so request callbacks always read the latest
    // credentials without re-rendering consumers.
    const sessionInfo = useRef<SessionInfo>();

    // Starts the SRP handshake and reports which password flags are set on
    // the share URL.
    const initHandshake = async (token: string) => {
        return api<SRPHandshakeInfo>(queryInitSRPHandshake(token)).then((handshakeInfo) => {
            return {
                handshakeInfo,
                hasCustomPassword: hasCustomPassword({ flags: handshakeInfo.Flags }),
                hasGeneratedPasswordIncluded: hasGeneratedPasswordIncluded({ flags: handshakeInfo.Flags }),
            };
        });
    };

    // Performs the SRP proof against the share URL auth endpoint and returns
    // the session tokens.
    const getSessionToken = async (
        token: string,
        password: string,
        initHandshake: SRPHandshakeInfo
    ): Promise<{ AccessToken: string; UID: string }> => {
        const { Modulus, ServerEphemeral, UrlPasswordSalt, SRPSession, Version } = initHandshake;
        const response = await srpAuth({
            api,
            credentials: { password },
            info: {
                Modulus,
                ServerEphemeral,
                Version,
                Salt: UrlPasswordSalt,
                SRPSession,
            },
            config: queryShareURLAuth(token),
        });
        return response.json();
    };

    // Unlocks the session and stores the credentials for later requests and
    // re-authentication.
    const initSession = async (token: string, password: string, handshakeInfo: SRPHandshakeInfo) => {
        return getSessionToken(token, password, handshakeInfo).then(({ AccessToken, UID }) => {
            setHasSession(true);
            sessionInfo.current = {
                token,
                password,
                accessToken: AccessToken,
                sessionUid: UID,
            };
            return sessionInfo.current;
        });
    };

    const queryWithHeaders = (query: any) => {
        if (!sessionInfo.current) {
            // This should not happen. If you see this, it indicates a wrong flow.
            throw new Error('Cannot query unauthenticated session');
        }

        return withAuthHeaders(sessionInfo.current.sessionUid, sessionInfo.current.accessToken, query);
    };

    // A 401 means the short-lived access token expired; the caller should
    // re-auth with the stored token+password and retry.
    const shouldReauth = (error: any) => {
        const apiError = getApiError(error);
        return apiError.status === HTTP_ERROR_CODES.UNAUTHORIZED;
    };

    const reauth = async () => {
        if (!sessionInfo.current) {
            // This should not happen. If you see this, it indicates a wrong flow.
            throw new Error('Cannot reauth unauthenticated session');
        }
        const { handshakeInfo } = await initHandshake(sessionInfo.current.token);
        await initSession(sessionInfo.current.token, sessionInfo.current.password, handshakeInfo).catch((err) => {
            // Custom password was changed probably, lets refresh and ask again.
            if (err?.data?.Code === ERROR_CODE_INVALID_SRP_PARAMS) {
                setHasSession(false);
            }
            throw err;
        });
    };

    // Authenticated request wrapper: adds session headers and transparently
    // re-auths + retries on expired tokens.
    const request = <T,>(args: any, abortSignal?: AbortSignal) => {
        const fn = () => debouncedRequest<T>(queryWithHeaders(args), abortSignal);

        return retryOnError<T>({
            fn,
            shouldRetryBasedOnError: shouldReauth,
            beforeRetryCallback: reauth,
            maxRetriesNumber: AUTH_RETRY_COUNT,
        })();
    };

    const getSessionInfo = () => sessionInfo.current;

    return {
        hasSession,
        initHandshake,
        initSession,
        request,
        getSessionInfo,
    };
}

const PublicSessionContext = createContext<ReturnType<typeof usePublicSessionProvider> | null>(null);

export function PublicSessionProvider({ children }: { children: React.ReactNode }) {
    const value = usePublicSessionProvider();
    return <PublicSessionContext.Provider value={value}>{children}</PublicSessionContext.Provider>;
}

export default function usePublicSession() {
    const state = useContext(PublicSessionContext);
    if (!state) {
        throw new Error('Trying to use uninitialized PublicSessionProvider');
    }
    return state;
}
3,024
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_crypto/driveCrypto.ts
import { PrivateKeyReference, PublicKeyReference, toPublicKeyReference } from '@proton/crypto';
import { ADDRESS_STATUS } from '@proton/shared/lib/constants';
import { canonicalizeInternalEmail } from '@proton/shared/lib/helpers/email';
import { DecryptedKey } from '@proton/shared/lib/interfaces';
import { Address } from '@proton/shared/lib/interfaces/Address';
import { getPrimaryKey } from '@proton/shared/lib/keys';
import { decryptPassphrase } from '@proton/shared/lib/keys/drivePassphrase';
import { splitKeys } from '@proton/shared/lib/keys/keys';

import { ShareWithKey } from '../_shares';

/** Primary key pair of an address, as used by Drive crypto operations. */
export interface PrimaryAddressKey {
    privateKey: PrivateKeyReference;
    publicKey: PublicKeyReference | undefined;
    address: Address;
}

// Special case for drive to allow users with just an external address
export const getActiveAddresses = (addresses: Address[]): Address[] => {
    return addresses.filter(({ Status }) => Status === ADDRESS_STATUS.STATUS_ENABLED);
};

/** Returns the first active address; throws when the user has none. */
export const getPrimaryAddressAsync = async (getAddresses: () => Promise<Address[]>) => {
    const addresses = await getAddresses();
    const [activeAddress] = getActiveAddresses(addresses);

    if (!activeAddress) {
        throw new Error('User has no active address');
    }

    return activeAddress;
};

/** Returns the primary key pair of the primary (first active) address. */
export const getPrimaryAddressKeyAsync = async (
    getPrimaryAddress: () => Promise<Address>,
    getAddressKeys: (id: string) => Promise<DecryptedKey[]>
) => {
    const activeAddress = await getPrimaryAddress();
    const addressKeys = await getAddressKeys(activeAddress.ID);
    const { privateKey, publicKey: maybePublicKey } = getPrimaryKey(addressKeys) || {};

    if (!privateKey) {
        // Should never happen
        throw new Error('Primary private key is not available');
    }
    // Derive the public key from the private one when not provided.
    const publicKey = maybePublicKey || (await toPublicKeyReference(privateKey));

    return { privateKey, publicKey, address: activeAddress };
};

// Finds own address matching the email, comparing canonicalized forms.
const getOwnAddress = async (email: string, getAddresses: () => Promise<Address[]>) => {
    // Some characters can be changed but still be the same email.
    return (await getAddresses()).find(
        ({ Email }) => canonicalizeInternalEmail(Email) === canonicalizeInternalEmail(email)
    );
};

// Loads the decrypted keys of own address matching the email; empty object
// when no such address exists.
const getOwnAddressAndKeys = async (
    email: string,
    getAddresses: () => Promise<Address[]>,
    getAddressKeys: (id: string) => Promise<DecryptedKey[]>
) => {
    const address = await getOwnAddress(email, getAddresses);
    if (!address) {
        return {};
    }
    const addressKeys = await getAddressKeys(address.ID);

    return { address, addressKeys };
};

/**
 * Returns the address matching the email together with its primary key pair.
 * Throws when the address or its primary private key cannot be found.
 */
export const getOwnAddressAndPrimaryKeysAsync = async (
    email: string,
    getAddresses: () => Promise<Address[]>,
    getAddressKeys: (id: string) => Promise<DecryptedKey[]>
) => {
    const { address, addressKeys } = await getOwnAddressAndKeys(email, getAddresses, getAddressKeys);
    const { privateKey, publicKey } = getPrimaryKey(addressKeys) || {};

    if (!privateKey) {
        // Should never happen
        throw new Error('Primary private key is not available');
    }
    if (!address) {
        // Should never happen
        throw new Error('Address is not available');
    }

    return { address, privateKey, publicKey: publicKey || (await toPublicKeyReference(privateKey)) };
};

/**
 * Returns split (private/public) key lists of own address matching the email,
 * or undefined when the address is not found.
 */
export const getOwnAddressKeysAsync = async (
    email: string,
    getAddresses: () => Promise<Address[]>,
    getAddressKeys: (id: string) => Promise<DecryptedKey[]>
) => {
    const { addressKeys } = await getOwnAddressAndKeys(email, getAddresses, getAddressKeys);
    return addressKeys ? splitKeys(addressKeys) : undefined;
};

/**
 * Decrypts the share passphrase and verifies its signature against the
 * creator's public keys.
 */
export const decryptSharePassphraseAsync = async (
    meta: ShareWithKey,
    privateKeys: PrivateKeyReference[],
    getVerificationKey: (email: string) => Promise<PublicKeyReference[]>
) => {
    const publicKeys = await getVerificationKey(meta.creator);
    return decryptPassphrase({
        armoredPassphrase: meta.passphrase,
        armoredSignature: meta.passphraseSignature,
        privateKeys,
        publicKeys,
    });
};
3,025
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_crypto/index.ts
export { default as useDriveCrypto } from './useDriveCrypto';
3,026
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_crypto/useDriveCrypto.ts
import { useCallback } from 'react';

import { c } from 'ttag';

import { useAuthentication, useGetAddressKeys, useGetAddresses, useNotifications } from '@proton/components';
import { PrivateKeyReference } from '@proton/crypto';
import { ADDRESS_STATUS } from '@proton/shared/lib/constants';
import { Address } from '@proton/shared/lib/interfaces/Address';
import { sign as signMessage } from '@proton/shared/lib/keys/driveKeys';

import { ShareWithKey } from '../_shares';
import {
    decryptSharePassphraseAsync,
    getOwnAddressAndPrimaryKeysAsync,
    getOwnAddressKeysAsync,
    getPrimaryAddressAsync,
    getPrimaryAddressKeyAsync,
} from './driveCrypto';

// Special case for drive to allow users with just an external address
// NOTE(review): duplicated in driveCrypto.ts — consider importing from there.
export const getActiveAddresses = (addresses: Address[]): Address[] => {
    return addresses.filter(({ Status }) => Status === ADDRESS_STATUS.STATUS_ENABLED);
};

/**
 * useDriveCrypto binds the pure helpers from driveCrypto.ts to the app's
 * address/key hooks and exposes key lookup, signing and share-passphrase
 * decryption for Drive.
 */
function useDriveCrypto() {
    const { createNotification } = useNotifications();
    const getAddressKeys = useGetAddressKeys();
    const getAddresses = useGetAddresses();
    const { UID } = useAuthentication();

    // Notifies the user and rethrows when no active address exists.
    const getPrimaryAddress = useCallback(async () => {
        return getPrimaryAddressAsync(getAddresses).catch((error) => {
            createNotification({ text: c('Error').t`No valid address found`, type: 'error' });
            throw error;
        });
    }, [getAddresses]);

    // getPrimaryAddressKey returns only currently primary key of the primary
    // address. Used for bootstrapping.
    const getPrimaryAddressKey = useCallback(async () => {
        return getPrimaryAddressKeyAsync(getPrimaryAddress, getAddressKeys);
    }, [getPrimaryAddress, getAddressKeys]);

    const getOwnAddressKeys = useCallback(
        async (email: string) => getOwnAddressKeysAsync(email, getAddresses, getAddressKeys),
        [getAddresses, getAddressKeys]
    );

    // This should be used for encryption and signing of any content.
    const getOwnAddressAndPrimaryKeys = useCallback(
        async (email: string) => getOwnAddressAndPrimaryKeysAsync(email, getAddresses, getAddressKeys),
        [getAddresses, getAddressKeys]
    );

    // Returns the private keys of the address matching the email, or an empty
    // list when the address is not found.
    const getPrivateAddressKeys = useCallback(
        async (email: string) => {
            const result = await getOwnAddressKeysAsync(email, getAddresses, getAddressKeys);
            return result?.privateKeys || [];
        },
        [getAddresses, getAddressKeys]
    );

    const getVerificationKey = useCallback(
        async (email?: string) => {
            // If UID is empty, it means user is not logged in.
            // We don't support checking signatures for public session yet
            // so lets simply return no keys instead of firing exceptions.
            if (!email || !UID) {
                return [];
            }
            const result = await getOwnAddressKeysAsync(email, getAddresses, getAddressKeys);
            return result?.publicKeys || [];
        },
        // NOTE(review): UID is read above but missing from this deps list —
        // confirm whether a login-state change should refresh this callback.
        [getAddresses, getAddressKeys]
    );

    // Signs the payload; defaults to the primary address key when no explicit
    // keys are provided.
    const sign = useCallback(
        async (payload: string | Uint8Array, keys?: { privateKey: PrivateKeyReference; address: Address }) => {
            const { privateKey, address } = keys || (await getPrimaryAddressKey());
            const signature = await signMessage(payload, [privateKey]);
            return { signature, address };
        },
        [getPrimaryAddressKey]
    );

    /**
     * Decrypts share passphrase. By default decrypts with the same user's keys who encrypted.
     * Keys can be passed explicitly if user is different, i.e. in case of sharing between users.
     * @param meta share metadata
     * @param privateKeys keys to use, when the user is not the same who encrypted
     */
    const decryptSharePassphrase = async (meta: ShareWithKey, privateKeys?: PrivateKeyReference[]) => {
        if (!privateKeys) {
            const keys = await getOwnAddressKeys(meta.creator);

            if (!keys) {
                throw new Error('Address key was not found');
            }

            privateKeys = keys.privateKeys;
        }
        return decryptSharePassphraseAsync(meta, privateKeys, getVerificationKey);
    };

    return {
        getPrimaryAddressKey,
        getOwnAddressAndPrimaryKeys,
        getPrivateAddressKeys,
        getVerificationKey,
        getPrimaryAddress,
        sign,
        decryptSharePassphrase,
    };
}

export default useDriveCrypto;
3,027
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_devices/index.ts
export { default as useDevicesListing, DevicesListingProvider as DevicesProvider } from './useDevicesListing'; export * from './interface';
3,028
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_devices/interface.ts
export interface Device { id: string; volumeId: string; shareId: string; linkId: string; name: string; modificationTime: number; haveLegacyName: boolean; } export type DevicesState = { [deviceId: string]: Device; };
3,029
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_devices/useDevicesActions.ts
import { useApi, usePreventLeave } from '@proton/components'; import { queryDeviceDeletion, queryDeviceRename } from '@proton/shared/lib/api/drive/devices'; import useDevicesListing from './useDevicesListing'; /** * useDevicesActions provides actions for manipulating with devices. */ export default function useDevicesActions() { const { preventLeave } = usePreventLeave(); const { renameCachedDevice, removeCachedDevice } = useDevicesListing(); const api = useApi(); const remove = async (deviceId: string, abortSignal?: AbortSignal) => { await preventLeave( api({ ...queryDeviceDeletion(deviceId), signal: abortSignal, }).then(() => { removeCachedDevice(deviceId); }) ); }; const rename = async ( params: { deviceId: string; newName: string; haveLegacyName: boolean }, abortSignal?: AbortSignal ) => { if (params.haveLegacyName) { await preventLeave( api({ ...queryDeviceRename(params.deviceId, { Name: '' }), signal: abortSignal, }).then(() => { renameCachedDevice(params.deviceId, params.newName); }) ); } }; return { remove, rename, }; }
3,030
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_devices/useDevicesApi.ts
import { queryDevices } from '@proton/shared/lib/api/drive/devices'; import { DevicesResult } from '@proton/shared/lib/interfaces/drive/device'; import { deviceInfoToDevices, useDebouncedRequest } from '../_api'; import { DevicesState } from './interface'; export default function useDevicesApi() { const debouncedRequest = useDebouncedRequest(); /* eslint-disable-next-line */ const loadDevices = async (abortSignal?: AbortSignal): Promise<DevicesState> => { const res = await debouncedRequest<DevicesResult>({ ...queryDevices(), signal: abortSignal, }); const responseDevices = res ? res.Devices : []; const devices = responseDevices.map(deviceInfoToDevices).reduce((acc, device) => { const { id } = device; acc[id] = device; return acc; }, {} as DevicesState); return devices; }; return { loadDevices, }; }
3,031
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_devices/useDevicesListing.test.tsx
import { act, renderHook } from '@testing-library/react-hooks';

import { sendErrorReport } from '../../utils/errorHandling';
import { VolumesStateProvider } from '../_volumes/useVolumesState';
import { Device } from './interface';
import { useDevicesListingProvider } from './useDevicesListing';

const SHARE_ID_0 = 'shareId0';
const SHARE_ID_1 = 'shareId1';

// Device with a legacy name stored on the device record itself.
const DEVICE_0: Device = {
    id: '1',
    volumeId: '1',
    shareId: SHARE_ID_0,
    linkId: 'linkId0',
    name: 'HOME-DESKTOP',
    modificationTime: Date.now(),
    haveLegacyName: true,
};

const DEVICE_1: Device = {
    id: '2',
    volumeId: '1',
    shareId: SHARE_ID_1,
    linkId: 'linkId1',
    name: 'Macbook Pro',
    modificationTime: Date.now(),
    haveLegacyName: true,
};

// Device without a name: the provider must fetch it from the root link.
const DEVICE_2: Device = {
    id: '3',
    volumeId: '1',
    shareId: SHARE_ID_1,
    linkId: 'linkId1',
    name: '',
    modificationTime: Date.now(),
    haveLegacyName: false,
};

const mockDevicesPayload = [DEVICE_0, DEVICE_1];

jest.mock('../../utils/errorHandling');
const mockedSendErrorReport = jest.mocked(sendErrorReport);

const mockedCreateNotification = jest.fn();
jest.mock('@proton/components/hooks', () => {
    return {
        useNotifications: jest.fn(() => ({ createNotification: mockedCreateNotification })),
    };
});

const mockedLoadDevices = jest.fn().mockResolvedValue(mockDevicesPayload);
jest.mock('./useDevicesApi', () => {
    const useDeviceApi = () => {
        return {
            loadDevices: mockedLoadDevices,
        };
    };
    return useDeviceApi;
});

const mockedGetLink = jest.fn();
jest.mock('../_links', () => {
    const useLink = jest.fn(() => ({
        getLink: mockedGetLink,
    }));
    return { useLink };
});

// NOTE(review): the suite name says "useLinksState" but these tests exercise
// useDevicesListingProvider — likely a copy-paste leftover.
describe('useLinksState', () => {
    let hook: {
        current: ReturnType<typeof useDevicesListingProvider>;
    };

    beforeEach(() => {
        const wrapper = ({ children }: { children: React.ReactNode }) => (
            <VolumesStateProvider>{children}</VolumesStateProvider>
        );

        mockedCreateNotification.mockClear();
        mockedGetLink.mockClear();
        mockedSendErrorReport.mockClear();

        const { result } = renderHook(() => useDevicesListingProvider(), { wrapper });
        hook = result;
    });

    it('finds device by shareId', async () => {
        await act(async () => {
            await hook.current.loadDevices(new AbortController().signal);
            const device = hook.current.getDeviceByShareId(SHARE_ID_0);
            expect(device).toEqual(DEVICE_0);
        });
    });

    it('lists loaded devices', async () => {
        await act(async () => {
            await hook.current.loadDevices(new AbortController().signal);
            const cachedDevices = hook.current.cachedDevices;

            const targetList = [DEVICE_0, DEVICE_1];
            expect(cachedDevices).toEqual(targetList);
        });
    });

    it('should call getLink to get root link', async () => {
        mockedLoadDevices.mockResolvedValue([DEVICE_2]);
        await act(async () => {
            const name = 'rootName';
            mockedGetLink.mockResolvedValue({ name });
            await hook.current.loadDevices(new AbortController().signal);
            const cachedDevices = hook.current.cachedDevices;

            // The unnamed device gets its name from the root link.
            const targetList = [{ ...DEVICE_2, name }];
            expect(cachedDevices).toEqual(targetList);
            expect(mockedGetLink).toHaveBeenCalled();
            expect(mockedCreateNotification).not.toHaveBeenCalled();
            expect(mockedSendErrorReport).not.toHaveBeenCalled();
        });
    });

    it('should notify the user if there is a failure loading a link', async () => {
        mockedLoadDevices.mockResolvedValue([DEVICE_2]);
        await act(async () => {
            mockedGetLink.mockRejectedValue('error');
            await hook.current.loadDevices(new AbortController().signal);
            const cachedDevices = hook.current.cachedDevices;

            // The failing device is dropped, the user is notified and the
            // error is reported.
            const targetList: Device[] = [];
            expect(cachedDevices).toEqual(targetList);
            expect(mockedGetLink).toHaveBeenCalled();
            expect(mockedCreateNotification).toHaveBeenCalled();
            expect(mockedSendErrorReport).toHaveBeenCalled();
        });
    });
});
3,032
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_devices/useDevicesListing.tsx
import { createContext, useContext, useEffect, useState } from 'react'; import { c } from 'ttag'; import { useNotifications } from '@proton/components/hooks'; import { useLoading } from '@proton/hooks'; import { sendErrorReport } from '../../utils/errorHandling'; import { useLink } from '../_links'; import { useVolumesState } from '../_volumes'; import { Device } from './interface'; import useDevicesApi from './useDevicesApi'; export function useDevicesListingProvider() { const devicesApi = useDevicesApi(); const { getLink } = useLink(); const volumesState = useVolumesState(); const [state, setState] = useState<Map<string, Device>>(new Map()); const [isLoading, withLoading] = useLoading(); const { createNotification } = useNotifications(); const loadDevices = (abortSignal: AbortSignal) => withLoading(async () => { const devices = await devicesApi.loadDevices(abortSignal); if (devices) { const devicesMap = new Map(); let hasError = false; for (const key in devices) { const { volumeId, shareId, linkId, name } = devices[key]; try { volumesState.setVolumeShareIds(volumeId, [shareId]); devices[key] = { ...devices[key], name: name || (await getLink(abortSignal, shareId, linkId)).name, }; devicesMap.set(key, devices[key]); } catch (e) { hasError = true; // Send an error report for this sendErrorReport( new Error('Decrypting device failed', { cause: { e, volumeId, shareId, linkId } }) ); } } if (hasError) { createNotification({ type: 'error', text: c('Error').t`Error decrypting a computer`, }); } setState(devicesMap); } }); const getState = () => { return [...state.values()]; }; const getDeviceByShareId = (shareId: string) => { return getState().find((device) => { return device.shareId === shareId; }); }; const removeDevice = (deviceId: string) => { const newState = new Map(state); newState.delete(deviceId); setState(newState); }; const renameDevice = (deviceId: string, name: string) => { const newState = new Map(state); const device = newState.get(deviceId); if (!device) 
{ return; } newState.set(deviceId, { ...device, name, }); setState(newState); }; return { isLoading, loadDevices, cachedDevices: getState(), getDeviceByShareId, renameDevice, removeDevice, }; } const LinksListingContext = createContext<{ isLoading: boolean; cachedDevices: ReturnType<typeof useDevicesListingProvider>['cachedDevices']; getDeviceByShareId: ReturnType<typeof useDevicesListingProvider>['getDeviceByShareId']; removeCachedDevice: ReturnType<typeof useDevicesListingProvider>['removeDevice']; renameCachedDevice: ReturnType<typeof useDevicesListingProvider>['renameDevice']; } | null>(null); export function DevicesListingProvider({ children }: { children: React.ReactNode }) { const value = useDevicesListingProvider(); useEffect(() => { const ac = new AbortController(); value.loadDevices(ac.signal).catch(sendErrorReport); return () => { ac.abort(); }; }, []); return ( <LinksListingContext.Provider value={{ isLoading: value.isLoading, cachedDevices: value.cachedDevices, getDeviceByShareId: value.getDeviceByShareId, removeCachedDevice: value.removeDevice, renameCachedDevice: value.renameDevice, }} > {children} </LinksListingContext.Provider> ); } export default function useDevicesListing() { const state = useContext(LinksListingContext); if (!state) { throw new Error('Trying to use uninitialized LinksListingProvider'); } return state; }
3,033
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/ThumbnailDownloadProvider.tsx
import React, { createContext, useContext, useEffect, useMemo, useRef } from 'react'; import { VERIFICATION_STATUS } from '@proton/crypto'; import { MAX_THREADS_PER_DOWNLOAD } from '@proton/shared/lib/drive/constants'; import useNavigate from '../../hooks/drive/useNavigate'; import { logError } from '../../utils/errorHandling'; import { createAsyncQueue } from '../../utils/parallelRunners'; import { useLink } from '../_links'; interface DownloadProviderState { /** * Adds a thumbnail to the download queue. * * @param domRef If provided, will cancel the query if `ref.current` is null * when the queue processes the thumbnail. This is useful to * avoid processing items which are no longer visible. */ addToDownloadQueue: ( shareId: string, linkId: string, activeRevisionId?: string, domRef?: React.MutableRefObject<unknown> ) => void; } const ThumbnailsDownloadContext = createContext<DownloadProviderState | null>(null); const getDownloadIdString = ({ shareId, linkId, activeRevisionId, }: { shareId: string; linkId: string; activeRevisionId?: string; }) => { return shareId + linkId + (activeRevisionId || ''); }; /* ThumbnailsDownloadProvider is used to keep the number of simultaneous requests sent by a browser under control. Before implementing this, we had all thumbnails request being sent at one single moment, which filled up the browser request queue. Thus, having a folder with lots of images, you couldn't make any request, unless ALL thumbnail are loaded and the browser queue is freed (Example: one couldn't load a file preview right away after clicking on a file, and had to wait untill all thumbnails are done loading). This provider ensured that we load thumbnails by small portions, leaving a window for other requests to get to a queue in between them. 
*/ export const ThumbnailsDownloadProvider = ({ children, downloadThumbnail, }: { downloadThumbnail: ( signal: AbortSignal, shareId: string, linkId: string, downloadUrl: string, downloadToken: string ) => Promise<{ contents: Promise<Uint8Array[]>; verifiedPromise: Promise<VERIFICATION_STATUS>; }>; children: React.ReactNode; }) => { const { loadLinkThumbnail } = useLink(); const navigation = useNavigate(); const asyncQueue = useMemo(() => createAsyncQueue(MAX_THREADS_PER_DOWNLOAD), []); const queueLinkCache = useRef<Set<string>>(new Set()); const controls = useRef<Record<string, AbortController>>({}); const cancelDownloads = () => { queueLinkCache.current.forEach((id) => { controls.current[id]?.abort(); }); queueLinkCache.current = new Set(); asyncQueue.clearQueue(); }; useEffect(() => { const handlerId = navigation.addListener(() => { cancelDownloads(); }); return () => navigation.removeListener(handlerId); }, []); const handleThumbnailDownload = (shareId: string, linkId: string, downloadId: string) => { const ac = new AbortController(); controls.current[downloadId] = ac; return loadLinkThumbnail(ac.signal, shareId, linkId, async (downloadUrl: string, downloadToken: string) => { return downloadThumbnail(ac.signal, shareId, linkId, downloadUrl, downloadToken); }) .catch(logError) .finally(() => { delete controls.current[downloadId]; }); }; // See JSDoc comment in the interface on top of this file. 
const addToDownloadQueue = ( shareId: string, linkId: string, activeRevisionId?: string, domRef?: React.MutableRefObject<unknown> ) => { const downloadIdString = getDownloadIdString({ shareId, linkId, activeRevisionId, }); if (queueLinkCache.current.has(downloadIdString) || (domRef && !domRef.current)) { return; } queueLinkCache.current.add(downloadIdString); asyncQueue.addToQueue(() => { if (domRef && !domRef.current) { // No download was initiated, so removed it from the cache queueLinkCache.current.delete(downloadIdString); return Promise.resolve(); } return handleThumbnailDownload(shareId, linkId, downloadIdString); }); }; return ( <ThumbnailsDownloadContext.Provider value={{ addToDownloadQueue, }} > {children} </ThumbnailsDownloadContext.Provider> ); }; export const useThumbnailsDownload = () => { const state = useContext(ThumbnailsDownloadContext); if (!state) { throw new Error('Trying to use uninitialized ThumbnailsDonwloadProvider'); } return state; };
3,034
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/architecture.md
# Download Architecture The download code itself is in the download subfolder. There is the code to download only one block, blocks of one file, recursively the whole folder, or the selection of files in the folder (at least that is limited by GUI, technically speaking, there is no limit to download files from different folders or even shares). But the caller should avoid direct access to those functions and use the main entry point `initDownload`, which accepts an array of links to be downloaded, and it chooses the proper path based on the provided list. In the case of - one file, `downloadLinkFile.ts` is used, which is using `downloadBlocks.ts` and `downloadBlock.ts` helpers. The file is streamed right back in the stream returned from the `start` method provided from `initDownload`. - one folder, `downloadLinkFolder.ts` is used, which is loading the folder children and recursively calls `downloadLinkFile.ts` mentioned above. The file streams are added to the archive iterator, which provides a stream of zip archive back to the caller, also as a response from calling `start`. - multiple files or folders, `downloadLinks.ts` is used. That calls for all passed links `downloadLinkFile.ts` or uses helper from `downloadLinkFolder.ts` to get the whole folder structure and creates an archive similarly as `downloadLinkFolder.ts` does. The difference is, in this case, files are at the root of the archive, whereas in the previous case, all children are at the root of the archive (the folder itself is not part of the archive). In all cases, the returned stream is passed down to `fileSaver.ts`. It is singleton handling service worker and passing the stream to that service worker, which streams the content to the browser. It works by creating a special local path with the provided stream and opening it by injecting an iframe to that local endpoint (living in the service worker only) to the main page. 
When this method fails, or the file is too small to justify such a complex solution, the file saver falls back to buffering: the whole file is kept in memory and downloaded in one go at the end. Note that this is not possible for huge files, especially when the device does not have enough memory. The download code is pure JavaScript — no React is involved, and thus no API handlers. Everything needs to be passed as a callback, such as `getChildren`, `getBlocks`, or `getKeys` (and event handlers). To make it more convenient to call from other places, the `useDownload.ts` hook exists to connect the pure download code with our API. This hook is then used by `DownloadProvider`. The provider is basically just the queue (also containing progress and controls) of download transfers shown in the transfer manager. If a transfer should not be visible in the transfer manager, the download provider should not be used. For example, thumbnails are handled by `ThumbnailDownloadProvider.ts` (which uses `downloadBlock.ts` directly), and the preview uses the `useDownload.ts` helper hook.
```mermaid graph TD PreviewContainer DownloadButton subgraph "TransferManager folder" TransferManager end subgraph "DownloadShared folder" DownloadSharedContainer useDownloadPublic DownloadSharedContainer --> useDownloadPublic end subgraph "downloads folder" useDownload subgraph "ThumbnailProvider folder" ThumbnailProvider end subgraph "DownloadProvider folder" DownloadProvider useDownloadProvider useDownloadQueue useDownloadControl DownloadProvider --> useDownloadProvider useDownloadProvider --> useDownloadQueue useDownloadProvider --> useDownloadControl --> useDownloadQueue end subgraph "download folder" download downloadLinks downloadLinkFolder downloadLinkFile downloadBlocks concurrentIterator archiveGenerator download --> downloadLinks download --> downloadLinkFolder download --> downloadLinkFile downloadLinkFolder --> downloadLinkFile downloadLinkFolder --> concurrentIterator downloadLinkFolder --> archiveGenerator downloadLinks --> downloadLinkFolder downloadLinks --> downloadLinkFile downloadLinks --> concurrentIterator downloadLinks --> archiveGenerator downloadLinkFile --> downloadBlocks end subgraph "fileSaver folder" fileSaver serviceWorker fileSaver --> serviceWorker end end subgraph "hooks" useDrive[useDrive and others] end PreviewContainer --> DownloadProvider PreviewContainer --> useDownload TransferManager --> DownloadProvider DownloadButton --> DownloadProvider ThumbnailProvider --> downloadBlocks ThumbnailProvider --> useDrive useDownloadPublic --> download useDownload --> download download --> fileSaver useDownloadProvider --> useDownload --> useDrive ```
3,035
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/constants.ts
/** * MAX_DOWNLOADING_BLOCKS limits the number of blocks in the buffer of one file * transfer before asking API for next blocks. */ export const MAX_DOWNLOADING_BLOCKS = 10; /** * MAX_DOWNLOADING_BLOCKS_LOAD limits the number of blocks in the buffer for * all download transfers. Once this is reached, no other download is started. */ export const MAX_DOWNLOADING_BLOCKS_LOAD = 15; /** * MAX_DOWNLOADING_FILES_LOAD limits the number of files downloaded in parallel. * Useful in case we want to allow, lets say up to ten blocks (see constant * MAX_DOWNLOADING_BLOCKS_LOAD above), but total number of files should be lower. * Real scenario is to limit files when there is multiple of empty ones (having * no block file) to avoid fetching all metadata at once. */ export const MAX_DOWNLOADING_FILES_LOAD = 15; /** * MAX_RETRIES_BEFORE_FAIL defines how many times download should have been * attempted before resulting in failure. * It is also controlled by TIME_TO_RESET_RETRIES. */ export const MAX_RETRIES_BEFORE_FAIL = 3; /** * TIME_TO_RESET_RETRIES defines after what time the attempt starts from zero. * Download can fail also by link expiration and that we don't want to fail * but try again and again, until download is fast enough. So only failures * happening quickly after each other results in failure. * Link expires in 30 minutes. That is the limit to download one 4 MB block. */ export const TIME_TO_RESET_RETRIES = 2 * 60 * 1000; // Milliseconds. /** * WAIT_TIME is used for pauses between checks, such as to check if buffer is * still full or not, or if the upload is paused, and so on. */ export const WAIT_TIME = 50; // Milliseconds. /** * MAX_TOO_MANY_REQUESTS_WAIT defines how many seconds is allowed to wait * if server rate limits upload. If server asks to wait longer, we don't * wait and fail right away instead. */ export const MAX_TOO_MANY_REQUESTS_WAIT = 60 * 60; // Seconds.
3,036
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/index.tsx
import { VERIFICATION_STATUS } from '@proton/srp/lib/constants';

import { DownloadProvider } from './DownloadProvider';
import { ThumbnailsDownloadProvider } from './ThumbnailDownloadProvider';
import useDownload from './useDownload';
import usePublicDownload from './usePublicDownload';

export { useDownloadProvider } from './DownloadProvider';
export { useThumbnailsDownload } from './ThumbnailDownloadProvider';
export { default as useDownload } from './useDownload';

/**
 * Wires the private-session download hooks into the download queue provider
 * and the thumbnail download provider.
 */
export function DownloadsProvider({ children }: { children: React.ReactNode }) {
    const { initDownload, downloadThumbnail } = useDownload();

    // Adapter so the thumbnail provider can abort an in-flight thumbnail
    // download via the caller-supplied AbortSignal.
    const downloadThumbnailsCb = async (
        signal: AbortSignal,
        shareId: string,
        linkId: string,
        downloadUrl: string,
        downloadToken: string
    ) => {
        const { contents, abortController, verifiedPromise } = await downloadThumbnail(
            signal,
            shareId,
            linkId,
            downloadUrl,
            downloadToken
        );

        // The signal may already have fired while the download was being set
        // up; otherwise forward any future abort to the download's controller.
        if (signal.aborted) {
            abortController.abort();
        } else {
            signal.addEventListener('abort', () => {
                abortController.abort();
            });
        }
        return { contents, verifiedPromise };
    };

    return (
        <DownloadProvider initDownload={initDownload}>
            <ThumbnailsDownloadProvider downloadThumbnail={downloadThumbnailsCb}>{children}</ThumbnailsDownloadProvider>
        </DownloadProvider>
    );
}

/**
 * Same wiring as DownloadsProvider, but for public (shared URL) sessions,
 * where thumbnails are fetched by bare URL + token and signatures cannot be
 * verified.
 */
export function PublicDownloadsProvider({ children }: { children: React.ReactNode }) {
    const { initDownload, downloadThumbnail } = usePublicDownload();

    const downloadThumbnailsCb = async (
        signal: AbortSignal,
        shareId: string,
        linkId: string,
        downloadUrl: string,
        downloadToken: string
    ) => {
        // NOTE(review): deliberately not awaited — `contents` is the promise
        // itself; presumably the thumbnail provider awaits it downstream.
        // Confirm against the ThumbnailsDownloadProvider contract.
        const contents = downloadThumbnail(signal, shareId, linkId, {
            BareURL: downloadUrl,
            Token: downloadToken,
        });
        // Public downloads have no verification keys, so report NOT_SIGNED.
        return { contents, verifiedPromise: Promise.resolve(VERIFICATION_STATUS.NOT_SIGNED) };
    };

    return (
        <DownloadProvider initDownload={initDownload}>
            <ThumbnailsDownloadProvider downloadThumbnail={downloadThumbnailsCb}>{children}</ThumbnailsDownloadProvider>
        </DownloadProvider>
    );
}

export * from './interface';
3,037
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/interface.ts
import { ReadableStream } from 'web-streams-polyfill';

import { PrivateKeyReference, PublicKeyReference, SessionKey } from '@proton/crypto';
import { DriveFileBlock } from '@proton/shared/lib/interfaces/drive/file';

import { DecryptedLink, SignatureIssues } from '../_links';

// One item (file or folder) queued for download.
export interface LinkDownload {
    isFile: boolean;
    shareId: string;
    linkId: string;
    name: string;
    mimeType: string;
    size: number;
    revisionId?: string;
    signatureAddress?: string;
    signatureIssues?: SignatureIssues;
    buffer?: Uint8Array[];
}

// Handle returned by initDownload to drive one transfer.
export type DownloadControls = {
    start: () => Promise<void>;
    pause: () => void;
    resume: () => void;
    cancel: () => void;
};

// Same controls, but start() hands back the decrypted byte stream directly.
export type DownloadStreamControls = Omit<DownloadControls, 'start'> & {
    start: () => ReadableStream<Uint8Array>;
};

export type DownloadEventCallbacks = {
    // Called when the total size is known.
    onInit?: OnInitCallback;
    // Called when relative progress is changed.
    // Both up and down, e.g., when something is retried.
    onProgress?: OnProgressCallback;
    // Called when signature verification fails.
    // It can be used to ask user the issue and allow the user to abort
    // the download, or ignore the problem and continue.
    onSignatureIssue?: OnSignatureIssueCallback;
    // Called when error happened.
    // The transfer is cancelled.
    onError?: OnErrorCallback;
    // Called when network error happened.
    // The transfer is paused and awaits instruction to continue.
    onNetworkError?: OnErrorCallback;
    // Called when the whole transfer is finished.
    onFinish?: () => void;
};

// Data-access callbacks the download machinery needs (filled in by
// useDownload / usePublicDownload).
export type DownloadBaseCallbacks = {
    getChildren: GetChildrenCallback;
    getBlocks: GetBlocksCallback;
    getKeys: GetKeysCallback;
};

export type DownloadCallbacks = DownloadEventCallbacks & DownloadBaseCallbacks;

export type OnInitCallback = (
    size: number,
    // linkSizes is map of link ID to its size.
    // Currently we collect only sizes of top level items, that
    // is excluding the whole tree of the folder item.
    linkSizes: { [linkId: string]: number }
) => void;
export type OnProgressCallback = (
    // linkIds for which the progress should be counted.
    // It should contain link itself and all the parents so the whole directory
    // tree is properly counted.
    linkIds: string[],
    bytes: number
) => void;
export type OnSignatureIssueCallback = (
    abortSignal: AbortSignal,
    link: LinkDownload,
    signatureIssues: SignatureIssues
) => Promise<void>;
type OnErrorCallback = (err: any) => void;

// Subset of link metadata needed to enumerate a folder's children.
export type ChildrenLinkMeta = Pick<
    DecryptedLink,
    'isFile' | 'linkId' | 'name' | 'mimeType' | 'size' | 'fileModifyTime' | 'signatureAddress' | 'signatureIssues'
>;

export type GetChildrenCallback = (
    abortSignal: AbortSignal,
    shareId: string,
    linkId: string
) => Promise<ChildrenLinkMeta[]>;
type GetBlocksCallback = (
    abortSignal: AbortSignal,
    shareId: string,
    linkId: string,
    pagination: Pagination,
    revisionId?: string
) => Promise<{ blocks: DriveFileBlock[]; thumbnailHashes: string[]; manifestSignature: string }>;
type GetKeysCallback = (abortSignal: AbortSignal, link: LinkDownload) => Promise<DecryptFileKeys>;

// Block-list paging parameters (1-based block index).
export type Pagination = { FromBlockIndex: number; PageSize: number };

// Keys needed to decrypt one file: node key, content session key, and the
// public keys used for signature verification.
export type DecryptFileKeys = {
    privateKey: PrivateKeyReference;
    sessionKeys?: SessionKey;
    addressPublicKeys?: PublicKeyReference[];
};

export type InitDownloadCallback = (
    name: string,
    list: LinkDownload[],
    eventCallbacks: DownloadEventCallbacks
) => DownloadControls;

export type DownloadSignatureIssueModal = React.FunctionComponent<DownloadSignatureIssueModalProps>;

interface DownloadSignatureIssueModalProps {
    isFile: boolean;
    name: string;
    downloadName: string;
    signatureAddress?: string;
    signatureIssues: SignatureIssues;
    apply: (strategy: TransferSignatureIssueStrategy, all: boolean) => void;
    cancelAll: () => void;
}

// What the user chose to do about a signature issue.
export enum TransferSignatureIssueStrategy {
    Abort = 'abort',
    Continue = 'continue',
    // Following strategies are not used yet.
    DeleteFile = 'delete',
    ResignFile = 'resign',
}
3,038
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/useDownload.ts
import { c } from 'ttag';
import { ReadableStream } from 'web-streams-polyfill';

import { queryFileRevision, queryFileRevisionThumbnail } from '@proton/shared/lib/api/drive/files';
import {
    DriveFileBlock,
    DriveFileRevisionResult,
    DriveFileRevisionThumbnailResult,
} from '@proton/shared/lib/interfaces/drive/file';

import { streamToBuffer } from '../../utils/stream';
import { useDebouncedRequest } from '../_api';
import { useDriveCrypto } from '../_crypto';
import { DecryptedLink, SignatureIssues, useLink, useLinksListing } from '../_links';
import { ThumbnailType } from '../_uploads/media';
import { waitFor } from '../_utils';
import initDownloadPure, { initDownloadStream } from './download/download';
import initDownloadLinkFile from './download/downloadLinkFile';
import downloadThumbnailPure from './download/downloadThumbnail';
import {
    DecryptFileKeys,
    DownloadControls,
    DownloadEventCallbacks,
    DownloadStreamControls,
    LinkDownload,
    OnSignatureIssueCallback,
    Pagination,
} from './interface';

/**
 * useDownload provides pure initDownload enhanced by retrieving information
 * about user's own folders and files from the app cache. If data is missing
 * in the app cache, it is downloaded from the server.
 */
export default function useDownload() {
    const debouncedRequest = useDebouncedRequest();
    const { getVerificationKey } = useDriveCrypto();
    const { getLink, getLinkPrivateKey, getLinkSessionKey, setSignatureIssues } = useLink();
    const { loadChildren, getCachedChildren } = useLinksListing();

    // Loads (if needed) and returns the decrypted children of a folder link.
    const getChildren = async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<DecryptedLink[]> => {
        await loadChildren(abortSignal, shareId, linkId, false, false);
        // Wait for all links to be loaded before getting them from cache.
        await waitFor(() => !getCachedChildren(abortSignal, shareId, linkId).isDecrypting);
        const { links } = getCachedChildren(abortSignal, shareId, linkId);
        return links;
    };

    // Fetches one page of encrypted blocks for a file revision.
    // If no revisionId is given, falls back to the link's active revision.
    const getBlocks = async (
        abortSignal: AbortSignal,
        shareId: string,
        linkId: string,
        pagination: Pagination,
        revisionId?: string
    ): Promise<{ blocks: DriveFileBlock[]; thumbnailHashes: string[]; manifestSignature: string }> => {
        let link = await getLink(abortSignal, shareId, linkId);
        revisionId ||= link.activeRevision?.id;
        if (!revisionId) {
            // A link without any revision cannot be a downloadable file.
            throw new Error(`Invalid link metadata, expected file`);
        }

        const { Revision } = await debouncedRequest<DriveFileRevisionResult>(
            queryFileRevision(shareId, linkId, revisionId, pagination),
            abortSignal
        );
        return {
            blocks: Revision.Blocks,
            // We sort hashes to have the Type 1 always at first place. This is necessary for signature verification.
            thumbnailHashes: Revision.Thumbnails.sort((a, b) => a.Type - b.Type).map((Thumbnail) => Thumbnail.Hash),
            manifestSignature: Revision.ManifestSignature,
        };
    };

    // Returns the decryption keys for a link together with any signature
    // issues discovered while deriving them.
    const getKeysWithSignatures = async (
        abortSignal: AbortSignal,
        shareId: string,
        linkId: string,
        revisionId?: string
    ): Promise<[DecryptFileKeys, SignatureIssues?]> => {
        const [privateKey, sessionKey] = await Promise.all([
            getLinkPrivateKey(abortSignal, shareId, linkId),
            getLinkSessionKey(abortSignal, shareId, linkId),
        ]);
        // Getting keys above might find signature issue. Let's get fresh link
        // after that (not in parallel) to have fresh signature issues on it.
        const link = await getLink(abortSignal, shareId, linkId);

        // We need to get address from the asked revision to prevent signature issues.
        // This should be improved to prevent fetching the revision twice (see getBlocks).
        const revisionSignatureAddress =
            revisionId && revisionId !== link.activeRevision?.id
                ? await debouncedRequest<DriveFileRevisionResult>(
                      queryFileRevision(shareId, linkId, revisionId),
                      abortSignal
                  ).then(({ Revision }) => Revision.SignatureAddress)
                : link.activeRevision?.signatureAddress;

        if (!sessionKey) {
            throw new Error('Session key missing on file link');
        }
        if (!revisionSignatureAddress) {
            throw new Error('Signature address missing on file link');
        }

        const addressPublicKeys = await getVerificationKey(revisionSignatureAddress);
        return [
            {
                privateKey: privateKey,
                sessionKeys: sessionKey,
                addressPublicKeys,
            },
            link.signatureIssues,
        ];
    };

    /**
     * getKeysUnsafe only returns keys without checking signature issues.
     * Use only in places where key signatures are not important.
     */
    const getKeysUnsafe = async (abortSignal: AbortSignal, shareId: string, linkId: string) => {
        const [keys] = await getKeysWithSignatures(abortSignal, shareId, linkId);
        return keys;
    };

    // Produces a getKeys callback that reports signature issues (if a handler
    // is given) before handing the keys to the download machinery.
    const getKeysGenerator = (onSignatureIssue?: OnSignatureIssueCallback) => {
        return async (abortSignal: AbortSignal, link: LinkDownload) => {
            const [keys, signatureIssues] = await getKeysWithSignatures(
                abortSignal,
                link.shareId,
                link.linkId,
                link.revisionId
            );
            if (signatureIssues) {
                await onSignatureIssue?.(abortSignal, link, signatureIssues);
            }
            return keys;
        };
    };

    // Wraps the pure download with cache-aware data callbacks; signature
    // issues are persisted on the link before the caller's handler runs.
    const initDownload = (
        name: string,
        list: LinkDownload[],
        eventCallbacks: DownloadEventCallbacks
    ): DownloadControls => {
        return initDownloadPure(name, list, {
            getChildren,
            getBlocks,
            getKeys: getKeysGenerator(eventCallbacks.onSignatureIssue),
            ...eventCallbacks,
            onSignatureIssue: async (abortSignal, link, signatureIssues) => {
                await setSignatureIssues(abortSignal, link.shareId, link.linkId, signatureIssues);
                return eventCallbacks.onSignatureIssue?.(abortSignal, link, signatureIssues);
            },
        });
    };

    // Like initDownload, but starts immediately and exposes the decrypted
    // byte stream to the caller.
    const downloadStream = (
        list: LinkDownload[],
        eventCallbacks?: DownloadEventCallbacks
    ): { controls: DownloadStreamControls; stream: ReadableStream<Uint8Array> } => {
        const controls = initDownloadStream(list, {
            getChildren,
            getBlocks,
            getKeys: getKeysGenerator(eventCallbacks?.onSignatureIssue),
            ...eventCallbacks,
            onSignatureIssue: async (abortSignal, link, signatureIssues) => {
                await setSignatureIssues(abortSignal, link.shareId, link.linkId, signatureIssues);
                return eventCallbacks?.onSignatureIssue?.(abortSignal, link, signatureIssues);
            },
        });
        const stream = controls.start();
        return { controls, stream };
    };

    // Downloads and decrypts a thumbnail; signature issues are ignored here
    // (thumbnails are non-critical, see getKeysUnsafe).
    const downloadThumbnail = (
        abortSignal: AbortSignal,
        shareId: string,
        linkId: string,
        url: string,
        token: string
    ) => {
        return downloadThumbnailPure(url, token, () => getKeysUnsafe(abortSignal, shareId, linkId));
    };

    // Downloads only the first block of a file to cheaply surface signature
    // issues. Resolves with the issues found, or undefined when clean.
    const checkFirstBlockSignature = async (
        abortSignal: AbortSignal,
        shareId: string,
        linkId: string,
        revisionId?: string
    ) => {
        const link = await getLink(abortSignal, shareId, linkId);
        if (!link.isFile) {
            return;
        }
        return new Promise<SignatureIssues | undefined>((resolve, reject) => {
            const controls = initDownloadLinkFile(
                {
                    ...link,
                    revisionId,
                    shareId,
                },
                {
                    getChildren,
                    getBlocks: (abortSignal) =>
                        getBlocks(abortSignal, shareId, linkId, { FromBlockIndex: 1, PageSize: 1 }, revisionId),
                    getKeys: getKeysGenerator(),
                    onError: reject,
                    onNetworkError: reject,
                    onSignatureIssue: async (abortSignal, _link, signatureIssues) => {
                        // Ignore manifest as that needs to download the whole file.
                        if (signatureIssues.manifest) {
                            delete signatureIssues.manifest;
                            if (Object.entries(signatureIssues).length === 0) {
                                return;
                            }
                        }
                        await setSignatureIssues(abortSignal, shareId, linkId, signatureIssues);
                        resolve(signatureIssues);
                    },
                }
            );
            abortSignal.addEventListener('abort', () => {
                controls.cancel();
            });
            streamToBuffer(controls.start())
                .then(() => resolve(undefined))
                .catch(reject);
        });
    };

    // Reads back a previously cached thumbnail blob URL into memory buffers.
    const getThumbnailFromBlobUrl = async (thumbnailUrl: string) => {
        return fetch(thumbnailUrl)
            .then((r) => r.blob())
            .then((blob) => blob.stream() as ReadableStream<Uint8Array>)
            .then((buffer) => streamToBuffer(buffer));
    };

    // Fetches the best-available preview thumbnail: tries the HD preview
    // first, then falls back to the cached blob URL or the standard preview.
    const getPreviewThumbnail = async (abortSignal: AbortSignal, shareId: string, linkId: string) => {
        const { activeRevision, cachedThumbnailUrl } = await getLink(abortSignal, shareId, linkId);
        if (!activeRevision?.id) {
            throw new Error(c('Error').t`The original file has missing active revision`);
        }

        const res = (await debouncedRequest(
            queryFileRevisionThumbnail(shareId, linkId, activeRevision.id, ThumbnailType.HD_PREVIEW),
            abortSignal
        ).catch((err) => {
            // Code 2501 presumably means the HD thumbnail does not exist —
            // TODO confirm against the API error codes.
            if (err.data.Code === 2501) {
                if (cachedThumbnailUrl) {
                    return;
                }
                return debouncedRequest(
                    queryFileRevisionThumbnail(shareId, linkId, activeRevision.id, ThumbnailType.PREVIEW)
                );
            }
            // NOTE(review): other errors are returned (not rethrown) here and
            // end up cast to the result type — verify this is intentional.
            return err;
        })) as DriveFileRevisionThumbnailResult;

        if (!res && cachedThumbnailUrl) {
            return getThumbnailFromBlobUrl(cachedThumbnailUrl);
        }

        const thumbnail = await downloadThumbnail(
            abortSignal,
            shareId,
            linkId,
            res.ThumbnailBareURL,
            res.ThumbnailToken
        );
        return thumbnail.contents;
    };

    return {
        initDownload,
        downloadStream,
        downloadThumbnail,
        getPreviewThumbnail,
        checkFirstBlockSignature,
    };
}
3,039
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/usePublicDownload.ts
import { ReadableStream } from 'web-streams-polyfill';

import { querySharedURLFileRevision } from '@proton/shared/lib/api/drive/sharing';
import { DriveFileBlock } from '@proton/shared/lib/interfaces/drive/file';
import { SharedURLRevision, ThumbnailURLInfo } from '@proton/shared/lib/interfaces/drive/sharing';

import { usePublicSession } from '../_api';
import { DecryptedLink, useLink, usePublicLinksListing } from '../_links';
import initDownloadPure, { initDownloadStream } from './download/download';
import downloadThumbnailPure from './download/downloadThumbnail';
import {
    DownloadControls,
    DownloadEventCallbacks,
    DownloadStreamControls,
    LinkDownload,
    Pagination,
} from './interface';

/**
 * usePublicDownload provides pure initDownload enhanced by retrieving
 * information about user's own folders and files from the app cache
 * similarly like useDownload for private app, but this one is for public
 * sessions.
 *
 * Note: throughout this hook the share ID slot carries the public share
 * token instead.
 */
export default function usePublicDownload() {
    const { request } = usePublicSession();
    const { getLinkPrivateKey, getLinkSessionKey } = useLink();
    const { loadChildren, getCachedChildren } = usePublicLinksListing();

    // Loads (if needed) and returns the decrypted children of a folder link.
    const getChildren = async (abortSignal: AbortSignal, token: string, linkId: string): Promise<DecryptedLink[]> => {
        await loadChildren(abortSignal, token, linkId, false);
        const { links } = getCachedChildren(abortSignal, token, linkId);
        return links;
    };

    // Fetches one page of encrypted blocks of the shared file revision.
    const getBlocks = async (
        abortSignal: AbortSignal,
        token: string,
        linkId: string,
        pagination: Pagination
    ): Promise<{ blocks: DriveFileBlock[]; thumbnailHashes: string[]; manifestSignature: string }> => {
        const { Revision } = await request<{ Revision: SharedURLRevision }>(
            querySharedURLFileRevision(token, linkId, pagination),
            abortSignal
        );
        return {
            blocks: Revision.Blocks,
            thumbnailHashes: Revision.Thumbnails.map((Thumbnail) => Thumbnail.Hash),
            manifestSignature: Revision.ManifestSignature,
        };
    };

    // Returns the node private key and content session key for a link.
    // No address public keys: signature verification is unavailable publicly.
    const getKeys = async (abortSignal: AbortSignal, token: string, linkId: string) => {
        const [privateKey, sessionKeys] = await Promise.all([
            getLinkPrivateKey(abortSignal, token, linkId),
            getLinkSessionKey(abortSignal, token, linkId),
        ]);

        if (!sessionKeys) {
            throw new Error('Session key missing on file link');
        }

        return {
            privateKey,
            sessionKeys,
        };
    };

    // Wraps the pure download with public-session data callbacks.
    // Signature-issue reporting is explicitly disabled (no keys to verify).
    const initDownload = (
        name: string,
        list: LinkDownload[],
        eventCallbacks: DownloadEventCallbacks
    ): DownloadControls => {
        return initDownloadPure(name, list, {
            getChildren,
            getBlocks,
            getKeys: (abortSignal: AbortSignal, link: LinkDownload) =>
                getKeys(
                    abortSignal,
                    link.shareId, // Token in this context.
                    link.linkId
                ),
            ...eventCallbacks,
            onSignatureIssue: undefined,
        });
    };

    // Downloads and decrypts a thumbnail via its bare URL + token.
    const downloadThumbnail = async (
        abortSignal: AbortSignal,
        token: string,
        linkId: string,
        params: ThumbnailURLInfo
    ) => {
        const privateKey = await getLinkPrivateKey(abortSignal, token, linkId);
        const sessionKey = await getLinkSessionKey(abortSignal, token, linkId);

        if (!privateKey || !sessionKey) {
            throw new Error('No keys found to decrypt the thumbnail');
        }

        const { contents } = await downloadThumbnailPure(params.BareURL, params.Token, async () => ({
            sessionKeys: sessionKey,
            privateKey,
            addressPublicKeys: [],
        }));
        return contents;
    };

    // Starts a download immediately and exposes the decrypted byte stream.
    const downloadStream = (
        list: LinkDownload[],
        eventCallbacks?: DownloadEventCallbacks
    ): { controls: DownloadStreamControls; stream: ReadableStream<Uint8Array> } => {
        const controls = initDownloadStream(list, {
            getChildren,
            getBlocks,
            getKeys: async (abortSignal: AbortSignal, link: LinkDownload) => {
                // link.shareId carries the token in public sessions.
                return getKeys(abortSignal, link.shareId, link.linkId);
            },
            ...eventCallbacks,
        });
        const stream = controls.start();
        return { controls, stream };
    };

    return {
        initDownload,
        downloadThumbnail,
        downloadStream,
    };
}
3,040
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/DownloadProvider.tsx
import { createContext, useContext } from 'react';
import * as React from 'react';

import { TransferProgresses } from '../../../components/TransferManager/transfer';
import { InitDownloadCallback, LinkDownload } from '../interface';
import { Download, DownloadLinksProgresses, UpdateFilter } from './interface';
import useDownload from './useDownloadProvider';

// Public surface of the download queue exposed through React context.
interface DownloadProviderState {
    downloads: Download[];
    hasDownloads: boolean;
    download: (links: LinkDownload[]) => Promise<void>;
    pauseDownloads: (idOrFilter: UpdateFilter) => void;
    resumeDownloads: (idOrFilter: UpdateFilter) => void;
    cancelDownloads: (idOrFilter: UpdateFilter) => void;
    restartDownloads: (idOrFilter: UpdateFilter) => void;
    removeDownloads: (idOrFilter: UpdateFilter) => void;
    clearDownloads: () => void;
    getDownloadsProgresses: () => TransferProgresses;
    getDownloadsLinksProgresses: () => DownloadLinksProgresses;
}

const DownloadContext = createContext<DownloadProviderState | null>(null);

/**
 * Provides the download queue state and controls to the subtree.
 * The actual queue logic lives in the useDownload hook; this component only
 * adapts it to context and renders the modals the hook may request
 * (too-big warning and signature-issue prompt).
 */
export const DownloadProvider = ({
    initDownload,
    children,
}: {
    initDownload: InitDownloadCallback;
    children: React.ReactNode;
}) => {
    const {
        downloads,
        hasDownloads,
        download,
        getProgresses,
        getLinksProgress,
        pauseDownloads,
        resumeDownloads,
        cancelDownloads,
        restartDownloads,
        removeDownloads,
        clearDownloads,
        downloadIsTooBigModal,
        signatureIssueModal,
    } = useDownload(initDownload);

    return (
        <DownloadContext.Provider
            value={{
                downloads,
                hasDownloads,
                download,
                pauseDownloads,
                resumeDownloads,
                cancelDownloads,
                restartDownloads,
                removeDownloads,
                clearDownloads,
                getDownloadsProgresses: getProgresses,
                getDownloadsLinksProgresses: getLinksProgress,
            }}
        >
            {children}
            {downloadIsTooBigModal}
            {signatureIssueModal}
        </DownloadContext.Provider>
    );
};

// Consumer hook; throws when used outside of a DownloadProvider.
export const useDownloadProvider = (): DownloadProviderState => {
    const state = useContext(DownloadContext);
    if (!state) {
        throw new Error('Trying to use uninitialized DownloadProvider');
    }
    return state;
};
3,041
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/index.ts
// Re-export the download queue provider and its consumer hook.
export { DownloadProvider, useDownloadProvider } from './DownloadProvider';
3,042
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/interface.ts
import { VERIFICATION_STATUS } from '@proton/crypto';

import { TransferMeta, TransferState } from '../../../components/TransferManager/transfer';
import { LinkDownload } from '../interface';

// One entry in the download queue.
export interface Download {
    // ID of the download for referencing (such as pausing and so on).
    id: string;
    // Links included in this download (one file, or a folder's contents).
    links: LinkDownload[];
    meta: TransferMeta;
    // To be compatible with Download of TransferManager.
    startDate: Date;
    state: TransferState;
    resumeState?: TransferState; // resumeState is set only when state is paused.
    error?: Error;
    // Signature link and status is set only when state is set to SignatureIssue.
    // Note that download can be of several links (for example the whole folder)
    // and only one of them can have signature issue. We need to know which one
    // exactly has the issue.
    signatureIssueLink?: LinkDownload;
    signatureStatus?: VERIFICATION_STATUS;
}

// Per-download progress, including per-link breakdown.
export interface DownloadProgresses {
    [downloadId: string]: {
        progress: number;
        links: DownloadLinksProgresses;
    };
}

// Per-link progress; total is absent until the link size is known.
export interface DownloadLinksProgresses {
    [linkId: string]: {
        total?: number;
        progress: number;
    };
}

// Either a concrete download ID, or a predicate over queue entries.
export type UpdateFilter = string | ((params: UpdateCallbackParams) => boolean);
// Either a concrete next state, or a function computing it per entry.
export type UpdateState = TransferState | ((params: UpdateCallbackParams) => TransferState);
export type UpdateCallback = (params: UpdateCallbackParams) => void;
export type UpdateData = {
    size?: number;
    error?: Error;
    signatureIssueLink?: LinkDownload;
    signatureStatus?: VERIFICATION_STATUS;
};
export type UpdateCallbackParams = {
    id: string;
    state: TransferState;
    resumeState?: TransferState;
};

// Error type for failures caused by a user decision (e.g. aborting on a
// signature issue), distinguishable from technical errors by name.
export class DownloadUserError extends Error {
    constructor(message: string) {
        super(message);
        this.name = 'DownloadUserError';
    }
}
3,043
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/useDownloadControl.test.ts
import { act, renderHook } from '@testing-library/react-hooks';

import { FILE_CHUNK_SIZE, SupportedMimeTypes } from '@proton/shared/lib/drive/constants';

import { TransferState } from '../../../components/TransferManager/transfer';
import { LinkDownload } from '../interface';
import { Download } from './interface';
import useDownloadControl from './useDownloadControl';

// Builds a queue entry in the given state; meta mirrors what the real queue
// computes (zip name/mime for multi-link downloads, summed size).
// setSize=false simulates a download whose total size is not yet known.
function makeDownload(id: string, state: TransferState, links: LinkDownload[], setSize = true): Download {
    return {
        id,
        startDate: new Date(),
        state,
        links,
        meta: {
            filename: links.length === 1 ? links[0].name : `My files.zip`,
            mimeType: links.length === 1 ? links[0].mimeType : SupportedMimeTypes.zip,
            size: setSize ? links.reduce((sum, link) => sum + link.size, 0) : undefined,
        },
    };
}

// Default size of two chunks means each link contributes two blocks of load.
function makeDownloadLink(name: string, size = 2 * FILE_CHUNK_SIZE): LinkDownload {
    return {
        isFile: true,
        shareId: 'shareId',
        linkId: 'linkId',
        name,
        mimeType: 'text/plain',
        size,
        signatureAddress: 'address',
    };
}

describe('useDownloadControl', () => {
    const mockUpdateWithCallback = jest.fn();
    const mockRemoveFromQueue = jest.fn();
    const mockClearQueue = jest.fn();

    // Fixture covering every state class: not-yet-started, in progress
    // (single, multi-link, large) and finished.
    const testDownloads: Download[] = [
        makeDownload('init', TransferState.Initializing, [makeDownloadLink('init.txt')]),
        makeDownload('pending', TransferState.Pending, [makeDownloadLink('pending.txt')]),
        makeDownload('progress', TransferState.Progress, [makeDownloadLink('progress.txt', 2 * FILE_CHUNK_SIZE + 42)]),
        makeDownload('progressMulti', TransferState.Progress, [
            makeDownloadLink('progress1.txt'),
            makeDownloadLink('progress2.txt'),
            makeDownloadLink('progress3.txt'),
        ]),
        makeDownload('big', TransferState.Progress, [makeDownloadLink('big.txt', 100 * FILE_CHUNK_SIZE)]),
        makeDownload('done', TransferState.Done, [makeDownloadLink('done.txt')]),
    ];

    beforeEach(() => {
        mockUpdateWithCallback.mockClear();
        mockRemoveFromQueue.mockClear();
        mockClearQueue.mockClear();
    });

    it('calculates download block load', () => {
        const { result: hook } = renderHook(() =>
            useDownloadControl(testDownloads, mockUpdateWithCallback, mockRemoveFromQueue, mockClearQueue)
        );
        const controls = { start: jest.fn(), pause: jest.fn(), resume: jest.fn(), cancel: jest.fn() };
        act(() => {
            hook.current.add('progress', controls);
            hook.current.updateProgress('progress', ['linkId'], FILE_CHUNK_SIZE);
            expect(hook.current.calculateDownloadBlockLoad()).toBe(
                // 2 progress (one chunk done above, one and a bit to go) + 2*3 progressMulti + 100 big
                2 + 6 + 100
            );
        });
    });

    it('does not calculate download block load', () => {
        // An in-progress download with unknown size makes the total load
        // unknowable, so the hook must return undefined.
        const downloads = [
            ...testDownloads,
            makeDownload('withoutSize', TransferState.Progress, [makeDownloadLink('withoutSize.txt')], false),
        ];
        const { result: hook } = renderHook(() =>
            useDownloadControl(downloads, mockUpdateWithCallback, mockRemoveFromQueue, mockClearQueue)
        );
        const controls = { start: jest.fn(), pause: jest.fn(), resume: jest.fn(), cancel: jest.fn() };
        act(() => {
            hook.current.add('progress', controls);
            hook.current.updateProgress('progress', ['linkId'], FILE_CHUNK_SIZE);
            expect(hook.current.calculateDownloadBlockLoad()).toBe(undefined);
        });
    });

    it('keeps link progresses', () => {
        const { result: hook } = renderHook(() =>
            useDownloadControl(testDownloads, mockUpdateWithCallback, mockRemoveFromQueue, mockClearQueue)
        );
        const controls = { start: jest.fn(), pause: jest.fn(), resume: jest.fn(), cancel: jest.fn() };
        act(() => {
            hook.current.add('progress', controls);
            // Start some progress before we know size.
            hook.current.updateProgress('progress', ['linkId1'], 10);
            hook.current.updateProgress('progress', ['linkId2'], 20);
            hook.current.updateLinkSizes('progress', { linkId1: 12, linkId2: 34 });
            // Continue some progress after we know size.
            hook.current.updateProgress('progress', ['linkId2'], 5);
            expect(hook.current.getLinksProgress()).toMatchObject({
                linkId1: { progress: 10, total: 12 },
                linkId2: { progress: 25, total: 34 },
            });
        });
    });
});
3,044
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/useDownloadControl.ts
import { useCallback, useRef } from 'react'; import { FILE_CHUNK_SIZE } from '@proton/shared/lib/drive/constants'; import { TransferState } from '../../../components/TransferManager/transfer'; import { isTransferFinished, isTransferPending, isTransferProgress } from '../../../utils/transfer'; import { DownloadControls } from '../interface'; import { Download, DownloadLinksProgresses, DownloadProgresses, UpdateCallback, UpdateFilter, UpdateState, } from './interface'; export default function useDownloadControl( downloads: Download[], updateWithCallback: (idOrFilter: UpdateFilter, newState: UpdateState, callback: UpdateCallback) => void, removeFromQueue: (idOrFilter: UpdateFilter, callback: UpdateCallback) => void, clearQueue: () => void ) { // Controls keep references to ongoing downloads to have ability // to pause or cancel them. const controls = useRef<{ [id: string]: DownloadControls }>({}); const progresses = useRef<DownloadProgresses>({}); const add = (id: string, downloadControls: DownloadControls) => { controls.current[id] = downloadControls; progresses.current[id] = { progress: 0, links: {} }; }; const remove = (id: string) => { delete controls.current[id]; delete progresses.current[id]; }; const getLinkProgress = (id: string, linkId: string) => { if (!progresses.current[id].links[linkId]) { progresses.current[id].links[linkId] = { progress: 0, }; } return progresses.current[id].links[linkId]; }; const updateLinkSizes = (id: string, linkSizes: { [linkId: string]: number }) => { // Progress might be updated even when transfer is already finished and // thus progress is not here anymore. In such case it is OK to simply // ignore the call to not crash. 
if (progresses.current[id] === undefined) { return; } Object.entries(linkSizes || {}).forEach(([linkId, size]) => { getLinkProgress(id, linkId).total = size; }); }; const updateProgress = (id: string, linkIds: string[], increment: number) => { // Progress might be updated even when transfer is already finished and // thus progress is not here anymore. In such case it is OK to simply // ignore the call to not crash. if (progresses.current[id] === undefined) { return; } progresses.current[id].progress += increment; // Because increment can be float, some aritmetic operation can result // in -0.0000000001 which would be then displayed as -0 after rounding. if (progresses.current[id].progress < 0) { progresses.current[id].progress = 0; } linkIds.forEach((linkId) => (getLinkProgress(id, linkId).progress += increment)); }; const getProgresses = () => Object.fromEntries(Object.entries(progresses.current).map(([linkId, { progress }]) => [linkId, progress])); const getLinksProgress = (): DownloadLinksProgresses => Object.values(progresses.current).reduce((aggregatedLinks, { links }) => { // What if some link is downloaded more than once? // Probably very rare case which we can safely ignore. The worst case // scenario is that we show randomly progress of one download, but // thanks to browser caching it will not be that much off. return { ...aggregatedLinks, ...links }; }, {}); /** * calculateDownloadLoad returns based on progresses of ongoing downloads * how many data is currently being remaining to be downloaded. * If the size is not known yet (for example, folder which has not loaded * all children), it returns undefined. That should be taken as high load * and not start any other download for now. * Otherwise it returns number of remaining blocks of active transfers. 
*/ const calculateDownloadBlockLoad = (): number | undefined => { const progressingDownloads = downloads.filter(isTransferProgress); if (progressingDownloads.some(({ meta: { size } }) => size === undefined)) { return undefined; } return progressingDownloads.reduce((sum: number, download) => { const downloadedSize = progresses.current[download.id]?.progress || 0; return sum + Math.ceil(((download.meta.size as number) - downloadedSize) / FILE_CHUNK_SIZE); }, 0); }; const pauseDownloads = useCallback( (idOrFilter: UpdateFilter) => { updateWithCallback(idOrFilter, TransferState.Paused, ({ id, state }) => { if (isTransferProgress({ state }) || isTransferPending({ state })) { (controls.current[id] as DownloadControls)?.pause?.(); } }); }, [updateWithCallback] ); const resumeDownloads = useCallback( (idOrFilter: UpdateFilter) => { updateWithCallback( idOrFilter, ({ resumeState }) => { return resumeState || TransferState.Progress; }, ({ id }) => { (controls.current[id] as DownloadControls)?.resume?.(); } ); }, [updateWithCallback] ); const cancelDownloads = useCallback( (idOrFilter: UpdateFilter) => { // Do not cancel already finished transfers. updateWithCallback( idOrFilter, ({ state }) => (isTransferFinished({ state }) ? state : TransferState.Canceled), ({ state, id }) => !isTransferFinished({ state }) && controls.current[id]?.cancel() ); }, [updateWithCallback] ); const removeDownloads = useCallback( (idOrFilter: UpdateFilter) => { // We should never simply remove downloads, but cancel it first, so // it does not continue on background without our knowledge. 
cancelDownloads(idOrFilter); removeFromQueue(idOrFilter, ({ id }) => remove(id)); }, [removeFromQueue] ); const clearDownloads = useCallback(() => { Object.entries(controls.current).map(([, downloadControls]) => downloadControls.cancel()); controls.current = {}; progresses.current = {}; clearQueue(); }, [clearQueue]); return { add, remove, updateLinkSizes, updateProgress, getProgresses, getLinksProgress, calculateDownloadBlockLoad, pauseDownloads, resumeDownloads, cancelDownloads, removeDownloads, clearDownloads, }; }
3,045
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/useDownloadProvider.tsx
import { useCallback, useEffect } from 'react'; import { c } from 'ttag'; import { useNotifications, useOnline, usePreventLeave } from '@proton/components'; import { HTTP_ERROR_CODES } from '@proton/shared/lib/errors'; import { TransferState } from '../../../components/TransferManager/transfer'; import { useDownloadIsTooBigModal } from '../../../components/modals/DownloadIsTooBigModal'; import { logError, sendErrorReport } from '../../../utils/errorHandling'; import { bufferToStream } from '../../../utils/stream'; import { isTransferCancelError, isTransferOngoing, isTransferPausedByConnection, isTransferProgress, } from '../../../utils/transfer'; import { SignatureIssues } from '../../_links'; import { MAX_DOWNLOADING_BLOCKS_LOAD } from '../constants'; import FileSaver from '../fileSaver/fileSaver'; import { InitDownloadCallback, LinkDownload } from '../interface'; import { UpdateFilter } from './interface'; import useDownloadControl from './useDownloadControl'; import useDownloadQueue from './useDownloadQueue'; import useDownloadSignatureIssue from './useDownloadSignatureIssue'; export default function useDownloadProvider(initDownload: InitDownloadCallback) { const onlineStatus = useOnline(); const { createNotification } = useNotifications(); const { preventLeave } = usePreventLeave(); const [downloadIsTooBigModal, showDownloadIsTooBigModal] = useDownloadIsTooBigModal(); const queue = useDownloadQueue(); const control = useDownloadControl(queue.downloads, queue.updateWithCallback, queue.remove, queue.clear); const { handleSignatureIssue, signatureIssueModal } = useDownloadSignatureIssue( queue.downloads, queue.updateState, queue.updateWithData, control.cancelDownloads ); /** * download should be considered as main entry point for download files * in Drive app. It does all necessary checks, such as checking if the * same files are not currently already downloading, and it adds transfer * to the queue. 
*/ const download = async (links: LinkDownload[]) => { await queue.add(links).catch((err: any) => { if ((err as Error).name === 'DownloadUserError') { createNotification({ text: err.message, type: 'error', }); } else { createNotification({ text: c('Notification').t`Failed to download files: ${err}`, type: 'error', }); console.error(err); } }); }; const restartDownloads = useCallback( async (idOrFilter: UpdateFilter) => { queue.updateWithData(idOrFilter, TransferState.Pending); }, [queue.downloads, queue.updateState] ); // Effect to start next download if there is enough capacity to do so. useEffect(() => { const { nextDownload } = queue; if (!nextDownload) { return; } // If link contains the whole buffer already, there is no need to // calculate load or wait for anything else. It can be downloaded // right away. if (nextDownload.links.every(({ buffer }) => !!buffer)) { const buffer = nextDownload.links.flatMap(({ buffer }) => buffer); const stream = bufferToStream(buffer as Uint8Array[]); void preventLeave(FileSaver.saveAsFile(stream, nextDownload.meta)).catch(logError); queue.updateState(nextDownload.id, TransferState.Done); return; } const loadSizes = queue.downloads.filter(isTransferProgress).map((download) => download.meta.size); if (loadSizes.some((size) => size === undefined)) { return; } const load = control.calculateDownloadBlockLoad(); if (load === undefined || load > MAX_DOWNLOADING_BLOCKS_LOAD) { return; } // Set progress right away to not start the download more than once. queue.updateState(nextDownload.id, TransferState.Progress); const controls = initDownload(nextDownload.meta.filename, nextDownload.links, { onInit: (size: number, linkSizes: { [linkId: string]: number }) => { // Keep the previous state for cases when the download is paused. 
queue.updateWithData(nextDownload.id, ({ state }) => state, { size }); control.updateLinkSizes(nextDownload.id, linkSizes); if (FileSaver.isFileTooBig(size)) { void showDownloadIsTooBigModal({ onCancel: () => control.cancelDownloads(nextDownload.id) }); } }, onProgress: (linkIds: string[], increment: number) => { control.updateProgress(nextDownload.id, linkIds, increment); }, onNetworkError: (error: any) => { queue.updateWithData(nextDownload.id, TransferState.NetworkError, { error }); }, onSignatureIssue: async ( abortSignal: AbortSignal, link: LinkDownload, signatureIssues: SignatureIssues ) => { await handleSignatureIssue(abortSignal, nextDownload, link, signatureIssues); }, }); control.add(nextDownload.id, controls); void preventLeave( controls .start() .then(() => { queue.updateState(nextDownload.id, TransferState.Done); }) .catch((error: any) => { if (isTransferCancelError(error)) { queue.updateState(nextDownload.id, TransferState.Canceled); } else { queue.updateWithData(nextDownload.id, TransferState.Error, { error }); sendErrorReport(error); } // If the error is 429 (rate limited), we should not continue // with other downloads in the queue and fail fast, otherwise // it just triggers more strict jails and leads to nowhere. 
if (error?.status === HTTP_ERROR_CODES.TOO_MANY_REQUESTS) { control.cancelDownloads(isTransferOngoing); } }) .finally(() => { control.remove(nextDownload.id); }) ); }, [queue.nextDownload, queue.downloads]); useEffect(() => { if (onlineStatus) { const ids = queue.downloads.filter(isTransferPausedByConnection).map(({ id }) => id); control.resumeDownloads(({ id }) => ids.includes(id)); } }, [onlineStatus]); return { downloads: queue.downloads, hasDownloads: queue.hasDownloads, download, getProgresses: control.getProgresses, getLinksProgress: control.getLinksProgress, pauseDownloads: control.pauseDownloads, resumeDownloads: control.resumeDownloads, cancelDownloads: control.cancelDownloads, restartDownloads, removeDownloads: control.removeDownloads, clearDownloads: control.clearDownloads, downloadIsTooBigModal, signatureIssueModal, }; }
3,046
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/useDownloadQueue.test.ts
import { act, renderHook } from '@testing-library/react-hooks'; import { TransferState } from '../../../components/TransferManager/transfer'; import { LinkDownload } from '../interface'; import { Download, UpdateCallback, UpdateData, UpdateFilter, UpdateState } from './interface'; import useDownloadQueue from './useDownloadQueue'; function makeDownloadLink(name: string, isFile = true): LinkDownload { return { isFile, shareId: 'shareId', linkId: name, name, mimeType: isFile ? 'text/plain' : 'Folder', size: 1234, signatureAddress: 'address', }; } describe('useDownloadQueue', () => { let hook: { current: { downloads: Download[]; add: (links: LinkDownload[]) => Promise<void>; updateState: (idOrFilter: UpdateFilter, newStateOrCallback: UpdateState) => void; updateWithData: (idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, data: UpdateData) => void; updateWithCallback: ( idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, callback: UpdateCallback ) => void; remove: (idOrFilter: UpdateFilter, callback?: UpdateCallback) => void; }; }; let fileTransferId: string; let folderTransferId: string; let singleTransferIds: string[]; beforeEach(async () => { const { result } = renderHook(() => useDownloadQueue()); hook = result; await act(async () => { await hook.current.add([makeDownloadLink('file.txt')]); await hook.current.add([makeDownloadLink('folder', false)]); await hook.current.add([makeDownloadLink('file.txt'), makeDownloadLink('folder', false)]); }); fileTransferId = hook.current.downloads[0].id; folderTransferId = hook.current.downloads[1].id; singleTransferIds = [fileTransferId, folderTransferId]; }); it('adding same file transfer fails', async () => { await act(async () => { const promise = hook.current.add([makeDownloadLink('file.txt')]); await expect(promise).rejects.toThrowError('File "file.txt" is already downloading'); }); }); it('adding same folder transfer fails', async () => { await act(async () => { const promise = 
hook.current.add([makeDownloadLink('folder', false)]); await expect(promise).rejects.toThrowError('Folder "folder" is already downloading'); }); }); it('adding same files transfer fails', async () => { await act(async () => { const promise = hook.current.add([makeDownloadLink('file.txt'), makeDownloadLink('folder', false)]); await expect(promise).rejects.toThrowError('File selection is already downloading'); }); }); it('adding different transfer', async () => { await act(async () => { const promise = hook.current.add([makeDownloadLink('file2.txt')]); await expect(promise).resolves.toBe(undefined); }); expect(hook.current.downloads.length).toBe(4); }); it('updates state using id', () => { act(() => { hook.current.updateState(fileTransferId, TransferState.Canceled); }); expect(hook.current.downloads.map(({ state }) => state)).toMatchObject([ TransferState.Canceled, TransferState.Pending, TransferState.Pending, ]); }); it('updates state using filter', () => { act(() => { hook.current.updateState(({ id }) => singleTransferIds.includes(id), TransferState.Canceled); }); expect(hook.current.downloads.map(({ state }) => state)).toMatchObject([ TransferState.Canceled, TransferState.Canceled, TransferState.Pending, ]); }); it('updates state using callback', () => { act(() => { hook.current.updateState( () => true, ({ id }) => (id === fileTransferId ? 
TransferState.Error : TransferState.Canceled) ); }); expect(hook.current.downloads.map(({ state }) => state)).toMatchObject([ TransferState.Error, TransferState.Canceled, TransferState.Canceled, ]); }); it('updates state with data', () => { act(() => { hook.current.updateWithData(fileTransferId, TransferState.Error, { size: 42, error: new Error('nope'), signatureIssueLink: makeDownloadLink('name'), signatureStatus: 2, }); }); expect(hook.current.downloads[0]).toMatchObject({ state: TransferState.Error, error: new Error('nope'), meta: { filename: 'file.txt', mimeType: 'text/plain', size: 42, }, signatureIssueLink: makeDownloadLink('name'), signatureStatus: 2, }); }); it('updates state with callback', () => { const mockCallback = jest.fn(); act(() => { hook.current.updateWithCallback( ({ id }) => singleTransferIds.includes(id), TransferState.Progress, mockCallback ); }); expect(mockCallback.mock.calls).toMatchObject([ [{ meta: { filename: 'file.txt' } }], [{ meta: { filename: 'folder.zip' } }], ]); }); it('removes transfer from the queue using id', () => { const mockCallback = jest.fn(); act(() => { hook.current.remove(fileTransferId, mockCallback); }); expect(mockCallback.mock.calls).toMatchObject([[{ meta: { filename: 'file.txt' } }]]); expect(hook.current.downloads).toMatchObject([ { links: [{ linkId: 'folder' }], }, { links: [{ linkId: 'file.txt' }, { linkId: 'folder' }], }, ]); }); it('removes transfer from the queue using filter', () => { const mockCallback = jest.fn(); act(() => { hook.current.remove(({ id }) => singleTransferIds.includes(id), mockCallback); }); expect(mockCallback.mock.calls).toMatchObject([ [{ meta: { filename: 'file.txt' } }], [{ meta: { filename: 'folder.zip' } }], ]); expect(hook.current.downloads).toMatchObject([ { links: [{ linkId: 'file.txt' }, { linkId: 'folder' }], }, ]); }); });
3,047
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/useDownloadQueue.ts
import { useCallback, useMemo, useState } from 'react'; import { c } from 'ttag'; import { generateUID } from '@proton/components'; import { SupportedMimeTypes } from '@proton/shared/lib/drive/constants'; import { TransferMeta, TransferState } from '../../../components/TransferManager/transfer'; import { isTransferFinished, isTransferPending } from '../../../utils/transfer'; import { LinkDownload } from '../interface'; import { Download, DownloadUserError, UpdateCallback, UpdateCallbackParams, UpdateData, UpdateFilter, UpdateState, } from './interface'; export default function useDownloadQueue() { const [downloads, setDownloads] = useState<Download[]>([]); const hasDownloads = useMemo((): boolean => { return downloads.length > 0; }, [downloads]); const nextDownload = useMemo(() => { return downloads.find((download) => isTransferPending(download)); }, [downloads]); const add = useCallback(async (links: LinkDownload[]): Promise<void> => { return new Promise((resolve, reject) => { setDownloads((downloads) => { if (isAlreadyDownloading(downloads, links)) { reject(new DownloadUserError(generateAlreadyDownloadingError(links))); return downloads; } const download = generateDownload(links); resolve(); return [...downloads, download]; }); }); }, []); const update = useCallback( ( idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, { size, error, signatureIssueLink, signatureStatus }: UpdateData = {}, callback?: UpdateCallback ) => { const filter = convertFilterToFunction(idOrFilter); const newStateCallback = convertNewStateToFunction(newStateOrCallback); const updateDownload = (download: Download): Download => { if (filter(download)) { callback?.(download); const newState = newStateCallback(download); // If pause is set twice, prefer resumeState set already before // to not be locked in paused state forever. download.resumeState = newState === TransferState.Paused ? 
download.resumeState || download.state : undefined; download.state = newState; if (size !== undefined) { download.meta.size = size; } download.error = error; download.signatureIssueLink = signatureIssueLink; download.signatureStatus = signatureStatus; } return download; }; setDownloads((downloads) => [...downloads.map(updateDownload)]); }, [] ); const updateState = useCallback( (idOrFilter: UpdateFilter, newStateOrCallback: UpdateState) => { update(idOrFilter, newStateOrCallback); }, [update] ); const updateWithData = useCallback( (idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, data: UpdateData = {}) => { update(idOrFilter, newStateOrCallback, data); }, [update] ); const updateWithCallback = useCallback( (idOrFilter: UpdateFilter, newStateOrCallback: UpdateState, callback: UpdateCallback) => { update(idOrFilter, newStateOrCallback, {}, callback); }, [update] ); const remove = useCallback((idOrFilter: UpdateFilter, callback?: UpdateCallback) => { const filter = convertFilterToFunction(idOrFilter); const invertFilter: UpdateFilter = (item) => !filter(item); setDownloads((downloads) => { if (callback) { downloads.filter(filter).forEach((download) => callback(download)); } return [...downloads.filter(invertFilter)]; }); }, []); const clear = useCallback(() => { setDownloads([]); }, []); return { downloads, hasDownloads, nextDownload, add, updateState, updateWithData, updateWithCallback, remove, clear, }; } export function convertFilterToFunction(filterOrId: UpdateFilter) { return typeof filterOrId === 'function' ? filterOrId : ({ id }: UpdateCallbackParams) => id === filterOrId; } function convertNewStateToFunction(newStateOrCallback: UpdateState) { return typeof newStateOrCallback === 'function' ? 
newStateOrCallback : () => newStateOrCallback; } function isAlreadyDownloading(downloads: Download[], links: LinkDownload[]): boolean { return downloads.some((download) => { // User can download the same files again after previous one was // already finished, either canceled, failed, or downloaded. if (isTransferFinished(download)) { return false; } if (download.links.length !== links.length) { return false; } const ids = download.links.map((link) => link.shareId + link.linkId); return links.every((link) => ids.includes(link.shareId + link.linkId)); }); } function generateAlreadyDownloadingError(links: LinkDownload[]): string { if (links.length > 1) { return c('Error').t`File selection is already downloading`; } const { name } = links[0]; if (!links[0].isFile) { return c('Error').t`Folder "${name}" is already downloading`; } return c('Error').t`File "${name}" is already downloading`; } function generateDownload(links: LinkDownload[]): Download { return { id: generateUID(), startDate: new Date(), state: TransferState.Pending, links, meta: generateDownloadMeta(links), }; } function generateDownloadMeta(links: LinkDownload[]): TransferMeta { if (links.length === 1) { const link = links[0]; if (link.isFile) { return { filename: link.name, mimeType: link.mimeType, size: link.size, }; } return { filename: `${link.name}.zip`, mimeType: SupportedMimeTypes.zip, }; } return { filename: generateMyFilesName(), mimeType: SupportedMimeTypes.zip, }; } function generateMyFilesName(): string { const date = new Date().toISOString().substring(0, 19); // translator: Name of the download archive when selected multiple files, example: My files 2021-10-11T12:13:14.zip. return c('Title').t`Download ${date}.zip`; }
3,048
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/DownloadProvider/useDownloadSignatureIssue.tsx
import { useCallback, useEffect, useRef, useState } from 'react'; import { TransferCancel, TransferState } from '../../../components/TransferManager/transfer'; import { useSignatureIssueModal } from '../../../components/modals/SignatureIssueModal'; import { waitUntil } from '../../../utils/async'; import { isTransferActive, isTransferSignatureIssue } from '../../../utils/transfer'; import { SignatureIssues } from '../../_links'; import { LinkDownload, TransferSignatureIssueStrategy } from '../interface'; import { Download, UpdateData, UpdateFilter, UpdateState } from './interface'; // Empty string is ensured to not conflict with any upload ID or folder name. // No upload has empty ID. const SIGNATURE_ISSUE_STRATEGY_ALL_ID = ''; export default function useDownloadSignatureIssue( downloads: Download[], updateState: (filter: UpdateFilter, newState: UpdateState) => void, updateWithData: (filter: UpdateFilter, newState: UpdateState, data: UpdateData) => void, cancelDownloads: (filter: UpdateFilter) => void ) { const [signatureIssueModal, showSignatureIssueModal] = useSignatureIssueModal(); // There should be only one modal to choose conflict strategy. const isSignatureIssueModalOpen = useRef(false); // Signature issue strategy is set per download and link as one download // (e.g., folder) can contain many files and we need to report on every // single one. If user wants to apply the chosen strategy for all cases, // SIGNATURE_ISSUE_STRATEGY_ALL_ID key is used. // Strategies are cleared once all downloads are finished so user is asked // again (consider that user could do another download after an hour). const signatureIssueStrategies = useRef<{ [downloadAndLinkId: string]: TransferSignatureIssueStrategy }>({}); // List of all issues which needs to be handled. 
const [signatureIssues, setSignatureIssues] = useState< { download: Download; link: LinkDownload; linkSignatureIssues: SignatureIssues }[] >([]); useEffect(() => { // "Apply to all" should be active till the last transfer is active. // Once all transfers finish, user can start another minutes or hours // later and that means we should ask again. const hasNoActiveUpload = !downloads.find(isTransferActive); if (hasNoActiveUpload) { signatureIssueStrategies.current = {}; } }, [downloads]); /** * handleSignatureIssue either returns the strategy right away, or it sets * the state of the download to signature issue which will open * SignatureIssueModal to ask user what to do next. Handler waits till the * user selects the strategy. */ const handleSignatureIssue = useCallback( ( abortSignal: AbortSignal, download: Download, link: LinkDownload, signatureIssues: SignatureIssues ): Promise<TransferSignatureIssueStrategy> => { const getStrategy = (): TransferSignatureIssueStrategy | undefined => { return ( signatureIssueStrategies.current[SIGNATURE_ISSUE_STRATEGY_ALL_ID] || signatureIssueStrategies.current[download.id + link.linkId] ); }; const strategy = getStrategy(); if (strategy) { return Promise.resolve(strategy); } setSignatureIssues((issues) => [...issues, { download, link, linkSignatureIssues: signatureIssues }]); updateWithData(download.id, TransferState.SignatureIssue, {}); return new Promise((resolve, reject) => { waitUntil(() => !!getStrategy(), abortSignal) .then(() => { const strategy = getStrategy() as TransferSignatureIssueStrategy; resolve(strategy); }) .catch(() => { reject(new TransferCancel({ message: 'Download was canceled' })); }); }); }, [updateState] ); const openSignatureIssueModal = ( downloadId: string, linkId: string, params: { isFile: boolean; name: string; downloadName: string; signatureIssues: SignatureIssues; signatureAddress?: string; } ) => { isSignatureIssueModalOpen.current = true; const apply = (strategy: TransferSignatureIssueStrategy, 
all: boolean) => { isSignatureIssueModalOpen.current = false; signatureIssueStrategies.current[all ? SIGNATURE_ISSUE_STRATEGY_ALL_ID : downloadId + linkId] = strategy; if (all) { setSignatureIssues([]); if (strategy === TransferSignatureIssueStrategy.Abort) { cancelDownloads(isTransferSignatureIssue); return; } updateState(isTransferSignatureIssue, TransferState.Progress); return; } if (strategy === TransferSignatureIssueStrategy.Abort) { setSignatureIssues((signatureIssues) => signatureIssues.filter((issue) => issue.download.id !== downloadId) ); cancelDownloads(downloadId); return; } setSignatureIssues((signatureIssues) => { const newSignatureIssues = signatureIssues.filter( (issue) => issue.download.id !== downloadId || issue.link.linkId !== linkId ); if (!newSignatureIssues.some((issue) => issue.download.id === downloadId)) { updateState(downloadId, TransferState.Progress); } return newSignatureIssues; }); }; const cancelAll = () => { isSignatureIssueModalOpen.current = false; signatureIssueStrategies.current[SIGNATURE_ISSUE_STRATEGY_ALL_ID] = TransferSignatureIssueStrategy.Abort; setSignatureIssues([]); cancelDownloads(isTransferActive); }; void showSignatureIssueModal({ apply, cancelAll, ...params }); }; // Modals are openned on this one place only to not have race condition // issue and ensure only one modal is openned. useEffect(() => { if (isSignatureIssueModalOpen.current || !signatureIssues.length) { return; } const { download, link, linkSignatureIssues } = signatureIssues[0]; openSignatureIssueModal(download.id, link.linkId, { isFile: link.isFile, name: link.name, downloadName: download.meta.filename, signatureIssues: linkSignatureIssues, signatureAddress: link.signatureAddress, }); }, [signatureIssues]); return { handleSignatureIssue, signatureIssueModal, }; }
3,049
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/archiveGenerator.test.ts
import { fromUnixTime } from 'date-fns'; import { ReadableStream } from 'web-streams-polyfill'; import { asyncGeneratorToArray } from '../../../utils/test/generator'; import ArchiveGenerator from './archiveGenerator'; type TestLink = { isFile: boolean; name: string; fileModifyTime?: number; path?: string[]; expectedName?: string; expectedPath?: string; expectedLastModified?: Date; }; async function* generateLinks(links: TestLink[]) { for (let link of links) { if (link.isFile) { yield { isFile: link.isFile, name: link.name, parentPath: link.path || [], fileModifyTime: link.fileModifyTime, stream: new ReadableStream<Uint8Array>(), }; } else { yield { isFile: link.isFile, name: link.name, parentPath: link.path || [], }; } } } describe('ArchiveGenerator', () => { beforeEach(() => { jest.clearAllMocks(); }); const checkWritingLinks = async (links: TestLink[]) => { const archiver = new ArchiveGenerator(); const transformedLinks = await asyncGeneratorToArray(archiver.transformLinksToZipItems(generateLinks(links))); expect(transformedLinks).toMatchObject( links.map((link) => { const path = link.expectedPath || (link.path || []).join('/'); const fileName = link.expectedName || link.name; const name = path ? `${path}/${fileName}` : fileName; const lastModified = link.expectedLastModified || (link.fileModifyTime && fromUnixTime(link.fileModifyTime)); return link.isFile ? 
{ name, input: expect.anything(), lastModified, } : { name, }; }) ); }; it('generates two files and one folder in the root', async () => { await checkWritingLinks([ { isFile: true, name: 'Hello.txt', fileModifyTime: 1692780131 }, { isFile: true, name: 'World.txt', fileModifyTime: 1692780009, expectedLastModified: new Date('2023-08-23T08:40:09.000Z'), }, { isFile: false, name: 'dir' }, ]); }); it('generates two files in the folder', async () => { await checkWritingLinks([ { isFile: false, name: 'dir' }, { isFile: true, name: 'Hello.txt', path: ['dir'] }, { isFile: true, name: 'World.txt', path: ['dir'] }, ]); }); it('generates two files with the same name', async () => { await checkWritingLinks([ { isFile: true, name: 'file.txt' }, { isFile: true, name: 'file.txt', expectedName: 'file (1).txt' }, ]); }); it('generates two folder with the same name', async () => { await checkWritingLinks([ { isFile: false, name: 'dir' }, { isFile: false, name: 'dir', expectedName: 'dir (1)' }, ]); }); it('generates file and folder with the same name', async () => { await checkWritingLinks([ { isFile: true, name: 'name' }, { isFile: false, name: 'name', expectedName: 'name (1)' }, ]); }); // If file is written first, then folder is renamed. it('generates file and folder with the same name in the folder', async () => { await checkWritingLinks([ { isFile: false, name: 'dir' }, { isFile: true, name: 'name', path: ['dir'] }, { isFile: false, name: 'name', path: ['dir'], expectedName: 'name (1)' }, { isFile: false, name: 'subfolder', path: ['dir', 'name'], expectedPath: 'dir/name (1)' }, ]); }); // If folder is written first, then file is renamed. 
it('generates folder and file with the same name in the folder', async () => { await checkWritingLinks([ { isFile: false, name: 'dir' }, { isFile: false, name: 'name', path: ['dir'] }, { isFile: true, name: 'name', path: ['dir'], expectedName: 'name (1)' }, { isFile: false, name: 'subfolder', path: ['dir', 'name'] }, ]); }); it('generates many files with with the same name but different case', async () => { await checkWritingLinks([ { isFile: true, name: 'file.txt' }, { isFile: true, name: 'File.txt', expectedName: 'File (1).txt' }, { isFile: true, name: 'FILE.txt', expectedName: 'FILE (2).txt' }, { isFile: true, name: 'file.TXT', expectedName: 'file (3).TXT' }, { isFile: true, name: 'FILE.TXT', expectedName: 'FILE (4).TXT' }, { isFile: true, name: 'File.Txt', expectedName: 'File (5).Txt' }, { isFile: true, name: 'FilE.TxT', expectedName: 'FilE (6).TxT' }, ]); }); });
3,050
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/archiveGenerator.ts
import { makeZip } from 'client-zip'; import { fromUnixTime } from 'date-fns'; import { ReadableStream, TransformStream } from 'web-streams-polyfill'; import { isWindows } from '@proton/shared/lib/helpers/browser'; import { adjustName, adjustWindowsLinkName, splitLinkName } from '../../_links'; import { StartedNestedLinkDownload } from './interface'; function getPathString(path: string[]): string { return path.length > 0 ? `/${path.join('/')}` : ''; } /** * Archive iterates over provided generator of folders and file streams which * are written into the archive stream. */ export default class ArchiveGenerator { stream: ReadableStream<Uint8Array>; private writer: WritableStream<Uint8Array>; private canceled: boolean; private includedFiles: { path: string; name: string }[]; private includedFolderPaths: Set<string>; private originalToAdjustedPath: Map<string, string>; constructor() { let { readable, writable } = new TransformStream(); this.stream = readable; this.writer = writable; this.canceled = false; this.includedFiles = []; this.includedFolderPaths = new Set(); this.originalToAdjustedPath = new Map(); } async writeLinks(links: AsyncGenerator<StartedNestedLinkDownload>) { const zipStream = makeZip(this.transformLinksToZipItems(links), { buffersAreUTF8: true, }); await zipStream.pipeTo(this.writer); } async *transformLinksToZipItems(links: AsyncGenerator<StartedNestedLinkDownload>) { for await (const link of links) { if (this.canceled) { return; } if (link.isFile) { const name = this.adjustFilePath(link.parentPath, link.name); yield { name: name.slice(1), // Windows doesn't like leading root slash. input: link.stream, lastModified: link.fileModifyTime && fromUnixTime(link.fileModifyTime), }; } else { const name = this.adjustFolderPath(link.parentPath, link.name); yield { name: name.slice(1), // Windows doesn't like leading root slash. 
}; } } } private adjustFolderPath(path: string[], name: string): string { const pathString = getPathString(path); const fullPath = `${pathString}/${name}`; const parentPath = this.originalToAdjustedPath.get(pathString) || ''; const fixedName = isWindows() ? adjustWindowsLinkName(name) : name; const deduplicate = (index = 0): string => { const adjustedName = `${adjustName(index, fixedName)}`; const adjustedPath = `${parentPath}/${adjustedName}`; if ( this.includedFiles.some( (file) => file.path === parentPath && file.name.toLowerCase() === adjustedName.toLowerCase() ) || this.includedFolderPaths.has(adjustedPath.toLowerCase()) ) { return deduplicate(index + 1); } this.originalToAdjustedPath.set(fullPath, adjustedPath); this.includedFolderPaths.add(adjustedPath.toLowerCase()); return adjustedPath; }; return deduplicate(); } private adjustFilePath(path: string[], name: string) { const pathString = getPathString(path); const parentPath = this.originalToAdjustedPath.get(pathString) || ''; const fixedName = isWindows() ? adjustWindowsLinkName(name) : name; const [namePart, extension] = splitLinkName(fixedName); const deduplicate = (index = 0): string => { const adjustedName = adjustName(index, namePart, extension); if ( this.includedFiles.some( (file) => file.path === parentPath && file.name.toLowerCase() === adjustedName.toLowerCase() ) || this.includedFolderPaths.has(`${parentPath}/${adjustedName}`) ) { return deduplicate(index + 1); } this.includedFiles.push({ path: parentPath, name: adjustedName }); return `${parentPath}/${adjustedName}`; }; return deduplicate(); } cancel() { this.canceled = true; } }
3,051
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/concurrentIterator.test.ts
import { FILE_CHUNK_SIZE } from '@proton/shared/lib/drive/constants'; import { wait } from '@proton/shared/lib/helpers/promise'; import { asyncGeneratorToArray } from '../../../utils/test/generator'; import { MAX_DOWNLOADING_BLOCKS_LOAD, MAX_DOWNLOADING_FILES_LOAD } from '../constants'; import { DownloadCallbacks } from '../interface'; import ConcurrentIterator from './concurrentIterator'; import { NestedLinkDownload, StartedNestedLinkDownload } from './interface'; const mockDownloadLinkFile = jest.fn(); jest.mock('./downloadLinkFile', () => { return (...args: any[]) => { return mockDownloadLinkFile(...args); }; }); async function* generateLinks(count: number, size = 123) { for (let i = 0; i < count; i++) { yield { isFile: true, size, } as NestedLinkDownload; } } describe('ConcurrentIterator', () => { const mockStart = jest.fn(); const mockCancel = jest.fn(); beforeEach(() => { jest.clearAllMocks(); mockDownloadLinkFile.mockReturnValue({ start: mockStart, cancel: mockCancel, }); }); const checkPausingGeneratingLinks = async ( generator: AsyncGenerator<StartedNestedLinkDownload>, callsBeforePause: number, totalCalls: number ) => { // Start consuming so we it can generate links and hit the pause. const linksPromise = asyncGeneratorToArray(generator); await wait(500); // 500ms should be enough to generate first batch. // Now its in waiting mode and it waits to finish ongoing downloads. expect(mockDownloadLinkFile).toBeCalledTimes(callsBeforePause); expect(mockStart).toBeCalledTimes(callsBeforePause); mockDownloadLinkFile.mock.calls.forEach(([link, { onProgress, onFinish }]) => { onProgress('linkId', link.size); onFinish(); }); // After finishing the previous batch, wait to generate the rest. // Now without pausing because we don't have more than "two pages". 
const links = await linksPromise; expect(links.length).toBe(totalCalls); expect(mockDownloadLinkFile).toBeCalledTimes(totalCalls); expect(mockStart).toBeCalledTimes(totalCalls); }; it('pauses when reaching MAX_DOWNLOADING_FILES_LOAD', async () => { const c = new ConcurrentIterator(); const g = c.iterate(generateLinks(MAX_DOWNLOADING_FILES_LOAD * 2), {} as DownloadCallbacks); await checkPausingGeneratingLinks(g, MAX_DOWNLOADING_FILES_LOAD, MAX_DOWNLOADING_FILES_LOAD * 2); }); it('pauses when reaching MAX_DOWNLOADING_BLOCKS_LOAD', async () => { const bigFileSize = FILE_CHUNK_SIZE * MAX_DOWNLOADING_BLOCKS_LOAD * 2; const c = new ConcurrentIterator(); const g = c.iterate(generateLinks(2, bigFileSize), {} as DownloadCallbacks); await checkPausingGeneratingLinks(g, 1, 2); }); it('cancels from pause', async () => { const bigFileSize = FILE_CHUNK_SIZE * MAX_DOWNLOADING_BLOCKS_LOAD * 2; const c = new ConcurrentIterator(); const g = c.iterate(generateLinks(5, bigFileSize), {} as DownloadCallbacks); // Start consuming so we it can generate links and hit the pause. const linksPromise = asyncGeneratorToArray(g); await wait(500); // 500ms should be enough to generate first batch. c.cancel(); const links = await linksPromise; expect(links.length).toBe(1); expect(mockDownloadLinkFile).toBeCalledTimes(1); expect(mockStart).toBeCalledTimes(1); expect(mockCancel).toBeCalledTimes(1); }); });
3,052
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/concurrentIterator.ts
import { generateUID } from '@proton/components'; import { FILE_CHUNK_SIZE } from '@proton/shared/lib/drive/constants'; import { waitUntil } from '../../../utils/async'; import { MAX_DOWNLOADING_BLOCKS_LOAD, MAX_DOWNLOADING_FILES_LOAD } from '../constants'; import { DownloadCallbacks, DownloadStreamControls } from '../interface'; import initDownloadLinkFile from './downloadLinkFile'; import { NestedLinkDownload, StartedNestedLinkDownload } from './interface'; /** * ConcurrentIterator iterates over provided generator of links and starts * download of files in concurrent fashion. */ export default class ConcurrentIterator { private paused: boolean; private canceled: boolean; private fileControlers: Map<string, DownloadStreamControls>; private loadSize: number; constructor() { this.paused = false; this.canceled = false; this.fileControlers = new Map(); this.loadSize = 0; } async *iterate( links: AsyncGenerator<NestedLinkDownload>, callbacks: DownloadCallbacks ): AsyncGenerator<StartedNestedLinkDownload> { for await (const link of links) { if (this.paused) { await waitUntil(() => !this.paused); } if (this.canceled) { return; } if (!link.isFile) { yield link as StartedNestedLinkDownload; } else { await waitUntil( () => (this.loadSize < FILE_CHUNK_SIZE * MAX_DOWNLOADING_BLOCKS_LOAD && this.fileControlers.size < MAX_DOWNLOADING_FILES_LOAD) || this.canceled ); if (this.canceled) { return; } const uniqueId = generateUID(); const controls = initDownloadLinkFile(link, { ...callbacks, // onInit and onFinish are ignored per file when downloading // multiple files - we care only about total onInit or onFinish. 
onInit: undefined, onProgress: (linkIds: string[], bytes: number) => { callbacks.onProgress?.([...link.parentLinkIds, ...linkIds], bytes); this.loadSize -= bytes; }, onFinish: () => { this.fileControlers.delete(uniqueId); }, }); this.loadSize += link.size; const stream = controls.start(); this.fileControlers.set(uniqueId, controls); yield { ...link, stream, }; } } } pause() { this.paused = true; this.fileControlers.forEach((controls) => controls.pause()); } resume() { this.paused = false; this.fileControlers.forEach((controls) => controls.resume()); } cancel() { this.canceled = true; this.paused = false; // To unpause waits and terminate operation. this.fileControlers.forEach((controls) => controls.cancel()); } }
3,053
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/download.ts
import { SupportedMimeTypes } from '@proton/shared/lib/drive/constants'; import { TransferCancel } from '../../../components/TransferManager/transfer'; import fileSaver from '../fileSaver/fileSaver'; import { DownloadCallbacks, DownloadControls, DownloadStreamControls, LinkDownload } from '../interface'; import initDownloadLinkFile from './downloadLinkFile'; import initDownloadLinkFolder from './downloadLinkFolder'; import initDownloadLinks from './downloadLinks'; /** * SIZE_WAIT_TIME limits how long wait before initializing download by browser. * The download is already progressing during that time, it just waits for info * to properly decide whether to use streaming or buffering solution. * 5 seconds is probably reasonable number to have plenty of time to prefer * buffer solution (more probably faster one) in most cases, but not waiting * too long to not buffer too much of data in memory (to fit in memory), and * also in case of using streaming solution to show the download more quickly * to user (to have nicer UX). // See SizeTimeoutPromise for more information. */ const SIZE_WAIT_TIME = 5000; // ms /** * initDownload prepares controls for downloading either file, folder or set * of files, based on the provided links. * One file link: the file is downloaded directly. * One folder link: archive is generated, all childs are in the root. * Many links: archive is generated, all files are in the root. 
*/ export default function initDownload( name: string, links: LinkDownload[], callbacks: DownloadCallbacks ): DownloadControls { let gotErr: any; const sizePromise = new SizeTimeoutPromise(SIZE_WAIT_TIME); const controls = getControls(links, { ...callbacks, onInit: (size: number, linkSizes) => { callbacks.onInit?.(size, linkSizes); sizePromise.set(size); }, onError: (err: any) => { callbacks.onError?.(err); gotErr = err; }, }); return { ...controls, start: async () => { const stream = controls.start(); const size = await sizePromise.get().catch(() => undefined); await fileSaver .saveAsFile(stream, { filename: name, mimeType: links.length === 1 ? links[0].mimeType : SupportedMimeTypes.zip, size, }) .catch((err) => { callbacks.onError?.(err); controls.cancel(); throw err; }); if (gotErr) { throw gotErr; } callbacks.onFinish?.(); }, cancel: () => { gotErr = new TransferCancel({ message: `Transfer canceled` }); controls.cancel(); }, }; } export function initDownloadStream(links: LinkDownload[], callbacks: DownloadCallbacks) { return getControls(links, callbacks); } function getControls(links: LinkDownload[], callbacks: DownloadCallbacks): DownloadStreamControls { if (links.length === 1) { const link = links[0]; if (link.isFile) { return initDownloadLinkFile(link, callbacks); } return initDownloadLinkFolder(link, callbacks); } return initDownloadLinks(links, callbacks); } /** * Loading deep folder structure can take awhile, but we don't want to wait * till the whole structure is loaded before we start actually downloading * the files. The issue is, FileSaver needs to know the size to properly * decide whether to use service worker (for huge files) or simpler buffer * solution (for small files). We can say if the size is not known in some * reasonable time, then the structure is huge and archive probably big. * If the size is not provided, service worker solution is used by default. 
* There is still fallback if service worker is not working, so this is good * enough precision. */ class SizeTimeoutPromise { promise: Promise<number>; resolve?: (value: number) => void; constructor(timeout: number) { let reject: (err?: any) => void; this.promise = new Promise((_resolve, _reject) => { this.resolve = _resolve; reject = _reject; }); setTimeout(() => reject(), timeout); } set(size: number) { this.resolve?.(size); } async get() { return this.promise; } }
3,054
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadBlock.test.js
import downloadBlock from './downloadBlock'; describe('download block', () => { it('waits specified time when rate limited', async () => { const mockFetch = jest.fn(() => { // Fail only once. if (mockFetch.mock.calls.length === 1) { return Promise.resolve({ status: 429, response: { headers: new Headers({ 'retry-after': '1' }), }, }); } return Promise.resolve({ body: new ReadableStream({ start(controller) { controller.enqueue([42]); }, }), }); }); global.fetch = mockFetch; await downloadBlock(new AbortController(), 'url', 'token'); expect(mockFetch).toBeCalledTimes(2); }); });
3,055
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadBlock.ts
import { createReadableStreamWrapper } from '@mattiasbuelens/web-streams-adapter'; import { ReadableStream } from 'web-streams-polyfill'; import { retryHandler } from '@proton/shared/lib/api/helpers/retryHandler'; import { getClientID } from '@proton/shared/lib/apps/helper'; import { HTTP_STATUS_CODE } from '@proton/shared/lib/constants'; import { DOWNLOAD_RETRIES_ON_TIMEOUT, DOWNLOAD_TIMEOUT, RESPONSE_CODE } from '@proton/shared/lib/drive/constants'; import { HTTP_ERROR_CODES } from '@proton/shared/lib/errors'; import { createApiError, createOfflineError } from '@proton/shared/lib/fetch/ApiError'; import { getAppVersionHeaders } from '@proton/shared/lib/fetch/headers'; import { APP_NAME, APP_VERSION } from '../../../config'; import { MAX_TOO_MANY_REQUESTS_WAIT } from '../constants'; // Stream wrapper has outdated types // @ts-expect-error const toPolyfillReadable = createReadableStreamWrapper(ReadableStream); export default async function downloadBlock( abortController: AbortController, url: string, token: string ): Promise<ReadableStream<Uint8Array>> { const doFetch = async (attempt = 0): Promise<Response> => { let isTimeout = false; const timeoutController = new AbortController(); const timeoutHandle = setTimeout(() => { isTimeout = true; abortController.abort(); }, DOWNLOAD_TIMEOUT); const signalAbortHandle = () => { timeoutController.abort(); clearTimeout(timeoutHandle); }; abortController.signal.addEventListener('abort', signalAbortHandle); const cleanListeners = () => { clearTimeout(timeoutHandle); abortController.signal.removeEventListener('abort', signalAbortHandle); }; return fetch(url, { signal: abortController.signal, method: 'get', credentials: 'omit', headers: { 'pm-storage-token': token, ...getAppVersionHeaders(getClientID(APP_NAME), APP_VERSION), }, }) .then((result) => { cleanListeners(); return result; }) .catch((err: any) => { // Do not move to finally block. 
We need to clear it before // another fetch attempt is called to not abort it by accident. cleanListeners(); if (isTimeout && attempt < DOWNLOAD_RETRIES_ON_TIMEOUT) { return doFetch(attempt + 1); } if (err.name === 'AbortError') { throw err; } throw createOfflineError({}); }); }; const response = await doFetch(); // Download can be rate limited. Lets wait defined time by server // before making another attempt. if (response.status === HTTP_ERROR_CODES.TOO_MANY_REQUESTS) { return retryHandler({ response } as any, MAX_TOO_MANY_REQUESTS_WAIT).then(() => downloadBlock(abortController, url, token) ); } if (!response.body) { throw Error(`Response has no data`); } if (response.status === HTTP_STATUS_CODE.NOT_FOUND) { throw createApiError( 'Block not found', response, {}, { Code: RESPONSE_CODE.NOT_FOUND, } ); } return toPolyfillReadable(response.body) as ReadableStream<Uint8Array>; }
3,056
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadBlocks.test.ts
import { ReadableStream } from 'web-streams-polyfill'; import { createApiError, createOfflineError } from '@proton/shared/lib/fetch/ApiError'; import { DriveFileBlock } from '@proton/shared/lib/interfaces/drive/file'; import mergeUint8Arrays from '@proton/utils/mergeUint8Arrays'; import { TransferCancel } from '../../../components/TransferManager/transfer'; import { streamToBuffer } from '../../../utils/stream'; import * as constants from '../constants'; import initDownloadBlocks from './downloadBlocks'; const createNotFoundError = () => createApiError('Error', { status: 404, statusText: 'Not found.' } as Response, {}); const TIME_TO_RESET_RETRIES_LOCAL = 50; // Milliseconds. jest.mock('../constants'); const mockConstants = constants as jest.MockedObject<typeof constants>; let offlineURL = ''; let expiredURL = ''; let responseDelay = 0; const createStreamResponse = (chunks: number[][]) => new ReadableStream<Uint8Array>({ start(ctrl) { chunks.forEach((data) => ctrl.enqueue(new Uint8Array(data))); ctrl.close(); }, }); const createGetBlocksResponse = (blocks: DriveFileBlock[], manifestSignature = '') => { return { blocks, manifestSignature, thumbnailHashes: [''], }; }; const mockTransformBlockStream = async (abortSignal: AbortSignal, stream: ReadableStream<Uint8Array>) => { return { hash: [] as unknown as Uint8Array, data: stream, }; }; const mockDownloadBlock = jest.fn( (abortController: AbortController, url: string): Promise<ReadableStream<Uint8Array>> => { return new Promise((resolve, reject) => { setTimeout(() => { if (url === offlineURL) { reject(createOfflineError({})); return; } if (url === expiredURL) { reject(createNotFoundError()); return; } const response = { 'url:1': createStreamResponse([[1, 2], [3]]), 'url:2': createStreamResponse([[4], [5, 6]]), 'url:3': createStreamResponse([[7, 8, 9]]), 'url:4': createStreamResponse([[10, 11]]), }[url]; if (!response) { reject(new Error(`Unexpected url "${url}"`)); return; } resolve(response); }, responseDelay); }); } 
); describe('initDownload', () => { beforeAll(() => { jest.spyOn(global.console, 'warn').mockReturnValue(); }); beforeEach(() => { mockDownloadBlock.mockClear(); mockConstants.TIME_TO_RESET_RETRIES = TIME_TO_RESET_RETRIES_LOCAL; responseDelay = 0; }); it('should download data from remote server using block metadata', async () => { const downloadControls = initDownloadBlocks( 'filename', { getBlocks: async () => createGetBlocksResponse([ { Index: 1, BareURL: 'url:1', Token: '1', Hash: 'aewdsh', }, { Index: 2, BareURL: 'url:2', Token: '2', Hash: 'aewdsh', }, { Index: 3, BareURL: 'url:3', Token: '3', Hash: 'aewdsh', }, ]), transformBlockStream: mockTransformBlockStream, }, mockDownloadBlock ); const stream = downloadControls.start(); const buffer = mergeUint8Arrays(await streamToBuffer(stream)); expect(buffer).toEqual(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9])); }); it('should discard downloaded data and finish download on cancel', async () => { const promise = new Promise<void>((resolve, reject) => { const downloadControls = initDownloadBlocks( 'filename', { getBlocks: async () => createGetBlocksResponse([ { Index: 1, BareURL: 'url:1', Token: '1', Hash: 'aewdsh', }, ]), transformBlockStream: mockTransformBlockStream, onError: reject, onFinish: () => resolve(), }, mockDownloadBlock ); downloadControls.start(); downloadControls.cancel(); }); await expect(promise).rejects.toThrowError(TransferCancel); }); it('should reuse already downloaded data after recovering from network error', async () => { // Make sure to not reset retries counter during the test. 
mockConstants.TIME_TO_RESET_RETRIES = 10000; offlineURL = 'url:2'; const downloadControls = initDownloadBlocks( 'filename', { getBlocks: async () => createGetBlocksResponse([ { Index: 1, BareURL: 'url:1', Token: '1', Hash: 'aewdsh', }, { Index: 2, BareURL: 'url:2', Token: '2', Hash: 'ewqcd', }, { Index: 3, BareURL: 'url:3', Token: '3', Hash: 'qwesd', }, { Index: 4, BareURL: 'url:4', Token: '4', Hash: 'dqweda', }, ]), transformBlockStream: mockTransformBlockStream, onNetworkError: (err) => { expect(err).toEqual(createOfflineError({})); // Simulate connection is back up and user clicked to resume download. offlineURL = ''; downloadControls.resume(); }, }, mockDownloadBlock ); const stream = downloadControls.start(); const buffer = mergeUint8Arrays(await streamToBuffer(stream)); // Every block is streamed only once and in proper order even during interruption. expect(buffer).toEqual(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])); // Non-failing blocks are downloaded only once. expect(mockDownloadBlock.mock.calls.map(([, url]) => url)).toEqual([ 'url:1', 'url:2', // First attempt. 'url:3', 'url:4', // First retry (retry tries again all blocks in the queue). 'url:2', 'url:3', 'url:4', // Second retry. 'url:2', 'url:3', 'url:4', // Third retry. 'url:2', 'url:3', 'url:4', // Second attempt after resume and fixing the issue. 'url:2', ]); }); it('should retry on block expiry', async () => { expiredURL = 'url:1'; let shouldValidateBlock = false; const downloadControls = initDownloadBlocks( 'filename', { getBlocks: async () => { if (shouldValidateBlock) { expiredURL = ''; } shouldValidateBlock = true; return createGetBlocksResponse([ { Index: 1, BareURL: 'url:1', Token: '1', Hash: 'aewdsh', }, ]); }, transformBlockStream: mockTransformBlockStream, }, mockDownloadBlock ); const stream = downloadControls.start(); const buffer = mergeUint8Arrays(await streamToBuffer(stream)); // Expired block is streamed once after retry. 
expect(buffer).toEqual(new Uint8Array([1, 2, 3])); // Expired block gets requested. expect(mockDownloadBlock.mock.calls.map(([, url]) => url)).toEqual(['url:1', 'url:1']); }); it('should re-request expired blocks', async () => { // the download speed is sooo slow that the block will expire 4 times const TIME_BLOCK_EXPIRES = 4; // making sure response time is greater than // consecutive retry counter threshold responseDelay = TIME_TO_RESET_RETRIES_LOCAL * 2; let blockRetryCount = 0; expiredURL = 'url:1'; const downloadControls = initDownloadBlocks( 'filename', { getBlocks: async () => { if (blockRetryCount === TIME_BLOCK_EXPIRES) { expiredURL = ''; } blockRetryCount++; return createGetBlocksResponse([ { Index: 1, BareURL: 'url:1', Token: '1', Hash: 'aewdsh', }, ]); }, transformBlockStream: mockTransformBlockStream, }, mockDownloadBlock ); const stream = downloadControls.start(); const buffer = mergeUint8Arrays(await streamToBuffer(stream)); expect(buffer).toEqual(new Uint8Array([1, 2, 3])); // initial + TIME_BLOCK_EXPIRES expect(mockDownloadBlock.mock.calls.length).toBe(1 + TIME_BLOCK_EXPIRES); }); it('should request new block exactly three times if request fails consequentially', async () => { expiredURL = 'url:1'; const downloadControls = initDownloadBlocks( 'filename', { getBlocks: async () => { return createGetBlocksResponse([ { Index: 1, BareURL: expiredURL, Token: '1', Hash: 'aewdsh', }, ]); }, transformBlockStream: mockTransformBlockStream, }, mockDownloadBlock ); const stream = downloadControls.start(); const bufferPromise = streamToBuffer(stream); await expect(bufferPromise).rejects.toThrowError(); // 1 initial request + 3 retries expect(mockDownloadBlock.mock.calls.length).toBe(4); }); });
3,057
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadBlocks.ts
import { c } from 'ttag';

import { ReadableStream } from 'web-streams-polyfill';

import { getIsConnectionIssue } from '@proton/shared/lib/api/helpers/apiErrorHelper';
import { HTTP_STATUS_CODE } from '@proton/shared/lib/constants';
import { BATCH_REQUEST_SIZE, MAX_THREADS_PER_DOWNLOAD, RESPONSE_CODE } from '@proton/shared/lib/drive/constants';
import { base64StringToUint8Array } from '@proton/shared/lib/helpers/encoding';
import runInQueue from '@proton/shared/lib/helpers/runInQueue';
import { DriveFileBlock } from '@proton/shared/lib/interfaces/drive/file';
import mergeUint8Arrays from '@proton/utils/mergeUint8Arrays';
import orderBy from '@proton/utils/orderBy';

import { TransferCancel } from '../../../components/TransferManager/transfer';
import { waitUntil } from '../../../utils/async';
import { logError } from '../../../utils/errorHandling';
import { ValidationError } from '../../../utils/errorHandling/ValidationError';
import { ObserverStream, untilStreamEnd } from '../../../utils/stream';
import { isTransferCancelError } from '../../../utils/transfer';
import { MAX_DOWNLOADING_BLOCKS, MAX_RETRIES_BEFORE_FAIL, TIME_TO_RESET_RETRIES } from '../constants';
import { DownloadCallbacks, DownloadStreamControls } from '../interface';
import downloadBlock from './downloadBlock';

// Callbacks for the generic block downloader. Same shape as DownloadCallbacks
// except getBlocks/onProgress are simplified (the link context is already bound
// by the caller) and transformBlockStream is added for decryption.
export type DownloadBlocksCallbacks = Omit<
    DownloadCallbacks,
    'getBlocks' | 'onInit' | 'onSignatureIssue' | 'getChildren' | 'getKeys' | 'onProgress'
> & {
    // Fetches one page of block metadata (URLs + tokens) for the file revision.
    getBlocks: (
        abortSignal: AbortSignal,
        pagination: {
            FromBlockIndex: number;
            PageSize: number;
        },
        revisionId?: string
    ) => Promise<{ blocks: DriveFileBlock[]; thumbnailHashes: string[]; manifestSignature: string }>;
    // Decrypts one block stream; returns the content hash of the encrypted
    // data (used for manifest verification) together with the decrypted stream.
    transformBlockStream: (
        abortSignal: AbortSignal,
        stream: ReadableStream<Uint8Array>,
        EncSignature: string
    ) => Promise<{
        hash: Uint8Array;
        data: ReadableStream<Uint8Array>;
    }>;
    // Verifies signature of the concatenated block hashes after all blocks are done.
    checkManifestSignature?: (abortSignal: AbortSignal, hash: Uint8Array, signature: string) => Promise<void>;
    // Reports number of downloaded bytes (negative values revert progress on retry).
    onProgress?: (bytes: number) => void;
};

/**
 * initDownload prepares download transfer for the DownloadProvider queue.
 * Download is not started right away, it has to be started explicitly by
 * DownloadProvider.
 * How the download itself starts, see start function inside.
 */
export default function initDownloadBlocks(
    name: string,
    {
        getBlocks,
        transformBlockStream,
        checkManifestSignature,
        onProgress,
        onError,
        onNetworkError,
        onFinish,
    }: DownloadBlocksCallbacks,
    downloadBlockCallback = downloadBlock
) {
    // Output stream the caller consumes; fsWriter is the single writer that
    // serializes decrypted chunks in block order.
    const fileStream = new ObserverStream();
    const fsWriter = fileStream.writable.getWriter();

    // Per-block byte counts that were reported via onProgress but whose block
    // has not been flushed yet — used to revert progress on retry/pause.
    const incompleteProgress = new Map<number, number>();
    let abortController = new AbortController();
    let paused = false;

    // start fetches blocks of the file and downloads those blocks in parallel.
    // Note its not real parallelism. This runs in main thread but that's fine
    // as the main job is to do requests to the API. Decryption (executed in
    // transformBlockStream) is passed to web workers, currently created by
    // openpgpjs library. The data exchanges are a bit of downside, therefore
    // we want create web workers manually in the future that will do download
    // and decryption together. See MAX_THREADS_PER_DOWNLOAD for more info.
    const start = () => {
        if (abortController.signal.aborted) {
            throw new TransferCancel({ message: `Transfer canceled` });
        }

        // Buffered decrypted chunks per block Index, kept until all preceding
        // blocks were flushed to the output stream (blocks are 1-indexed).
        const buffers = new Map<number, { done: boolean; chunks: Uint8Array[] }>();
        let fromBlockIndex = 1;

        let blocks: DriveFileBlock[] = [];
        // Index of the lowest block not yet flushed to the output stream.
        let activeIndex = 1;

        const hashes: Uint8Array[] = [];
        let thumbnailHashes: Uint8Array[] = [];
        let manifestSignature: string;

        // A full page implies there might be another page to fetch.
        const hasMorePages = (currentPageLength: number) => currentPageLength === BATCH_REQUEST_SIZE;

        // Fetches one page of block metadata into `blocks`. Returns false when
        // the transfer was paused meanwhile and restarted via start().
        const getBlocksPaged = async (pagination: { FromBlockIndex: number; PageSize: number }) => {
            try {
                const result = await getBlocks(abortController.signal, pagination);
                blocks = result.blocks;
                if (result.thumbnailHashes) {
                    thumbnailHashes = result.thumbnailHashes.map(base64StringToUint8Array);
                }
                manifestSignature = result.manifestSignature;
            } catch (err: any) {
                // If paused before blocks/meta is fetched (DOM Error), restart on resume pause
                if (paused && isTransferCancelError(err)) {
                    await waitUntil(() => paused === false);
                    await start();
                    return false;
                }
                throw err;
            }
            return true;
        };

        // Writes all buffered chunks of the given block to the output stream
        // and drops the buffer.
        const flushBuffer = async (Index: number) => {
            const currentBuffer = buffers.get(Index);
            if (currentBuffer?.chunks.length) {
                for (const chunk of currentBuffer.chunks) {
                    await fsWriter.ready;
                    await fsWriter.write(chunk);
                }
                buffers.delete(Index);
            }
        };

        // Drops unfinished buffers and reports their already-counted bytes as
        // negative progress, so a retry starts from a consistent progress value.
        const revertProgress = () => {
            if (onProgress) {
                // Revert progress of blocks that weren't finished
                buffers.forEach((buffer, Index) => {
                    if (!buffer.done) {
                        buffers.delete(Index);
                    }
                });

                let progressToRevert = 0;
                incompleteProgress.forEach((progress) => {
                    progressToRevert += progress;
                });
                incompleteProgress.clear();
                onProgress(-progressToRevert);
            }
        };

        const getBlockQueue = (startIndex = 1) => orderBy(blocks, 'Index').filter(({ Index }) => Index >= startIndex);

        let lastConsecutiveRetryTs = Date.now();

        // Downloads several blocks at once, but streams sequentially only one block at a time
        // Other blocks are put into buffer until previous blocks have finished downloading
        const startDownload = async (blockQueue: DriveFileBlock[], numRetries = 0) => {
            if (!blockQueue.length) {
                return [];
            }
            activeIndex = blockQueue[0].Index;

            let ongoingNumberOfDownloads = 0;

            const retryDownload = async (activeIndex: number, refetchBlocks = false) => {
                revertProgress();
                abortController = new AbortController();
                if (refetchBlocks) {
                    const result = await getBlocks(abortController.signal, {
                        FromBlockIndex: fromBlockIndex,
                        PageSize: BATCH_REQUEST_SIZE,
                    });
                    blocks = result.blocks;
                }

                let retryCount = 0;
                /*
                 * If download speed is too low, it might require several retries to cover
                 * the whole block page (an amount of attempts greater than the value of
                 * MAX_RETRIES_BEFORE_FAIL). For these cases retry count gets in consideration
                 * only within a certain timeframe defined by TIME_TO_RESET_RETRIES
                 */
                if (Date.now() - lastConsecutiveRetryTs < TIME_TO_RESET_RETRIES) {
                    retryCount = numRetries + 1;
                }
                lastConsecutiveRetryTs = Date.now();

                await waitUntil(() => paused === false && ongoingNumberOfDownloads === 0);
                await startDownload(getBlockQueue(activeIndex), retryCount);
            };

            const downloadQueue = blockQueue.map(({ Index, EncSignature, BareURL, Token }) => async () => {
                ongoingNumberOfDownloads++;
                try {
                    if (!buffers.get(Index)?.done) {
                        // Back-pressure: do not buffer more than MAX_DOWNLOADING_BLOCKS ahead.
                        await waitUntil(() => buffers.size < MAX_DOWNLOADING_BLOCKS || abortController.signal.aborted);

                        if (abortController.signal.aborted) {
                            throw new TransferCancel({ message: `Transfer canceled` });
                        }

                        const blockStream = await downloadBlockCallback(abortController, BareURL, Token);

                        // Observe raw (encrypted) bytes for progress reporting.
                        const progressStream = new ObserverStream((value) => {
                            if (abortController.signal.aborted) {
                                throw new TransferCancel({ message: `Transfer canceled` });
                            }
                            incompleteProgress.set(Index, (incompleteProgress.get(Index) ?? 0) + value.length);
                            onProgress?.(value.length);
                        });
                        const rawContentStream = blockStream.pipeThrough(progressStream);

                        // Decrypt the block; `hash` is used later for manifest verification.
                        const { hash, data: transformedContentStream } = await transformBlockStream(
                            abortController.signal,
                            rawContentStream,
                            EncSignature || ''
                        );

                        await untilStreamEnd(transformedContentStream, async (data) => {
                            if (abortController.signal.aborted) {
                                throw new TransferCancel({ message: `Transfer canceled` });
                            }
                            const buffer = buffers.get(Index);
                            if (buffer) {
                                buffer.chunks.push(data);
                            } else {
                                buffers.set(Index, { done: false, chunks: [data] });
                            }
                        });

                        const currentBuffer = buffers.get(Index);

                        // Block are indexed from 1 and we want to start from 0 for hashes
                        hashes[Index - 1] = hash;

                        if (currentBuffer) {
                            currentBuffer.done = true;
                        }
                    }

                    // Only the worker that just completed the active block flushes;
                    // it also flushes any later blocks that are already complete.
                    if (Index === activeIndex) {
                        let nextIndex = activeIndex;
                        // Flush buffers for subsequent complete blocks too
                        while (buffers.get(nextIndex)?.done) {
                            incompleteProgress.delete(nextIndex);
                            await flushBuffer(nextIndex);
                            nextIndex++;
                        }
                        // Assign next incomplete block as new active block
                        activeIndex = nextIndex;
                    }
                } finally {
                    ongoingNumberOfDownloads--;
                }
            });

            try {
                await runInQueue(downloadQueue, MAX_THREADS_PER_DOWNLOAD);
            } catch (e: any) {
                if (!paused) {
                    abortController.abort();
                    /*
                     * If a block gets expired, backend returns 404. In this case
                     * we need to request new blocks and restart the download
                     * from the active index
                     */
                    if (e.status === HTTP_STATUS_CODE.NOT_FOUND && numRetries < MAX_RETRIES_BEFORE_FAIL) {
                        console.warn(`Blocks for download might have expired. Retry num: ${numRetries}`);
                        // Wait for all blocks to be finished to have proper activeIndex.
                        await waitUntil(() => ongoingNumberOfDownloads === 0);
                        return retryDownload(activeIndex, true);
                    }

                    // If we experience some slight issue on server side, lets try
                    // one more time before notifying user in transfer manager.
                    // Be careful about too many attempts as that could be harmful
                    // for our servers - if we have traffic issue, retrying too
                    // many times could lead to longer downtime.
                    // Also, there is some issue mostly in Brave that during upload
                    // and download at the same time, we get weird network error
                    // (err::NET_FAILED with status code 200) which is also "fixed"
                    // by automatic retry.
                    const isSlightIssue =
                        getIsConnectionIssue(e) ||
                        // Unexpected end of packet is coming from crypto library
                        // if the data is not complete, e.g., server did not send
                        // the full block data, but only part of the block.
                        e.message === 'Unexpected end of packet';
                    if (isSlightIssue && numRetries < MAX_RETRIES_BEFORE_FAIL) {
                        console.warn(
                            `Connection issue for block #${activeIndex} download. Retry num: ${numRetries}. Error:`,
                            e
                        );
                        // Wait for all blocks to be finished to have proper activeIndex.
                        await waitUntil(() => ongoingNumberOfDownloads === 0);
                        // Do not refetch blocks. Its not needed at this stage, and
                        // also getting block information is not protected from
                        // connection issues for now.
                        return retryDownload(activeIndex);
                    }

                    // Sometimes the error can be thrown from untilStreamEnd,
                    // where its simple network error during reading the stream.
                    // Would be nice if this could be avoided after refactor.
                    if (onNetworkError && getIsConnectionIssue(e)) {
                        revertProgress();

                        // onNetworkError sets the state of the transfer and
                        // the transfer can be resumed right away--therefore,
                        // pausing has to be done first to avoid race (resuming
                        // sooner than pausing).
                        paused = true;
                        onNetworkError(e);

                        // Transfer can be resumed faster than ongoing block
                        // downloads are aborted. Therefore, first we need to
                        // wait for all downloads to be done to avoid flushing
                        // the same buffer more than once.
                        await waitUntil(() => paused === false && ongoingNumberOfDownloads === 0);
                        await startDownload(getBlockQueue(activeIndex));
                        return;
                    }

                    throw e;
                }

                // Paused: revert and wait for resume, then continue from activeIndex.
                revertProgress();
                await waitUntil(() => paused === false && ongoingNumberOfDownloads === 0);
                await startDownload(getBlockQueue(activeIndex));
            }
        };

        // Iterates remaining block pages (first page is fetched by run()).
        const downloadTheRestOfBlocks = async () => {
            while (hasMorePages(blocks.length)) {
                fromBlockIndex += BATCH_REQUEST_SIZE;

                if (await getBlocksPaged({ FromBlockIndex: fromBlockIndex, PageSize: BATCH_REQUEST_SIZE })) {
                    activeIndex = 1;
                    await startDownload(getBlockQueue());
                } else {
                    return false;
                }
            }
            return true;
        };

        const run = async () => {
            // Downloads initial page
            if (!(await getBlocksPaged({ FromBlockIndex: fromBlockIndex, PageSize: BATCH_REQUEST_SIZE }))) {
                return;
            }

            await fsWriter.ready;
            await startDownload(getBlockQueue());
            if (!(await downloadTheRestOfBlocks())) {
                return;
            }

            // Verify the signed manifest (thumbnail hashes first, then block hashes).
            await checkManifestSignature?.(
                abortController.signal,
                mergeUint8Arrays([...thumbnailHashes, ...hashes]),
                manifestSignature
            );

            // Wait for stream to be flushed
            await fsWriter.ready;
            await fsWriter.close();
        };

        void run()
            .then(() => {
                onFinish?.();
            })
            .catch((err) => {
                if (err?.data?.Code === RESPONSE_CODE.NOT_FOUND) {
                    err = new ValidationError(c('Info').t`File "${name}" was deleted during download`);
                }
                abortController.abort();
                fsWriter.abort(err).catch(logError);
                onError?.(err);
            });

        return fileStream.readable;
    };

    const cancel = () => {
        paused = false;
        abortController.abort();
        const error = new TransferCancel({ message: `Transfer canceled` });
        fsWriter.abort(error).catch(logError);
        onError?.(error);
    };

    const pause = async () => {
        paused = true;
        abortController.abort();

        // Wait for download to reset progress or be flushed
        await waitUntil(() => !incompleteProgress.size);
    };

    const resume = () => {
        abortController = new AbortController();
        paused = false;
    };

    const downloadControls: DownloadStreamControls = {
        start,
        cancel,
        pause,
        resume,
    };

    return downloadControls;
}
3,058
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadLinkFile.ts
// @ts-ignore missing `toStream` TS definitions
import { readToEnd, toStream } from '@openpgp/web-stream-tools';
import { ReadableStream } from 'web-streams-polyfill';

import { CryptoProxy, VERIFICATION_STATUS } from '@proton/crypto';
import { generateContentHash } from '@proton/shared/lib/keys/driveKeys';

import { DecryptFileKeys, DownloadCallbacks, DownloadStreamControls, LinkDownload } from '../interface';
import initDownloadBlocks from './downloadBlocks';

/**
 * initDownloadLinkFile prepares controls to download the provided file.
 * This expects only file blocks, not thumbnail block, thus detached
 * signature is required. To download thumbnail, use thumbnail helper.
 */
export default function initDownloadLinkFile(link: LinkDownload, callbacks: DownloadCallbacks): DownloadStreamControls {
    // Lazily fetched once and shared between block decryption and manifest check.
    let keysPromise: Promise<DecryptFileKeys> | undefined;

    // Decrypts one block: first decrypts the detached (encrypted) signature,
    // then decrypts + verifies the block content. Returns hash of the
    // encrypted block (for manifest verification) and decrypted data stream.
    const transformBlockStream = async (
        abortSignal: AbortSignal,
        stream: ReadableStream<Uint8Array>,
        encSignature: string
    ) => {
        if (!keysPromise) {
            keysPromise = callbacks.getKeys(abortSignal, link);
        }

        const keys = await keysPromise;
        const { data: decryptedSignature } = await CryptoProxy.decryptMessage({
            armoredMessage: encSignature,
            decryptionKeys: keys.privateKey,
            format: 'binary',
        });

        // Whole block is read into memory; hash is computed on the encrypted bytes.
        const binaryMessage = await readToEnd<Uint8Array>(stream);
        const hash = (await generateContentHash(binaryMessage)).BlockHash;

        const { data, verified } = await CryptoProxy.decryptMessage({
            binaryMessage,
            binarySignature: decryptedSignature,
            sessionKeys: keys.sessionKeys,
            verificationKeys: keys.addressPublicKeys,
            format: 'binary',
        });

        // Signature issues are reported, not fatal — the callback decides.
        if (verified !== VERIFICATION_STATUS.SIGNED_AND_VALID) {
            await callbacks.onSignatureIssue?.(abortSignal, link, { blocks: verified });
        }

        return {
            hash,
            data: toStream(data) as ReadableStream<Uint8Array>,
        };
    };

    // Verifies the armored signature over the concatenated block hashes.
    const checkManifestSignature = async (abortSignal: AbortSignal, hash: Uint8Array, signature: string) => {
        if (!keysPromise) {
            keysPromise = callbacks.getKeys(abortSignal, link);
        }
        const keys = await keysPromise;

        const { verified } = await CryptoProxy.verifyMessage({
            binaryData: hash,
            verificationKeys: keys.addressPublicKeys || [],
            armoredSignature: signature,
        });

        if (verified !== VERIFICATION_STATUS.SIGNED_AND_VALID) {
            await callbacks.onSignatureIssue?.(abortSignal, link, { manifest: verified });
        }
    };

    // Bind the link context into the generic block downloader.
    const controls = initDownloadBlocks(link.name, {
        ...callbacks,
        getBlocks: (abortSignal, pagination) =>
            callbacks.getBlocks(abortSignal, link.shareId, link.linkId, pagination, link.revisionId),
        transformBlockStream,
        checkManifestSignature,
        onProgress: (bytes: number) => callbacks.onProgress?.([link.linkId], bytes),
    });
    return {
        ...controls,
        // Report total size for this single link before starting the transfer.
        start: () => {
            const linkSizes = Object.fromEntries([[link.linkId, link.size]]);
            callbacks.onInit?.(link.size, linkSizes);
            return controls.start();
        },
    };
}
3,059
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadLinkFolder.test.ts
import { ChildrenLinkMeta, LinkDownload } from '../interface';
import { FolderTreeLoader } from './downloadLinkFolder';

type Tree = {
    [linkId: string]: Tree | number;
};

/**
 * Key is linkId and name.
 * Value is either
 * - object representing folder's children,
 * - or number representing file's size.
 */
const tree: Tree = {
    linkId: {
        a: {
            ab: {
                abc: {
                    '11': 123,
                    '12': 456,
                    '13': 789,
                },
                '21': 12,
                '22': 0,
            },
        },
        b: {
            bc: {
                bcd: {
                    bcde: {
                        '30': 147,
                        '31': 258,
                    },
                    '40': 963,
                },
            },
            bd: {
                '50': 2,
                '51': 3,
            },
        },
        '60': 7,
        '61': 9,
    },
};
// Sum of all file sizes in `tree` above.
const expectedTotalSize = 2769;

// GetChildrenCallback stub backed by the static `tree` fixture.
async function stubGetChildren(abortSignal: AbortSignal, shareId: string, linkId: string): Promise<ChildrenLinkMeta[]> {
    const subtree = getSubtree(linkId, tree);
    if (!subtree) {
        throw new Error(`Wrong folder linkId: ${linkId}`);
    }
    return Object.entries(subtree).map(([linkId, value]) => {
        // Numbers are files (value = size); objects are folders.
        const size = typeof value === 'number' ? value : undefined;
        return makeChildrenLinkMeta(linkId, size);
    });
}

// Depth-first search for the folder node keyed by `linkId`; undefined when
// `linkId` is not a folder in the tree.
function getSubtree(linkId: string, tree: Tree): Tree | undefined {
    for (const key in tree) {
        if (Object.prototype.hasOwnProperty.call(tree, key)) {
            const value = tree[key];
            if (typeof value === 'number') {
                continue;
            }
            if (key === linkId) {
                return value;
            }
            const found = getSubtree(linkId, value);
            if (found) {
                return found;
            }
        }
    }
}

// Builds a minimal ChildrenLinkMeta; `size === undefined` means folder.
function makeChildrenLinkMeta(linkId: string, size?: number): ChildrenLinkMeta {
    return {
        isFile: size !== undefined,
        linkId,
        name: linkId,
        mimeType: size !== undefined ? 'text/plain' : 'Folder',
        size: size || 0,
        signatureAddress: 'address',
        fileModifyTime: 1692962760,
    };
}

describe('FolderTreeLoader', () => {
    const linkDownload = { shareId: 'shareId', linkId: 'linkId' } as LinkDownload;

    it('calculates size', async () => {
        const folderTreeLoader = new FolderTreeLoader(linkDownload);
        const promise = folderTreeLoader.load(stubGetChildren);
        await expect(promise).resolves.toMatchObject({
            size: expectedTotalSize,
        });
    });

    it('iterates all childs', async () => {
        const folderTreeLoader = new FolderTreeLoader(linkDownload);
        // load() runs concurrently with iteration; iterateAllChildren ends
        // only once load() is done and the queue drained.
        void folderTreeLoader.load(stubGetChildren);
        const items = [];
        for await (const item of folderTreeLoader.iterateAllChildren()) {
            items.push(item);
        }
        // Iteration order is not guaranteed; sort for a stable comparison.
        items.sort((a, b) => a.name.localeCompare(b.name));
        expect(items).toMatchObject([
            { linkId: '11', parentPath: ['a', 'ab', 'abc'] },
            { linkId: '12', parentPath: ['a', 'ab', 'abc'] },
            { linkId: '13', parentPath: ['a', 'ab', 'abc'] },
            { linkId: '21', parentPath: ['a', 'ab'] },
            { linkId: '22', parentPath: ['a', 'ab'] },
            { linkId: '30', parentPath: ['b', 'bc', 'bcd', 'bcde'] },
            { linkId: '31', parentPath: ['b', 'bc', 'bcd', 'bcde'] },
            { linkId: '40', parentPath: ['b', 'bc', 'bcd'] },
            { linkId: '50', parentPath: ['b', 'bd'] },
            { linkId: '51', parentPath: ['b', 'bd'] },
            { linkId: '60', parentPath: [] },
            { linkId: '61', parentPath: [] },
            { linkId: 'a', parentPath: [] },
            { linkId: 'ab', parentPath: ['a'] },
            { linkId: 'abc', parentPath: ['a', 'ab'] },
            { linkId: 'b', parentPath: [] },
            { linkId: 'bc', parentPath: ['b'] },
            { linkId: 'bcd', parentPath: ['b', 'bc'] },
            { linkId: 'bcde', parentPath: ['b', 'bc', 'bcd'] },
            { linkId: 'bd', parentPath: ['b'] },
        ]);
    });
});
3,060
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadLinkFolder.ts
import { c } from 'ttag';

import { RESPONSE_CODE } from '@proton/shared/lib/drive/constants';
import { wait } from '@proton/shared/lib/helpers/promise';

import { TransferCancel } from '../../../components/TransferManager/transfer';
import { ValidationError } from '../../../utils/errorHandling/ValidationError';
import { WAIT_TIME } from '../constants';
import {
    ChildrenLinkMeta,
    DownloadCallbacks,
    DownloadStreamControls,
    GetChildrenCallback,
    LinkDownload,
    OnProgressCallback,
    OnSignatureIssueCallback,
} from '../interface';
import ArchiveGenerator from './archiveGenerator';
import ConcurrentIterator from './concurrentIterator';
import { NestedLinkDownload } from './interface';

// Aggregated result of loading a folder tree: total byte size plus
// per-link sizes keyed by linkId.
type FolderLoadInfo = {
    size: number;
    linkSizes: { [linkId: string]: number };
};

/**
 * initDownloadLinkFolder prepares controls to download archive of the folder.
 * The folder itself is not part of the archive, all childs are in the root
 * of the archive.
 */
export default function initDownloadLinkFolder(
    link: LinkDownload,
    callbacks: DownloadCallbacks
): DownloadStreamControls {
    const folderLoader = new FolderTreeLoader(link);
    const concurrentIterator = new ConcurrentIterator();
    const archiveGenerator = new ArchiveGenerator();

    const start = () => {
        // Tree loading runs concurrently with archiving; sizes are reported
        // via onInit once the whole tree has been traversed.
        folderLoader
            .load(callbacks.getChildren, callbacks.onSignatureIssue, callbacks.onProgress)
            .then(({ size, linkSizes }) => {
                linkSizes[link.linkId] = size;
                callbacks.onInit?.(size, linkSizes);
            })
            .catch((err) => {
                callbacks.onError?.(err);
                archiveGenerator.cancel();
            });
        // Links discovered by the loader are streamed into the archive as
        // they come, downloaded with limited concurrency.
        const childrenIterator = folderLoader.iterateAllChildren();
        const linksWithStreamsIterator = concurrentIterator.iterate(childrenIterator, callbacks);
        archiveGenerator
            .writeLinks(linksWithStreamsIterator)
            .then(() => {
                callbacks.onFinish?.();
            })
            .catch((err) => {
                callbacks.onError?.(err);
                archiveGenerator.cancel();
            });
        return archiveGenerator.stream;
    };

    return {
        start,
        pause: () => concurrentIterator.pause(),
        resume: () => concurrentIterator.resume(),
        cancel: () => {
            folderLoader.cancel();
            archiveGenerator.cancel();
            concurrentIterator.cancel();
        },
    };
}

/**
 * FolderTreeLoader loads recursively the whole tree and iterates over
 * all links with provided parent path for each of them.
 */
export class FolderTreeLoader {
    private rootLink: LinkDownload;

    // True once load() finished traversing the whole tree (or cancel() was called).
    private done: boolean;

    // Queue of discovered links, consumed by iterateAllChildren().
    private links: NestedLinkDownload[];

    private abortController: AbortController;

    constructor(link: LinkDownload) {
        this.rootLink = link;
        this.done = false;
        this.links = [];
        this.abortController = new AbortController();
    }

    // Traverses the whole tree under the root link; resolves with total size
    // and per-link sizes. Discovered links are pushed to the internal queue
    // as a side effect so iterateAllChildren() can run concurrently.
    async load(
        getChildren: GetChildrenCallback,
        onSignatureIssue?: OnSignatureIssueCallback,
        onProgress?: OnProgressCallback
    ): Promise<FolderLoadInfo> {
        const result = await this.loadHelper(
            this.rootLink,
            [this.rootLink.linkId],
            getChildren,
            onSignatureIssue,
            onProgress
        );
        this.done = true;
        return result;
    }

    // Recursive traversal of one folder. `parentLinkIds` is the chain of
    // ancestor link IDs (incl. root), `parent` is the chain of ancestor names
    // used as the path inside the archive.
    private async loadHelper(
        link: LinkDownload,
        parentLinkIds: string[],
        getChildren: GetChildrenCallback,
        onSignatureIssue?: OnSignatureIssueCallback,
        onProgress?: OnProgressCallback,
        parent: string[] = []
    ): Promise<FolderLoadInfo> {
        if (this.abortController.signal.aborted) {
            throw new TransferCancel({ message: `Transfer canceled` });
        }

        if (link.signatureIssues) {
            await onSignatureIssue?.(this.abortController.signal, link, link.signatureIssues);
        }

        const shareId = link.shareId;
        const children = await getChildren(this.abortController.signal, link.shareId, link.linkId).catch((err) => {
            if (err?.data?.Code === RESPONSE_CODE.NOT_FOUND) {
                err = new ValidationError(c('Info').t`Folder "${link.name}" was deleted during download`);
            }
            throw err;
        });
        // Publish all direct children to the iteration queue.
        this.links = [
            ...this.links,
            ...children.map((link) => ({
                parentLinkIds: parentLinkIds,
                parentPath: parent,
                isFile: link.isFile,
                shareId,
                linkId: link.linkId,
                name: link.name,
                mimeType: link.mimeType,
                size: link.size,
                fileModifyTime: link.fileModifyTime,
                signatureAddress: link.signatureAddress,
                signatureIssues: link.signatureIssues,
            })),
        ];
        // Recurse into subfolders in parallel; files contribute their size directly.
        return Promise.all(
            children.map(async (item: ChildrenLinkMeta) => {
                // To get link into progresses right away so potentially loader can be displayed.
                onProgress?.([...parentLinkIds, item.linkId], 0);
                if (!item.isFile) {
                    const result = await this.loadHelper(
                        { ...item, shareId },
                        [...parentLinkIds, item.linkId],
                        getChildren,
                        onSignatureIssue,
                        onProgress,
                        [...parent, item.name]
                    );
                    result.linkSizes[item.linkId] = result.size;
                    return result;
                }
                return { size: item.size, linkSizes: Object.fromEntries([[item.linkId, item.size]]) };
            })
        ).then((results: FolderLoadInfo[]) => {
            const size = results.reduce((total, { size }) => total + size, 0);
            const linkSizes = results.reduce((sum, { linkSizes }) => ({ ...sum, ...linkSizes }), {});
            return {
                size,
                linkSizes,
            };
        });
    }

    // Yields links as they are discovered by load(); polls the queue every
    // WAIT_TIME while the traversal is still running.
    async *iterateAllChildren(): AsyncGenerator<NestedLinkDownload> {
        while (!this.done || this.links.length > 0) {
            const link = this.links.shift();
            if (link) {
                yield link;
            } else {
                await wait(WAIT_TIME);
            }
        }
    }

    cancel() {
        // Marking done stops iterateAllChildren; abort stops the traversal.
        this.done = true;
        this.abortController.abort();
    }
}
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadLinks.ts
import { sendErrorReport } from '../../../utils/errorHandling'; import { DownloadCallbacks, DownloadStreamControls, GetChildrenCallback, LinkDownload, OnInitCallback, OnProgressCallback, OnSignatureIssueCallback, } from '../interface'; import ArchiveGenerator from './archiveGenerator'; import ConcurrentIterator from './concurrentIterator'; import { FolderTreeLoader } from './downloadLinkFolder'; import { NestedLinkDownload } from './interface'; /** * initDownloadLinks prepares controls to download archive of passed `links`. * All links are in the root of the generated archive. */ export default function initDownloadLinks(links: LinkDownload[], callbacks: DownloadCallbacks): DownloadStreamControls { const folderLoaders: Map<String, FolderTreeLoader> = new Map(); const concurrentIterator = new ConcurrentIterator(); const archiveGenerator = new ArchiveGenerator(); const start = () => { // To get link into progresses right away so potentially loader can be displayed. callbacks.onProgress?.( links.map(({ linkId }) => linkId), 0 ); loadTotalSize( links, folderLoaders, callbacks.getChildren, callbacks.onInit, callbacks.onSignatureIssue, callbacks.onProgress ); const linksIterator = iterateAllLinks(links, folderLoaders); const linksWithStreamsIterator = concurrentIterator.iterate(linksIterator, callbacks); archiveGenerator .writeLinks(linksWithStreamsIterator) .then(() => { callbacks.onFinish?.(); }) .catch((err) => { callbacks.onError?.(err); archiveGenerator.cancel(); }); return archiveGenerator.stream; }; return { start, pause: () => concurrentIterator.pause(), resume: () => concurrentIterator.resume(), cancel: () => { Array.from(folderLoaders.values()).forEach((folderLoader) => folderLoader.cancel()); archiveGenerator.cancel(); concurrentIterator.cancel(); }, }; } function loadTotalSize( links: LinkDownload[], folderLoaders: Map<String, FolderTreeLoader>, getChildren: GetChildrenCallback, onInit?: OnInitCallback, onSignatureIssue?: OnSignatureIssueCallback, 
onProgress?: OnProgressCallback ) { const sizePromises = links.map(async (link) => { if (link.isFile) { return { size: link.size, linkSizes: Object.fromEntries([[link.linkId, link.size]]) }; } const folderLoader = new FolderTreeLoader(link); folderLoaders.set(link.shareId + link.linkId, folderLoader); const result = await folderLoader.load(getChildren, onSignatureIssue, onProgress); result.linkSizes[link.linkId] = result.size; return result; }); Promise.all(sizePromises) .then((results) => { const size = results.reduce((total, { size }) => total + size, 0); const linkSizes = results.reduce((sum, { linkSizes }) => ({ ...sum, ...linkSizes }), {}); onInit?.(size, linkSizes); }) .catch(sendErrorReport); } async function* iterateAllLinks( links: LinkDownload[], folderLoaders: Map<String, FolderTreeLoader> ): AsyncGenerator<NestedLinkDownload> { for (const link of links) { yield { parentLinkIds: [], parentPath: [], ...link, }; if (!link.isFile) { const f = folderLoaders.get(link.shareId + link.linkId) as FolderTreeLoader; for await (const childLink of f.iterateAllChildren()) { yield { ...childLink, parentPath: [link.name, ...childLink.parentPath], }; } } } }
3,062
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/downloadThumbnail.ts
// @ts-ignore missing `toStream` TS defs import { readToEnd, toStream } from '@openpgp/web-stream-tools'; import { ReadableStream } from 'web-streams-polyfill'; import { CryptoProxy, VERIFICATION_STATUS } from '@proton/crypto'; import { streamToBuffer } from '../../../utils/stream'; import { DecryptFileKeys } from '../interface'; import downloadBlock from './downloadBlock'; type GetKeysCallback = () => Promise<DecryptFileKeys>; export default async function downloadThumbnail(url: string, token: string, getKeys: GetKeysCallback) { const abortController = new AbortController(); const stream = await downloadBlock(abortController, url, token); const { data: decryptedStream, verifiedPromise } = await decryptThumbnail(stream, getKeys); const thumbnailData = streamToBuffer(decryptedStream); return { abortController, contents: thumbnailData, verifiedPromise, }; } async function decryptThumbnail( stream: ReadableStream<Uint8Array>, getKeys: GetKeysCallback ): Promise<{ data: ReadableStream<Uint8Array>; verifiedPromise: Promise<VERIFICATION_STATUS> }> { const { sessionKeys, addressPublicKeys } = await getKeys(); const { data, verified } = await CryptoProxy.decryptMessage({ binaryMessage: await readToEnd(stream), sessionKeys, verificationKeys: addressPublicKeys, format: 'binary', }); return { data: toStream(data) as ReadableStream<Uint8Array>, verifiedPromise: Promise.resolve(verified), // TODO lara/michal: refactor this since we no longer use streaming on decryption, hence verified is no longer a promise }; }
3,063
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/download/interface.ts
import { ReadableStream } from 'web-streams-polyfill'; import { LinkDownload } from '../interface'; export type NestedLinkDownload = LinkDownload & { parentLinkIds: string[]; parentPath: string[]; }; export type StartedNestedLinkDownload = | { isFile: false; name: string; parentPath: string[]; } | { isFile: true; name: string; parentPath: string[]; stream: ReadableStream<Uint8Array>; fileModifyTime?: number; };
3,064
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/fileSaver/download.ts
import { WritableStream } from 'web-streams-polyfill';

import { PUBLIC_PATH } from '@proton/shared/lib/constants';
import { isEdge, isEdgeChromium, isIos, isSafari } from '@proton/shared/lib/helpers/browser';
import { stripLeadingAndTrailingSlash } from '@proton/shared/lib/helpers/string';

import { TransferMeta } from '../../../components/TransferManager/transfer';

/**
 * Safari and Edge don't support returning stream as a response.
 * Safari - has everything but fails to stream a response from SW.
 * Edge - doesn't support ReadableStream() constructor, but supports it in chromium version.
 * IOS - forces all browsers to use webkit, so same problems as safari in all browsers.
 * For them download is done in-memory using blob response.
 */
export const isUnsupported = () =>
    !('serviceWorker' in navigator) || isSafari() || (isEdge() && !isEdgeChromium()) || isIos();

// createDownloadIframe opens download URL created in service worker to
// initialize the download in the browser. The response has headers to
// act as the download. See DownloadServiceWorker for more info.
function createDownloadIframe(src: string) {
    const iframe = document.createElement('iframe');
    iframe.hidden = true;
    iframe.src = src;
    iframe.name = 'iframe';
    document.body.appendChild(iframe);
    return iframe;
}

// Pings the service worker so it stays alive. When there is no controller
// yet, pings via HTTP instead (the SW intercepts /sw/ping).
async function wakeUpServiceWorker() {
    const worker = navigator.serviceWorker.controller;

    if (worker) {
        worker.postMessage({ action: 'ping' });
    } else {
        const url = [
            document.location.href.substring(0, document.location.href.indexOf('/')),
            stripLeadingAndTrailingSlash(PUBLIC_PATH),
            'sw/ping',
        ]
            .filter(Boolean)
            .join('/');
        const res = await fetch(url);
        const body = await res.text();
        if (!res.ok || body !== 'pong') {
            throw new Error('Download worker is dead');
        }
    }
    // NOTE(review): when the fetch branch above is taken, `worker` is null but
    // is still returned cast as ServiceWorker — callers using postMessage on
    // it would throw. Presumably the controller is expected to exist by then;
    // verify against openDownloadStream's usage.
    return worker as ServiceWorker;
}

// Periodically pings the service worker; stops pinging once a ping fails.
function serviceWorkerKeepAlive() {
    const interval = setInterval(() => {
        wakeUpServiceWorker().catch(() => clearInterval(interval));
    }, 10000);
}

// Registers the download service worker and starts the keep-alive loop.
// Throws on browsers that cannot stream a response from a SW (see isUnsupported).
export async function initDownloadSW() {
    if (isUnsupported()) {
        throw new Error('Saving file via download is unsupported by this browser');
    }

    await navigator.serviceWorker.register(
        /* webpackChunkName: "downloadSW" */
        new URL('./downloadSW', import.meta.url),
        {
            scope: `/${stripLeadingAndTrailingSlash(PUBLIC_PATH)}`,
        }
    );
    serviceWorkerKeepAlive();
}

/**
 * Opens download stream into service worker. Use abort signal when pipeTo can't close the download stream.
 */
export async function openDownloadStream(
    meta: TransferMeta,
    { onCancel, abortSignal }: { onCancel: () => void; abortSignal?: AbortSignal }
) {
    const channel = new MessageChannel();
    // Writable side the app writes decrypted chunks into; each chunk is
    // forwarded to the SW over the message channel.
    const stream = new WritableStream({
        write(block: Uint8Array) {
            channel.port1.postMessage({ action: 'download_chunk', payload: block });
        },
        close() {
            channel.port1.postMessage({ action: 'end' });
        },
        abort(reason) {
            channel.port1.postMessage({ action: 'abort', reason: String(reason) });
        },
    });

    if (abortSignal) {
        abortSignal.addEventListener('abort', () => {
            channel.port1.postMessage({ action: 'abort', reason: 'Download stream aborted' });
        });
    }

    const worker = await wakeUpServiceWorker();

    // Channel to stream file contents through
    channel.port1.onmessage = ({ data }) => {
        if (data?.action === 'download_canceled') {
            onCancel();
        } else if (data?.action === 'download_started') {
            createDownloadIframe(data.payload);
        }
    };
    worker.postMessage({ action: 'start_download', payload: meta }, [channel.port2]);

    return stream;
}
3,065
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/fileSaver/downloadSW.ts
declare const self: ServiceWorkerGlobalScope;

// One pending download: the data stream plus the response metadata used
// to build the download headers.
interface DownloadConfig {
    stream: ReadableStream<Uint8Array>;
    filename: string;
    mimeType: string;
    size?: number;
}

// Hardening headers attached to every response served by this worker.
const SECURITY_HEADERS = {
    'Content-Security-Policy': "default-src 'none'",
    'X-Content-Security-Policy': "default-src 'none'",
    'X-WebKit-CSP': "default-src 'none'",
    'Referrer-Policy': 'strict-origin-when-cross-origin',
    'Strict-Transport-Security': 'max-age=31536000',
    'X-Content-Type-Options': 'nosniff',
    'X-Frame-Options': 'deny',
    'X-XSS-Protection': '1; mode=block',
    'X-Permitted-Cross-Domain-Policies': 'none',
};

/**
 * Open a stream of data passed over MessageChannel.
 * Every download has its own stream from app to SW.
 *
 * @param port MessageChannel port to listen on
 */
function createDownloadStream(port: MessagePort) {
    return new ReadableStream({
        start(controller: ReadableStreamDefaultController) {
            port.onmessage = ({ data }) => {
                switch (data?.action) {
                    case 'end':
                        return controller.close();
                    case 'download_chunk':
                        return controller.enqueue(data?.payload);
                    case 'abort':
                        return controller.error(data?.reason);
                    default:
                        console.error(`received unknown action "${data?.action}"`);
                }
            };
        },
        cancel() {
            // Tell the app side the browser canceled the download.
            port.postMessage({ action: 'download_canceled' });
        },
    });
}

/**
 * Service worker that listens for client-generated file data
 * and generates a unique link for downloading the data as a file stream.
 */
class DownloadServiceWorker {
    pendingDownloads = new Map<string, DownloadConfig>();

    /**
     * A counter used to generate IDs for `pendingDownloads`
     */
    downloadId = 1;

    constructor() {
        self.addEventListener('install', this.onInstall);
        self.addEventListener('activate', this.onActivate);
        self.addEventListener('message', this.onMessage);
        self.addEventListener('fetch', this.onFetch);
    }

    // Wraps around at 9000 so IDs stay small; collisions are avoided because
    // each pending download is removed from the map when fetched.
    private generateUID = () => {
        if (this.downloadId > 9000) {
            this.downloadId = 0;
        }
        return this.downloadId++;
    };

    onInstall = () => {
        void self.skipWaiting();
    };

    onActivate = (event: ExtendableEvent) => {
        event.waitUntil(self.clients.claim());
    };

    /**
     * Intercepts requests on the generated download url
     * and responds with a stream, that client itself controls.
     */
    onFetch = (event: FetchEvent) => {
        const url = new URL(event.request.url);

        // Our service worker is registered on the global scope
        // We currently only care about the /sw/* scope
        if (!url.pathname.startsWith('/sw')) {
            return;
        }

        // The main thread periodically wakes up the service worker with a ping
        if (url.pathname.endsWith('/sw/ping')) {
            return event.respondWith(new Response('pong', { headers: new Headers(SECURITY_HEADERS) }));
        }

        // URL format: /sw/ID
        const chunks = url.pathname.split('/').filter((item) => !!item);
        const id = chunks[chunks.length - 1];

        const pendingDownload = this.pendingDownloads.get(id);

        // Return a 404 if we can't find the download.
        // In some cases, the download ID is not added to the map on time.
        // If we were to simply return, this query would get sent to the real network.
        if (!pendingDownload) {
            return event.respondWith(
                new Response(undefined, {
                    status: 404,
                    headers: new Headers(SECURITY_HEADERS),
                })
            );
        }

        const { stream, filename, size, mimeType } = pendingDownload;
        // One-shot: the URL can only be fetched once.
        this.pendingDownloads.delete(id);

        const headers = new Headers({
            ...(size ? { 'Content-Length': `${size}` } : {}),
            'Content-Type': mimeType,
            'Content-Disposition': `attachment; filename="${encodeURIComponent(filename)}"`,
            ...SECURITY_HEADERS,
        });

        event.respondWith(new Response(stream, { headers }));
    };

    /**
     * Called once before each download, opens a stream for file data
     * and generates a unique download link for the app to call to download file.
     */
    onMessage = (event: ExtendableMessageEvent) => {
        if (event.data?.action !== 'start_download') {
            return;
        }

        const id = this.generateUID();
        const { filename, mimeType, size } = event.data.payload;
        const downloadUrl = new URL(`/sw/${id}`, self.registration.scope);
        const port = event.ports[0];

        this.pendingDownloads.set(`${id}`, {
            stream: createDownloadStream(port),
            filename,
            mimeType,
            size,
        });
        // Hand the one-shot URL back to the app; it opens it in a hidden iframe.
        port.postMessage({ action: 'download_started', payload: downloadUrl.toString() });
    };
}

export default new DownloadServiceWorker();
3,066
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_downloads/fileSaver/fileSaver.ts
import { ReadableStream } from 'web-streams-polyfill'; import { MEMORY_DOWNLOAD_LIMIT } from '@proton/shared/lib/drive/constants'; import downloadFile from '@proton/shared/lib/helpers/downloadFile'; import { TransferCancel, TransferMeta } from '../../../components/TransferManager/transfer'; import { isValidationError } from '../../../utils/errorHandling/ValidationError'; import { streamToBuffer } from '../../../utils/stream'; import { isTransferCancelError } from '../../../utils/transfer'; import { initDownloadSW, openDownloadStream } from './download'; // FileSaver provides functionality to start download to file. This class does // not deal with API or anything else. Files which fit the memory (see // MEMORY_DOWNLOAD_LIMIT constant) are buffered in browser and then saved in // one go. Bigger files are streamed and user can see the progress almost like // it would be normal file. See saveViaDownload for more info. class FileSaver { private useBlobFallback = false; constructor() { initDownloadSW().catch((error) => { this.useBlobFallback = true; console.warn('Saving file will fallback to in-memory downloads:', error.message); }); } // saveViaDownload uses service workers to download file without need to // buffer the whole content in memory and open the download in browser as // is done for regular files. To do this, using service worker is used // local new address where is streamed the download and that address is // opened as hidden iframe in the main page to start download in browser. // To have this working, service workers and option to return stream as // the response is needed, which is not supported by all browsers. See // isUnsupported in download.ts for more info. When the support is missing, // it falls back to buffered download. // Ideally, once we update to openpgpjs v5 with custom web workers, would // be great if we could merge this to the same worker (but note the // difference between web and service worker) to reduce data exchanges. 
private async saveViaDownload(stream: ReadableStream<Uint8Array>, meta: TransferMeta) { if (this.useBlobFallback) { return this.saveViaBuffer(stream, meta); } try { const abortController = new AbortController(); const saveStream = await openDownloadStream(meta, { onCancel: () => abortController.abort() }); await new Promise((resolve, reject) => { abortController.signal.addEventListener('abort', () => { reject(new TransferCancel({ message: `Transfer canceled` })); }); stream.pipeTo(saveStream, { preventCancel: true }).then(resolve).catch(reject); }); } catch (err: any) { if (!isTransferCancelError(err)) { console.warn('Failed to save file via download, falling back to in-memory download:', err); await this.saveViaBuffer(stream, meta); } throw err; } } // saveViaBuffer reads the stream and downloads the file in one go. // eslint-disable-next-line class-methods-use-this private async saveViaBuffer(stream: ReadableStream<Uint8Array>, meta: TransferMeta) { try { const chunks = await streamToBuffer(stream); downloadFile(new Blob(chunks, { type: meta.mimeType }), meta.filename); } catch (err: any) { if (!isTransferCancelError(err)) { if (isValidationError(err)) { throw err; } throw new Error(`Download failed: ${err.message || err}`, { cause: err }); } } } async saveAsFile(stream: ReadableStream<Uint8Array>, meta: TransferMeta) { if (meta.size && meta.size < MEMORY_DOWNLOAD_LIMIT) { return this.saveViaBuffer(stream, meta); } return this.saveViaDownload(stream, meta); } isFileTooBig(size: number) { return this.useBlobFallback && size > MEMORY_DOWNLOAD_LIMIT; } } export default new FileSaver();
3,067
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_events/index.ts
export * from './interface'; export { DriveEventManagerProvider, useDriveEventManager } from './useDriveEventManager';
3,068
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_events/interface.ts
import { EVENT_TYPES } from '@proton/shared/lib/drive/constants'; import { EncryptedLink } from '../_links'; export type EventHandler = (volumeId: string, events: DriveEvents) => Promise<void> | void; export interface DriveEvents { eventId: string; events: DriveEvent[]; refresh: boolean; } export type DriveEvent = { eventType: EVENT_TYPES; encryptedLink: EncryptedLink; originShareId?: string; };
3,069
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_events/useDriveEventManager.test.ts
import { act } from 'react-dom/test-utils'; import { RenderResult, renderHook } from '@testing-library/react-hooks'; import { EVENT_TYPES } from '@proton/shared/lib/drive/constants'; import createEventManager, { EventManager } from '@proton/shared/lib/eventManager/eventManager'; import { Api } from '@proton/shared/lib/interfaces'; import { DriveEventsResult } from '@proton/shared/lib/interfaces/drive/events'; import { driveEventsResultToDriveEvents } from '../_api'; import { useDriveEventManagerProvider } from './useDriveEventManager'; const VOLUME_ID_1 = 'volumeId-1'; const VOLUME_ID_2 = 'volumeId-2'; const SHARE_1_EVENT = { EventType: EVENT_TYPES.CREATE, Link: { LinkID: 'linkId' }, ContextShareID: 'shareId-1', }; const SHARE_2_EVENT = { EventType: EVENT_TYPES.CREATE, Link: { LinkID: 'linkId' }, ContextShareID: 'shareId-2', }; const EVENT_PAYLOAD = { EventID: 'event-id-1', Events: [SHARE_1_EVENT, SHARE_2_EVENT], Refresh: 0, More: 0, } as DriveEventsResult; const apiMock = jest.fn().mockImplementation(() => Promise.resolve(EVENT_PAYLOAD)); describe('useDriveEventManager', () => { let eventManager: EventManager; let hook: RenderResult<ReturnType<typeof useDriveEventManagerProvider>>; const renderTestHook = () => { const { result } = renderHook(() => useDriveEventManagerProvider(apiMock as Api, eventManager)); return result; }; beforeEach(() => { apiMock.mockClear(); eventManager = createEventManager({ api: apiMock, eventID: '1' }); hook = renderTestHook(); }); afterEach(() => { hook.current.clear(); }); it('subscribes to a share by id', async () => { await act(async () => { await hook.current.volumes.startSubscription(VOLUME_ID_1); expect(hook.current.getSubscriptionIds()).toEqual([VOLUME_ID_1]); await hook.current.volumes.startSubscription(VOLUME_ID_2); expect(hook.current.getSubscriptionIds()).toEqual([VOLUME_ID_1, VOLUME_ID_2]); }); }); it('unsubscribes from shares by id', async () => { await act(async () => { await 
hook.current.volumes.startSubscription(VOLUME_ID_1); await hook.current.volumes.startSubscription(VOLUME_ID_2); hook.current.volumes.unsubscribe(VOLUME_ID_2); expect(hook.current.getSubscriptionIds()).toEqual([VOLUME_ID_1]); }); }); it('registers event handlers', async () => { const handler = jest.fn().mockImplementation(() => {}); await act(async () => { await hook.current.volumes.startSubscription(VOLUME_ID_1); hook.current.eventHandlers.register(handler); await hook.current.pollEvents.volumes([VOLUME_ID_1]); expect(handler).toBeCalledTimes(1); }); }); it('registers multiple event handlers for one share', async () => { const handler = jest.fn().mockImplementation(() => {}); const handler2 = jest.fn().mockImplementation(() => {}); await act(async () => { await hook.current.volumes.startSubscription(VOLUME_ID_1); hook.current.eventHandlers.register(handler); hook.current.eventHandlers.register(handler2); await hook.current.pollEvents.volumes([VOLUME_ID_1]); expect(handler).toBeCalledTimes(1); expect(handler2).toBeCalledTimes(1); }); }); it('registers event handlers for multiple shares', async () => { const handler = jest.fn().mockImplementation(() => {}); await act(async () => { await hook.current.volumes.startSubscription(VOLUME_ID_2); await hook.current.volumes.startSubscription(VOLUME_ID_1); hook.current.eventHandlers.register(handler); await hook.current.pollEvents.volumes(VOLUME_ID_1); await hook.current.pollEvents.volumes(VOLUME_ID_2); expect(handler).toBeCalledTimes(2); }); }); it('removes handlers', async () => { const handler = jest.fn().mockImplementation(() => {}); const handler2 = jest.fn().mockImplementation(() => {}); await act(async () => { await hook.current.volumes.startSubscription(VOLUME_ID_1); hook.current.eventHandlers.register(handler); const handlerId = hook.current.eventHandlers.register(handler2); hook.current.eventHandlers.unregister(handlerId); await hook.current.pollEvents.volumes(VOLUME_ID_1); expect(handler).toBeCalledTimes(1); 
expect(handler2).toBeCalledTimes(0); }); }); it('polls event', async () => { await act(async () => { await hook.current.volumes.startSubscription(VOLUME_ID_1); await hook.current.pollEvents.volumes(VOLUME_ID_1); expect(apiMock).toBeCalledTimes(2); // fetching events + poll itself }); }); it("can poll events for all shares it's subscribed to", async () => { const handler = jest.fn().mockImplementation(() => {}); await act(async () => { await hook.current.volumes.startSubscription(VOLUME_ID_1); await hook.current.volumes.startSubscription(VOLUME_ID_2); hook.current.eventHandlers.register(handler); await hook.current.pollEvents.driveEvents(); expect(handler).toBeCalledTimes(2); expect(handler).toBeCalledWith(VOLUME_ID_1, driveEventsResultToDriveEvents(EVENT_PAYLOAD)); expect(handler).toBeCalledWith(VOLUME_ID_2, driveEventsResultToDriveEvents(EVENT_PAYLOAD)); }); }); });
3,070
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_events/useDriveEventManager.tsx
import { ReactNode, createContext, useContext, useRef } from 'react'; import { generateUID, useApi, useEventManager } from '@proton/components'; import { queryLatestVolumeEvent, queryVolumeEvents } from '@proton/shared/lib/api/drive/volume'; import createEventManager, { EventManager } from '@proton/shared/lib/eventManager/eventManager'; import { captureMessage } from '@proton/shared/lib/helpers/sentry'; import { Api } from '@proton/shared/lib/interfaces'; import { DriveEventsResult } from '@proton/shared/lib/interfaces/drive/events'; import { logError } from '../../utils/errorHandling'; import { driveEventsResultToDriveEvents } from '../_api'; import { EventHandler } from './interface'; const DRIVE_EVENT_HANDLER_ID_PREFIX = 'drive-event-handler'; const DRIVE_EVENT_MANAGER_FUNCTIONS_STUB = { getSubscriptionIds: () => [], clear: () => undefined, eventHandlers: { register: () => 'id', unregister: () => false, }, volumes: { startSubscription: () => { throw Error('Usage of uninitialized DriveEventManager!'); }, pauseSubscription: () => {}, unsubscribe: () => {}, }, pollEvents: { volumes: () => Promise.resolve(), driveEvents: () => Promise.resolve(), }, }; export function useDriveEventManagerProvider(api: Api, generalEventManager: EventManager) { const eventHandlers = useRef(new Map<string, EventHandler>()); const eventManagers = useRef(new Map<string, EventManager>()); const genericHandler = (volumeId: string, driveEvents: DriveEventsResult) => { if (!driveEvents.Events?.length) { return; } const handlerPromises: unknown[] = []; eventHandlers.current.forEach((handler) => { handlerPromises.push(handler(volumeId, driveEventsResultToDriveEvents(driveEvents))); }); /* forcing .poll function's returned Promise to be resolved *after* event processin is finished */ return Promise.all(handlerPromises); }; const createVolumeEventManager = async (volumeId: string) => { const { EventID } = await api<{ EventID: string }>(queryLatestVolumeEvent(volumeId)); const eventManager = 
createEventManager({ api, eventID: EventID, query: (eventId: string) => queryVolumeEvents(volumeId, eventId), }); eventManagers.current.set(volumeId, eventManager); return eventManager; }; /** * Creates event manager for a specified volume and starts interval polling of event. */ const subscribeToVolume = async (volumeId: string) => { const eventManager = await createVolumeEventManager(volumeId); eventManager.subscribe((payload: DriveEventsResult) => genericHandler(volumeId, payload)); eventManagers.current.set(volumeId, eventManager); }; /** * Creates an event manager for a specified volume if doesn't exist, * and starts event polling */ const startVolumeSubscription = async (volumeId: string) => { if (!eventManagers.current.get(volumeId)) { await subscribeToVolume(volumeId); } eventManagers.current.get(volumeId)!.start(); }; /** * Pauses event polling for the volume. Returns false if there's no event manager * associated with the volumeId */ const pauseVolumeSubscription = (volumeId: string): boolean => { const volumeSubscription = eventManagers.current.get(volumeId); if (volumeSubscription) { volumeSubscription.stop(); return true; } return false; }; /** * Stops event listening, empties handlers and clears reference to the event manager */ const unsubscribeFromVolume = (volumeId: string): boolean => { eventManagers.current.get(volumeId)?.reset(); return eventManagers.current.delete(volumeId); }; /** * Polls drive events for a volume * @private */ const pollVolume = async (volumeId: string): Promise<void> => { const eventManager = eventManagers.current.get(volumeId); if (!eventManager) { captureMessage('Trying to call non-existing event manager'); return; } await eventManager.call().catch(logError); }; /** * Polls events for specified list of volumes */ const pollVolumeEvents = async ( volumeIds: string | string[], params: { includeCommon: boolean } = { includeCommon: false } ) => { const volumeIdsArray = Array.isArray(volumeIds) ? 
volumeIds : [volumeIds]; const pollingTasks = []; if (params.includeCommon) { pollingTasks.push(generalEventManager.call()); } pollingTasks.push(...volumeIdsArray.map((volumeId) => pollVolume(volumeId))); await Promise.all(pollingTasks).catch(logError); }; /** * Polls drive events for all subscribed volumes */ const pollDriveEvents = async (params: { includeCommon: boolean } = { includeCommon: false }): Promise<void> => { const pollingPromises: Promise<unknown>[] = []; if (params.includeCommon) { pollingPromises.push(generalEventManager.call()); } eventManagers.current.forEach((eventManager) => { pollingPromises.push(eventManager.call()); }); await Promise.all(pollingPromises).catch(logError); }; /** * Registers passed event handler to process currenlty active share subscriptions by specific id */ const registerEventHandlerById = (id: string, callback: EventHandler): string => { eventHandlers.current.set(id, callback); return id; }; /** * Registers passed event handler to process currenlty active share subscriptions */ const registerEventHandler = (callback: EventHandler): string => { const callbackUID = generateUID(DRIVE_EVENT_HANDLER_ID_PREFIX); return registerEventHandlerById(callbackUID, callback); }; /** * Removes event handler */ const unregisterEventHandler = (callbackId: string): boolean => { return eventHandlers.current.delete(callbackId); }; /** * List share ids which event manager subscribed to */ const getSubscriptionIds = (): string[] => { return Array.from(eventManagers.current.keys()); }; /** * Cancels all ongoing requests, clears timeout and references to all listeners * event managers and handlers */ const clear = () => { // clear timeouts and listeners eventManagers.current.forEach((_, key) => { unsubscribeFromVolume(key); }); // clear references to event managers eventManagers.current.clear(); // clear event handlers eventHandlers.current.clear(); }; return { getSubscriptionIds, clear, volumes: { startSubscription: startVolumeSubscription, 
pauseSubscription: pauseVolumeSubscription, unsubscribe: unsubscribeFromVolume, }, eventHandlers: { register: registerEventHandler, unregister: unregisterEventHandler, }, pollEvents: { volumes: pollVolumeEvents, driveEvents: pollDriveEvents, }, }; } const DriveEventManagerContext = createContext<ReturnType<typeof useDriveEventManagerProvider> | null>(null); export function DriveEventManagerProvider({ children }: { children: React.ReactNode }) { const api = useApi(); const generalEventManager = useEventManager(); const driveEventManager = useDriveEventManagerProvider(api, generalEventManager); return <DriveEventManagerContext.Provider value={driveEventManager}>{children}</DriveEventManagerContext.Provider>; } export const useDriveEventManager = () => { const state = useContext(DriveEventManagerContext); if (!state) { // DriveEventManager might be uninitialized in some cases. // For example, public shares do not have this implemented yet. // Better would be to not have event manager as required automatic // dependency, but that requires bigger changes. In the end, this // situation is just because of how React hooks work. One day, once // this all is shifted to worker instead, we can make it nicer. return DRIVE_EVENT_MANAGER_FUNCTIONS_STUB; } return state; }; export type DriveEventManagerProviderProps = { api: Api; generalEventManager: EventManager; children: ReactNode; };
3,071
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/extendedAttributes.test.ts
import { FILE_CHUNK_SIZE } from '@proton/shared/lib/drive/constants'; import { mockGlobalFile, testFile } from '../../utils/test/file'; import { ExtendedAttributes, ParsedExtendedAttributes, XAttrCreateParams, createFileExtendedAttributes, createFolderExtendedAttributes, parseExtendedAttributes, } from './extendedAttributes'; const emptyExtendedAttributes: ParsedExtendedAttributes = { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, }, }; describe('extended attrbiutes', () => { beforeAll(() => { jest.spyOn(global.console, 'warn').mockReturnValue(); }); beforeEach(() => { mockGlobalFile(); }); it('creates the struct from the folder', () => { const testCases: [Date, object][] = [ [ new Date(1234567890000), { Common: { ModificationTime: '2009-02-13T23:31:30.000Z', }, }, ], [new Date('2022-22-22'), {}], ]; testCases.forEach(([input, expectedAttributes]) => { const xattrs = createFolderExtendedAttributes(input); expect(xattrs).toMatchObject(expectedAttributes); }); }); it('creates the struct from the file', () => { const testCases: [XAttrCreateParams, ExtendedAttributes][] = [ [ { file: testFile('testfile.txt', 123), }, { Common: { ModificationTime: '2009-02-13T23:31:30.000Z', Size: 123, BlockSizes: [123], }, }, ], [ { file: testFile('testfile.txt', 123), digests: { sha1: 'abcdef' }, }, { Common: { ModificationTime: '2009-02-13T23:31:30.000Z', Size: 123, BlockSizes: [123], Digests: { SHA1: 'abcdef' }, }, }, ], [ { file: testFile('testfile.txt', FILE_CHUNK_SIZE * 2 + 123), media: { width: 100, height: 200 }, }, { Common: { ModificationTime: '2009-02-13T23:31:30.000Z', Size: FILE_CHUNK_SIZE * 2 + 123, BlockSizes: [FILE_CHUNK_SIZE, FILE_CHUNK_SIZE, 123], }, Media: { Width: 100, Height: 200, }, }, ], ]; testCases.forEach(([input, expectedAttributes]) => { const xattrs = createFileExtendedAttributes(input); expect(xattrs).toMatchObject(expectedAttributes); }); }); it('parses the struct', () => { const testCases: [string, 
ParsedExtendedAttributes][] = [ ['', emptyExtendedAttributes], ['{}', emptyExtendedAttributes], ['a', emptyExtendedAttributes], [ '{"Common": {"ModificationTime": "2009-02-13T23:31:30+0000"}}', { Common: { ModificationTime: 1234567890, Size: undefined, BlockSizes: undefined, }, }, ], [ '{"Common": {"Size": 123}}', { Common: { ModificationTime: undefined, Size: 123, BlockSizes: undefined, }, }, ], [ '{"Common": {"ModificationTime": "2009-02-13T23:31:30+0000", "Size": 123, "BlockSizes": [1, 2, 3]}}', { Common: { ModificationTime: 1234567890, Size: 123, BlockSizes: [1, 2, 3], }, }, ], [ '{"Common": {"ModificationTime": "aa", "Size": 123}}', { Common: { ModificationTime: undefined, Size: 123, BlockSizes: undefined, }, }, ], [ '{"Common": {"ModificationTime": "2009-02-13T23:31:30+0000", "Size": "aaa"}}', { Common: { ModificationTime: 1234567890, Size: undefined, BlockSizes: undefined, }, }, ], [ '{"Common": {"ModificationTime": "2009-02-13T23:31:30+0000", "BlockSizes": "aaa"}}', { Common: { ModificationTime: 1234567890, Size: undefined, BlockSizes: undefined, }, }, ], [ '{"Common": {}, "Media": {}}', { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, }, }, ], [ '{"Common": {}, "Media": {"Width": "aa", "Height": "aa"}}', { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, }, }, ], [ '{"Common": {}, "Media": {"Width": 100, "Height": "aa"}}', { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, }, }, ], [ '{"Common": {}, "Media": {"Width": 100, "Height": 200}}', { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, }, Media: { Width: 100, Height: 200, }, }, ], [ '{"Common": {"Digests": {}}', { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, Digests: undefined, }, }, ], [ '{"Common": {"Digests": {"SHA1": null}}}', { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, Digests: undefined, }, }, ], [ 
'{"Common": {"Digests": {"SHA1": "abcdef"}}}', { Common: { ModificationTime: undefined, Size: undefined, BlockSizes: undefined, Digests: { SHA1: 'abcdef', }, }, }, ], ]; testCases.forEach(([input, expectedAttributes]) => { const xattrs = parseExtendedAttributes(input); expect(xattrs).toMatchObject(expectedAttributes); }); }); });
3,072
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/extendedAttributes.ts
import { CryptoProxy, PrivateKeyReference, PublicKeyReference, VERIFICATION_STATUS } from '@proton/crypto'; import { FILE_CHUNK_SIZE } from '@proton/shared/lib/drive/constants'; import { decryptSigned } from '@proton/shared/lib/keys/driveKeys'; import { DeepPartial } from '../../utils/type/DeepPartial'; export interface ExtendedAttributes { Common: { ModificationTime?: string; Size?: number; BlockSizes?: number[]; Digests?: { SHA1: string; }; }; Location?: { Latitude: number; Longitude: number; }; Camera?: { CaptureTime?: string; Device?: string; Orientation?: number; SubjectCoordinates?: { Top: number; Left: number; Bottom: number; Right: number; }; }; Media?: { Width: number; Height: number; Duration?: number; }; } export interface ParsedExtendedAttributes { Common: { ModificationTime?: number; Size?: number; BlockSizes?: number[]; Digests?: { SHA1: string; }; }; Location?: { Latitude: number; Longitude: number; }; Camera?: { CaptureTime?: string; Device?: string; Orientation?: number; SubjectCoordinates?: { Top: number; Left: number; Bottom: number; Right: number; }; }; Media?: { Width: number; Height: number; Duration?: number; }; } export type MaybeExtendedAttributes = DeepPartial<ExtendedAttributes>; export async function encryptFolderExtendedAttributes( modificationTime: Date, nodePrivateKey: PrivateKeyReference, addressPrivateKey: PrivateKeyReference ) { const xattr = createFolderExtendedAttributes(modificationTime); return encryptExtendedAttributes(xattr, nodePrivateKey, addressPrivateKey); } export function createFolderExtendedAttributes(modificationTime: Date): ExtendedAttributes { return { Common: { ModificationTime: dateToIsoString(modificationTime), }, }; } export type XAttrCreateParams = { file: File; media?: { width: number; height: number; duration?: number; }; digests?: { sha1: string; }; location?: { latitude: number; longitude: number; }; camera?: { captureTime?: string; device?: string; orientation?: number; subjectCoordinates?: { top: number; 
left: number; bottom: number; right: number; }; }; }; export async function encryptFileExtendedAttributes( params: XAttrCreateParams, nodePrivateKey: PrivateKeyReference, addressPrivateKey: PrivateKeyReference ) { const xattr = createFileExtendedAttributes(params); return encryptExtendedAttributes(xattr, nodePrivateKey, addressPrivateKey); } export function createFileExtendedAttributes({ file, digests, media, camera, location, }: XAttrCreateParams): ExtendedAttributes { const blockSizes = new Array(Math.floor(file.size / FILE_CHUNK_SIZE)); blockSizes.fill(FILE_CHUNK_SIZE); blockSizes.push(file.size % FILE_CHUNK_SIZE); return { Common: { ModificationTime: dateToIsoString(new Date(file.lastModified)), Size: file.size, BlockSizes: blockSizes, Digests: digests ? { SHA1: digests.sha1, } : undefined, }, Media: media ? { Width: media.width, Height: media.height, Duration: media.duration, } : undefined, Location: location ? { Latitude: location.latitude, Longitude: location.longitude, } : undefined, Camera: camera ? { CaptureTime: camera.captureTime, Device: camera.device, Orientation: camera.orientation, SubjectCoordinates: camera.subjectCoordinates ? 
{ Top: camera.subjectCoordinates.top, Left: camera.subjectCoordinates.left, Bottom: camera.subjectCoordinates.bottom, Right: camera.subjectCoordinates.right, } : undefined, } : undefined, }; } async function encryptExtendedAttributes( xattr: ExtendedAttributes, nodePrivateKey: PrivateKeyReference, addressPrivateKey: PrivateKeyReference ) { try { const xattrString = JSON.stringify(xattr); const { message } = await CryptoProxy.encryptMessage({ textData: xattrString, encryptionKeys: nodePrivateKey, signingKeys: addressPrivateKey, compress: true, }); return message; } catch (e) { throw new Error('Failed to encrypt extended attributes', { cause: { e, addressKeyId: addressPrivateKey.getKeyID(), }, }); } } export async function decryptExtendedAttributes( encryptedXAttr: string, nodePrivateKey: PrivateKeyReference, addressPublicKey: PublicKeyReference | PublicKeyReference[] ): Promise<{ xattrs: ParsedExtendedAttributes; verified: VERIFICATION_STATUS }> { try { const { data: xattrString, verified } = await decryptSigned({ armoredMessage: encryptedXAttr, privateKey: nodePrivateKey, publicKey: addressPublicKey, }); return { xattrs: parseExtendedAttributes(xattrString), verified, }; } catch (e) { throw new Error('Failed to decrypt extended attributes', { cause: { e, addressKeyIds: (Array.isArray(addressPublicKey) ? 
addressPublicKey : [addressPublicKey]).map((key) => key.getKeyID() ), }, }); } } export function parseExtendedAttributes(xattrString: string): ParsedExtendedAttributes { let xattr: MaybeExtendedAttributes = {}; try { xattr = JSON.parse(xattrString) as MaybeExtendedAttributes; } catch (err) { console.warn(`XAttr "${xattrString}" is not valid JSON`); } return { Common: { ModificationTime: parseModificationTime(xattr), Size: parseSize(xattr), BlockSizes: parseBlockSizes(xattr), Digests: parseDigests(xattr), }, Media: parseMedia(xattr), }; } function parseModificationTime(xattr: MaybeExtendedAttributes): number | undefined { const modificationTime = xattr?.Common?.ModificationTime; if (modificationTime === undefined) { return undefined; } const modificationDate = new Date(modificationTime); // This is the best way to check if date is "Invalid Date". :shrug: if (JSON.stringify(modificationDate) === 'null') { console.warn(`XAttr modification time "${modificationTime}" is not valid`); return undefined; } const modificationTimestamp = Math.trunc(modificationDate.getTime() / 1000); if (Number.isNaN(modificationTimestamp)) { console.warn(`XAttr modification time "${modificationTime}" is not valid`); return undefined; } return modificationTimestamp; } function parseSize(xattr: MaybeExtendedAttributes): number | undefined { const size = xattr?.Common?.Size; if (size === undefined) { return undefined; } if (typeof size !== 'number') { console.warn(`XAttr file size "${size}" is not valid`); return undefined; } return size; } function parseBlockSizes(xattr: MaybeExtendedAttributes): number[] | undefined { const blockSizes = xattr?.Common?.BlockSizes; if (blockSizes === undefined) { return undefined; } if (!Array.isArray(blockSizes)) { console.warn(`XAttr block sizes "${blockSizes}" is not valid`); return undefined; } if (!blockSizes.every((item) => typeof item === 'number')) { console.warn(`XAttr block sizes "${blockSizes}" is not valid`); return undefined; } return blockSizes as 
number[]; } function parseMedia(xattr: MaybeExtendedAttributes): { Width: number; Height: number; Duration?: number } | undefined { const media = xattr?.Media; if (media === undefined || media.Width === undefined || media.Height === undefined) { return undefined; } const width = media.Width; if (typeof width !== 'number') { console.warn(`XAttr media width "${width}" is not valid`); return undefined; } const height = media.Height; if (typeof height !== 'number') { console.warn(`XAttr media height "${height}" is not valid`); return undefined; } const duration = media.Duration; if (duration !== undefined && typeof duration !== 'number') { console.warn(`XAttr media duration "${duration}" is not valid`); return undefined; } return { Width: width, Height: height, Duration: duration, }; } function parseDigests(xattr: MaybeExtendedAttributes): { SHA1: string } | undefined { const digests = xattr?.Common?.Digests; if (digests === undefined || digests.SHA1 === undefined) { return undefined; } const sha1 = digests.SHA1; if (typeof sha1 !== 'string') { console.warn(`XAttr digest SHA1 "${sha1}" is not valid`); return undefined; } return { SHA1: sha1, }; } function dateToIsoString(date: Date) { const isDateValid = !Number.isNaN(date.getTime()); return isDateValid ? date.toISOString() : undefined; }
3,073
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/index.tsx
import { LinksKeysProvider } from './useLinksKeys'; import { LinksListingProvider, PublicLinksListingProvider } from './useLinksListing'; import { LinksStateProvider } from './useLinksState'; export * from './interface'; export * from './link'; export * from './validation'; export { encryptFileExtendedAttributes, encryptFolderExtendedAttributes } from './extendedAttributes'; export { default as useLink } from './useLink'; export { default as useLinks } from './useLinks'; export { default as useLinkActions } from './useLinkActions'; export { default as useLinksActions } from './useLinksActions'; export { useLinksListing, usePublicLinksListing } from './useLinksListing'; export { useLinksQueue } from './useLinksQueue'; export function LinksProvider({ children }: { children: React.ReactNode }) { return ( <LinksStateProvider> <LinksKeysProvider> <LinksListingProvider>{children}</LinksListingProvider> </LinksKeysProvider> </LinksStateProvider> ); } export function PublicLinksProvider({ children }: { children: React.ReactNode }) { return ( <LinksStateProvider> <LinksKeysProvider> <PublicLinksListingProvider>{children}</PublicLinksListingProvider> </LinksKeysProvider> </LinksStateProvider> ); }
3,074
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/interface.ts
import { VERIFICATION_STATUS } from '@proton/crypto'; import type { Photo } from '../_photos'; /** * Link should not be used directly. It is general set of attributes * commont for both EncryptedLink and DecryptedLink. */ interface Link { linkId: string; parentLinkId: string; isFile: boolean; name: string; mimeType: string; hash: string; size: number; createTime: number; // metaDataModifyTime represents time when the meta data of the link were // modified on the server, such as renaming the link, moving to different // folder and so on. Note that renaming is not cousing the change of modify // time in regular file system. The "real" modify is encrypted in XAttr // which is then available in fileModifyTime of DecryptedLink. metaDataModifyTime: number; trashed: number | null; // trashedByParent is set only by internal state in case when parent is // trashed which needs to trash also all children as well. // Child items need to be trashed so they do not pop up anywhere, for // example on shared links page, but at the same time childs of trashed // folders should not be listed in trash section to match API behaviour. // Note there is also other solution: simply delete childs of trashed // folder from the cache, as they should not be needed at all. That is // correct, but restoring it quickly back (in case of a mistake) would // lead to re-download the whole cache again, and there would be need // to re-fetch shared links. So better to keep it around. trashedByParent?: boolean; hasThumbnail: boolean; isShared: boolean; // Note that shareId is ID of the share, that is pointer of what is shared // with someone else. Link can be part of many shares; for example part of // user's default share and of shared folder with someone else. // Don't use this ID on places where the top/default share should be used. // The current share context needs to be always passed explicitely, never // used from the link itself. 
shareId?: string; // Links associated with Shared URLs don't have share id rootShareId: string; shareUrl?: LinkShareUrl; activeRevision?: { id: string; size: number; // Address used for signature checks of blocks and xattributes. signatureAddress: string; // Thumbnail URL is not part of all requests, because that would be // too heavy for API. For example, events do not include it. thumbnail?: { bareUrl: string; token: string; }; photo?: Photo; }; signatureAddress?: string; // Addresss used for key signatures. nameSignatureAddress?: string; // Address used for name signature. // If there is no issue, the value should be undefined. signatureIssues?: SignatureIssues; } export interface LinkShareUrl { id: string; token: string; isExpired: boolean; createTime: number; expireTime: number | null; // numAccesses is not part of API requests, because that would be too // heavy for API. This number needs to be loaded explicitely with route // to get info about share URL. numAccesses?: number; } export type SignatureIssues = { // Key represents where the issue originated, e.g., passphrase, hash, name // xattributes, block, and so on. [day in SignatureIssueLocation]?: VERIFICATION_STATUS; }; export type SignatureIssueLocation = | 'passphrase' | 'hash' | 'name' | 'xattrs' | 'contentKeyPacket' | 'blocks' | 'thumbnail' | 'manifest'; export interface EncryptedLink extends Link { nodeKey: string; nodePassphrase: string; nodePassphraseSignature?: string; nodeHashKey?: string; contentKeyPacket?: string; contentKeyPacketSignature?: string; xAttr: string; } export interface DecryptedLink extends Link { // name in DecryptedLink is the decrypted part, but we need to keep also // encryptedName for renaming procedure (to generate new sessionKey). encryptedName: string; // See metaDataModifyTime of Link. fileModifyTime: number; // isLocked is set to true when file is being manipulated, such as moved // to different location. 
When link is locked, it should not be allowed // to do anything else with the link (until the operation is done). isLocked?: boolean; // isStale is indicating whether link needs to be re-decrypted due to // server-side update. By default, we don't want to automatically decrypt // everything, also, we don't want to simply remove stale link from cache // to not cause GUI blinks. App should re-decrypt link on background next // time link should be displayed. isStale?: boolean; // cachedThumbnailUrl is computed URL to cached image. This is not part // of any request and not filled automatically. To get this value, use // `loadLinkThumbnail` from `useDrive`. cachedThumbnailUrl?: string; originalSize?: number; // In case of image it might contain dimensions stored in XAttributes. originalDimensions?: { width: number; height: number; }; // Digests stored in XAttributes digests?: { sha1: string; }; duration?: number; // corruptedLink is set when a link failed to be decrypted. // In this case we still want to show it to the user so he can delete it. corruptedLink?: boolean; }
3,075
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/link.test.ts
import { adjustName, splitLinkName } from './link'; describe('adjustName', () => { it('should add index to a file with extension', () => { expect(adjustName(3, 'filename', 'ext')).toBe('filename (3).ext'); }); it('should add index to a file without extension', () => { expect(adjustName(3, 'filename')).toBe('filename (3)'); expect(adjustName(3, 'filename', '')).toBe('filename (3)'); expect(adjustName(3, 'filename.')).toBe('filename. (3)'); expect(adjustName(3, '.filename.')).toBe('.filename. (3)'); }); it('should add index to a file without name', () => { expect(adjustName(3, '', 'ext')).toBe('.ext (3)'); }); it('should leave zero-index filename with extension unchanged', () => { expect(adjustName(0, 'filename', 'ext')).toBe('filename.ext'); }); it('should leave zero-index filename without extension unchanged', () => { expect(adjustName(0, 'filename')).toBe('filename'); expect(adjustName(0, 'filename', '')).toBe('filename'); expect(adjustName(0, 'filename.')).toBe('filename.'); expect(adjustName(0, '.filename.')).toBe('.filename.'); }); it('should leave zero-index filename without name unchanged', () => { expect(adjustName(0, '', 'ext')).toBe('.ext'); }); }); describe('splitLinkName', () => { it('should split file name and extension', () => { expect(splitLinkName('filename.ext')).toEqual(['filename', 'ext']); }); it('should split file name without extension', () => { expect(splitLinkName('filename')).toEqual(['filename', '']); expect(splitLinkName('filename.')).toEqual(['filename.', '']); }); it('should split file name without name', () => { expect(splitLinkName('.ext')).toEqual(['', 'ext']); }); });
3,076
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/link.ts
import { splitExtension } from '@proton/shared/lib/helpers/file'; import isTruthy from '@proton/utils/isTruthy'; import { EncryptedLink } from './interface'; export const WINDOWS_FORBIDDEN_CHARACTERS = /[<>:"|?*]/; // eslint-disable-next-line no-control-regex export const GLOBAL_FORBIDDEN_CHARACTERS = /\/|\\|[\u0000-\u001F]|[\u2000-\u200F]|[\u202E-\u202F]/; export const WINDOWS_RESERVED_NAMES = [ 'CON', 'PRN', 'AUX', 'NUL', 'COM1', 'COM2', 'COM3', 'COM4', 'COM5', 'COM6', 'COM7', 'COM8', 'COM9', 'LPT1', 'LPT2', 'LPT3', 'LPT4', 'LPT5', 'LPT6', 'LPT7', 'LPT8', 'LPT9', ]; export const formatLinkName = (str: string) => str.trim(); export const splitLinkName = (linkName: string) => { if (linkName.endsWith('.')) { return [linkName, '']; } return splitExtension(linkName); }; export const adjustWindowsLinkName = (fileName: string) => { let adjustedFileName = fileName.replaceAll(RegExp(WINDOWS_FORBIDDEN_CHARACTERS, 'g'), '_'); if (WINDOWS_RESERVED_NAMES.includes(fileName.toUpperCase())) { adjustedFileName = `_${fileName}`; } if (adjustedFileName.endsWith('.')) { adjustedFileName = `${adjustedFileName.slice(0, -1)}_`; } return adjustedFileName; }; export const adjustName = (index: number, namePart: string, extension?: string) => { if (index === 0) { return extension ? `${namePart}.${extension}` : namePart; } if (!namePart) { return [`.${extension}`, `(${index})`].join(' '); } const newNamePart = [namePart, `(${index})`].filter(isTruthy).join(' '); return [newNamePart, extension].filter(isTruthy).join('.'); }; /** * isEncryptedLinkSame returns whether the encrypted content and keys are * the same, so we might clear signature issues and try decryption again, * for example. 
*/ export function isEncryptedLinkSame(original: EncryptedLink, newLink: EncryptedLink): boolean { return ( original.nodeKey === newLink.nodeKey && original.nodePassphrase === newLink.nodePassphrase && original.nodePassphraseSignature === newLink.nodePassphraseSignature && original.nodeHashKey === newLink.nodeHashKey && original.contentKeyPacket === newLink.contentKeyPacket && original.contentKeyPacketSignature === newLink.contentKeyPacketSignature && original.signatureAddress === newLink.signatureAddress && isDecryptedLinkSame(original, newLink) ); } /** * isDecryptedLinkSame returns whether the encrypted content (not keys) is * the same and thus we can say decrypted content is also the same, so we * might skip decryption if we already have decrypted content, for example. */ export function isDecryptedLinkSame(original: EncryptedLink, newLink: EncryptedLink): boolean { return ( original.parentLinkId === newLink.parentLinkId && original.name === newLink.name && original.xAttr === newLink.xAttr ); }
3,077
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLink.test.ts
import { act, renderHook } from '@testing-library/react-hooks'; import { RESPONSE_CODE } from '@proton/shared/lib/drive/constants'; import { decryptSigned } from '@proton/shared/lib/keys/driveKeys'; import { decryptPassphrase } from '@proton/shared/lib/keys/drivePassphrase'; import { ShareType } from '../_shares'; import { useLinkInner } from './useLink'; jest.mock('@proton/shared/lib/keys/driveKeys'); jest.mock('@proton/shared/lib/keys/drivePassphrase'); const mockRequest = jest.fn(); jest.mock('../_api/useDebouncedRequest', () => { const useDebouncedRequest = () => { return mockRequest; }; return useDebouncedRequest; }); jest.mock('../_utils/useDebouncedFunction', () => { const useDebouncedFunction = () => { return (wrapper: any) => wrapper(); }; return useDebouncedFunction; }); describe('useLink', () => { const mockFetchLink = jest.fn(); const mockLinksKeys = { getPassphrase: jest.fn(), setPassphrase: jest.fn(), getPassphraseSessionKey: jest.fn(), setPassphraseSessionKey: jest.fn(), getPrivateKey: jest.fn(), setPrivateKey: jest.fn(), getSessionKey: jest.fn(), setSessionKey: jest.fn(), getHashKey: jest.fn(), setHashKey: jest.fn(), }; const mockLinksState = { getLink: jest.fn(), setLinks: jest.fn(), setCachedThumbnail: jest.fn(), }; const mockGetVerificationKey = jest.fn(); const mockGetSharePrivateKey = jest.fn(); const mockGetShare = jest.fn(); const mockDecryptPrivateKey = jest.fn(); const abortSignal = new AbortController().signal; let hook: { current: ReturnType<typeof useLinkInner>; }; beforeAll(() => { // Time relative function can have issue with test environments // To prevent hanging async function we use Timer Mocks from jest // https://jestjs.io/docs/timer-mocks jest.useFakeTimers(); }); beforeEach(() => { jest.resetAllMocks(); global.URL.createObjectURL = jest.fn(() => 'blob:objecturl'); // @ts-ignore decryptSigned.mockImplementation(({ armoredMessage }) => Promise.resolve({ data: `dec:${armoredMessage}`, verified: 1 }) ); // @ts-ignore 
decryptPassphrase.mockImplementation(({ armoredPassphrase }) => Promise.resolve({ decryptedPassphrase: `decPass:${armoredPassphrase}`, sessionKey: `sessionKey:${armoredPassphrase}`, verified: 1, }) ); mockGetSharePrivateKey.mockImplementation((_, shareId) => `privateKey:${shareId}`); mockDecryptPrivateKey.mockImplementation(({ armoredKey: nodeKey }) => `privateKey:${nodeKey}`); const { result } = renderHook(() => useLinkInner( mockFetchLink, mockLinksKeys, mockLinksState, mockGetVerificationKey, mockGetSharePrivateKey, mockGetShare, mockDecryptPrivateKey ) ); hook = result; }); it('returns decrypted version from the cache', async () => { const item = { name: 'name' }; mockLinksState.getLink.mockReturnValue({ decrypted: item }); await act(async () => { const link = hook.current.getLink(abortSignal, 'shareId', 'linkId'); await expect(link).resolves.toMatchObject(item); }); expect(mockLinksState.getLink).toBeCalledWith('shareId', 'linkId'); expect(mockFetchLink).not.toBeCalled(); }); it('decrypts when missing decrypted version in the cache', async () => { mockLinksState.getLink.mockReturnValue({ encrypted: { linkId: 'linkId', parentLinkId: undefined, name: 'name' }, }); await act(async () => { const link = hook.current.getLink(abortSignal, 'shareId', 'linkId'); await expect(link).resolves.toMatchObject({ linkId: 'linkId', name: 'dec:name', }); }); expect(mockLinksState.getLink).toBeCalledWith('shareId', 'linkId'); expect(mockFetchLink).not.toBeCalled(); }); it('decrypts link with parent link', async () => { const generateLink = (id: string, parentId?: string) => { return { linkId: `${id}`, parentLinkId: parentId, name: `name ${id}`, nodeKey: `nodeKey ${id}`, nodePassphrase: `nodePassphrase ${id}`, }; }; const links: Record<string, ReturnType<typeof generateLink>> = { root: generateLink('root'), parent: generateLink('parent', 'root'), link: generateLink('link', 'parent'), }; mockLinksState.getLink.mockImplementation((_, linkId) => ({ encrypted: links[linkId] })); await 
act(async () => { const link = hook.current.getLink(abortSignal, 'shareId', 'link'); await expect(link).resolves.toMatchObject({ linkId: 'link', name: 'dec:name link', }); }); expect(mockFetchLink).not.toBeCalled(); expect(mockLinksState.getLink.mock.calls.map(([, linkId]) => linkId)).toMatchObject([ 'link', // Called by getLink. 'link', // Called by getEncryptedLink. 'parent', // Called by getLinkPrivateKey. 'parent', // Called by getLinkPassphraseAndSessionKey. 'root', // Called by getLinkPrivateKey. 'root', // Called by getLinkPassphraseAndSessionKey. ]); // Decrypt passphrases so we can decrypt private keys for the root and the parent. // @ts-ignore expect(decryptPassphrase.mock.calls.map(([{ armoredPassphrase }]) => armoredPassphrase)).toMatchObject([ 'nodePassphrase root', 'nodePassphrase parent', ]); expect(mockDecryptPrivateKey.mock.calls.map(([{ armoredKey: nodeKey }]) => nodeKey)).toMatchObject([ 'nodeKey root', 'nodeKey parent', ]); // With the parent key is decrypted the name of the requested link. 
expect( // @ts-ignore decryptSigned.mock.calls.map(([{ privateKey, armoredMessage }]) => [privateKey, armoredMessage]) ).toMatchObject([['privateKey:nodeKey parent', 'name link']]); }); describe('root name', () => { const LINK_NAME = 'LINK_NAME'; const tests = [ { type: ShareType.standard, name: `dec:${LINK_NAME}` }, { type: ShareType.default, name: 'My files' }, { type: ShareType.photos, name: 'Photos' }, ]; tests.forEach(({ type, name }) => { it(`detects type ${type} as "${name}"`, async () => { const link = { linkId: `root`, name: LINK_NAME, nodeKey: `nodeKey root`, nodePassphrase: `nodePassphrase root`, }; mockLinksState.getLink.mockImplementation(() => ({ encrypted: link })); mockGetShare.mockImplementation((_, shareId) => ({ shareId, rootLinkId: link.linkId, type, })); await act(async () => { const link = hook.current.getLink(abortSignal, 'shareId', 'root'); await expect(link).resolves.toMatchObject({ linkId: 'root', name, }); }); }); }); }); it('fetches link from API and decrypts when missing in the cache', async () => { mockFetchLink.mockReturnValue(Promise.resolve({ linkId: 'linkId', parentLinkId: undefined, name: 'name' })); await act(async () => { const link = hook.current.getLink(abortSignal, 'shareId', 'linkId'); await expect(link).resolves.toMatchObject({ linkId: 'linkId', name: 'dec:name', }); }); expect(mockLinksState.getLink).toBeCalledWith('shareId', 'linkId'); expect(mockFetchLink).toBeCalledTimes(1); }); it('skips failing fetch if already attempted before', async () => { const err = { data: { Code: RESPONSE_CODE.NOT_FOUND } }; mockFetchLink.mockRejectedValue(err); const link = hook.current.getLink(abortSignal, 'shareId', 'linkId'); await expect(link).rejects.toMatchObject(err); const link2 = hook.current.getLink(abortSignal, 'shareId', 'linkId'); await expect(link2).rejects.toMatchObject(err); const link3 = hook.current.getLink(abortSignal, 'shareId', 'linkId2'); await expect(link3).rejects.toMatchObject(err); 
expect(mockLinksState.getLink).toBeCalledWith('shareId', 'linkId'); expect(mockFetchLink).toBeCalledTimes(2); // linkId once and linkId2 }); it('skips load of already cached thumbnail', async () => { const downloadCallbackMock = jest.fn(); mockLinksState.getLink.mockReturnValue({ decrypted: { name: 'name', cachedThumbnailUrl: 'url', }, }); await act(async () => { await hook.current.loadLinkThumbnail(abortSignal, 'shareId', 'linkId', downloadCallbackMock); }); expect(mockRequest).not.toBeCalled(); expect(downloadCallbackMock).not.toBeCalled(); expect(mockLinksState.setCachedThumbnail).not.toBeCalled(); }); it('loads link thumbnail using cached link thumbnail info', async () => { const downloadCallbackMock = jest.fn().mockReturnValue( Promise.resolve({ contents: Promise.resolve(undefined), verifiedPromise: Promise.resolve(1), }) ); mockLinksState.getLink.mockReturnValue({ decrypted: { name: 'name', hasThumbnail: true, activeRevision: { thumbnail: { bareUrl: 'bareUrl', token: 'token', }, }, }, }); await act(async () => { await hook.current.loadLinkThumbnail(abortSignal, 'shareId', 'linkId', downloadCallbackMock); }); expect(downloadCallbackMock).toBeCalledWith('bareUrl', 'token'); expect(mockLinksState.setCachedThumbnail).toBeCalledWith('shareId', 'linkId', expect.any(String)); expect(mockRequest).not.toBeCalled(); }); it('loads link thumbnail with expired cached link thumbnail info', async () => { mockRequest.mockReturnValue({ ThumbnailBareURL: 'bareUrl', ThumbnailToken: 'token2', // Requested new non-expired token. }); const downloadCallbackMock = jest.fn().mockImplementation((url: string, token: string) => token === 'token' ? Promise.reject('token expired') : Promise.resolve({ contents: Promise.resolve(undefined), verifiedPromise: Promise.resolve(1), }) ); mockLinksState.getLink.mockReturnValue({ decrypted: { name: 'name', hasThumbnail: true, activeRevision: { thumbnail: { bareUrl: 'bareUrl', token: 'token', // Expired token. 
}, }, }, }); await act(async () => { await hook.current.loadLinkThumbnail(abortSignal, 'shareId', 'linkId', downloadCallbackMock); }); expect(downloadCallbackMock).toBeCalledWith('bareUrl', 'token'); // First attempted with expired token. expect(mockRequest).toBeCalledTimes(1); // Then requested the new token. expect(downloadCallbackMock).toBeCalledWith('bareUrl', 'token2'); // And the new one used for final download. expect(mockLinksState.setCachedThumbnail).toBeCalledWith('shareId', 'linkId', expect.any(String)); }); it('loads link thumbnail with its url on API', async () => { mockRequest.mockReturnValue({ ThumbnailBareURL: 'bareUrl', ThumbnailToken: 'token', }); const downloadCallbackMock = jest.fn().mockReturnValue( Promise.resolve({ contents: Promise.resolve(undefined), verifiedPromise: Promise.resolve(1), }) ); mockLinksState.getLink.mockReturnValue({ decrypted: { name: 'name', hasThumbnail: true, activeRevision: { id: 'revisionId', }, }, }); await act(async () => { await hook.current.loadLinkThumbnail(abortSignal, 'shareId', 'linkId', downloadCallbackMock); }); expect(mockRequest).toBeCalledTimes(1); expect(downloadCallbackMock).toBeCalledWith('bareUrl', 'token'); expect(mockLinksState.setCachedThumbnail).toBeCalledWith('shareId', 'linkId', expect.any(String)); }); it('decrypts badly signed thumbnail block', async () => { mockLinksState.getLink.mockReturnValue({ encrypted: { linkId: 'link', }, decrypted: { linkId: 'link', name: 'name', hasThumbnail: true, activeRevision: { id: 'revisionId', }, }, }); mockRequest.mockReturnValue({ ThumbnailBareURL: 'bareUrl', ThumbnailToken: 'token', }); const downloadCallbackMock = jest.fn().mockReturnValue( Promise.resolve({ contents: Promise.resolve(undefined), verifiedPromise: Promise.resolve(2), }) ); await act(async () => { await hook.current.loadLinkThumbnail(abortSignal, 'shareId', 'link', downloadCallbackMock); }); expect(mockLinksState.setLinks).toBeCalledWith('shareId', [ expect.objectContaining({ encrypted: 
expect.objectContaining({ linkId: 'link', signatureIssues: { thumbnail: 2 }, }), }), ]); }); describe('decrypts link meta data with signature issues', () => { beforeEach(() => { const generateLink = (id: string, parentId?: string) => { return { linkId: `${id}`, parentLinkId: parentId, name: `name ${id}`, nodeKey: `nodeKey ${id}`, nodeHashKey: `nodeHashKey ${id}`, nodePassphrase: `nodePassphrase ${id}`, }; }; const links: Record<string, ReturnType<typeof generateLink>> = { root: generateLink('root'), parent: generateLink('parent', 'root'), link: generateLink('link', 'parent'), }; mockLinksState.getLink.mockImplementation((_, linkId) => ({ encrypted: links[linkId] })); }); it('decrypts badly signed passphrase', async () => { // @ts-ignore decryptPassphrase.mockReset(); // @ts-ignore decryptPassphrase.mockImplementation(({ armoredPassphrase }) => Promise.resolve({ decryptedPassphrase: `decPass:${armoredPassphrase}`, sessionKey: `sessionKey:${armoredPassphrase}`, verified: 2, }) ); await act(async () => { await hook.current.getLink(abortSignal, 'shareId', 'link'); }); ['root', 'parent'].forEach((linkId) => { expect(mockLinksState.setLinks).toBeCalledWith('shareId', [ expect.objectContaining({ encrypted: expect.objectContaining({ linkId, signatureIssues: { passphrase: 2 }, }), }), ]); }); }); it('decrypts badly signed hash', async () => { // @ts-ignore decryptSigned.mockReset(); // @ts-ignore decryptSigned.mockImplementation(({ armoredMessage }) => Promise.resolve({ data: `dec:${armoredMessage}`, verified: 2 }) ); mockGetVerificationKey.mockReturnValue([]); await act(async () => { await hook.current.getLinkHashKey(abortSignal, 'shareId', 'parent'); }); expect(mockLinksState.setLinks).toBeCalledWith('shareId', [ expect.objectContaining({ encrypted: expect.objectContaining({ linkId: 'parent', signatureIssues: { hash: 2 }, }), }), ]); }); it('decrypts badly signed name', async () => { // @ts-ignore decryptSigned.mockReset(); // @ts-ignore 
decryptSigned.mockImplementation(({ armoredMessage }) => Promise.resolve({ data: `dec:${armoredMessage}`, verified: 2 }) ); await act(async () => { await hook.current.getLink(abortSignal, 'shareId', 'link'); }); expect(mockLinksState.setLinks).toBeCalledWith('shareId', [ expect.objectContaining({ decrypted: expect.objectContaining({ linkId: 'link', signatureIssues: { name: 2 }, }), }), ]); }); }); });
3,078
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLink.ts
import { useRef } from 'react'; import { fromUnixTime, isAfter } from 'date-fns'; import { c } from 'ttag'; import { CryptoProxy, PrivateKeyReference, SessionKey, VERIFICATION_STATUS } from '@proton/crypto'; import { queryFileRevision, queryFileRevisionThumbnail } from '@proton/shared/lib/api/drive/files'; import { queryGetLink } from '@proton/shared/lib/api/drive/link'; import { RESPONSE_CODE } from '@proton/shared/lib/drive/constants'; import { base64StringToUint8Array } from '@proton/shared/lib/helpers/encoding'; import { DriveFileRevisionResult, DriveFileRevisionThumbnailResult } from '@proton/shared/lib/interfaces/drive/file'; import { LinkMetaResult } from '@proton/shared/lib/interfaces/drive/link'; import { decryptSigned } from '@proton/shared/lib/keys/driveKeys'; import { decryptPassphrase, getDecryptedSessionKey } from '@proton/shared/lib/keys/drivePassphrase'; import { linkMetaToEncryptedLink, revisionPayloadToRevision, useDebouncedRequest } from '../_api'; import { useDriveCrypto } from '../_crypto'; import { ShareType, useShare } from '../_shares'; import { useDebouncedFunction } from '../_utils'; import { decryptExtendedAttributes } from './extendedAttributes'; import { DecryptedLink, EncryptedLink, SignatureIssueLocation, SignatureIssues } from './interface'; import { isDecryptedLinkSame } from './link'; import useLinksKeys from './useLinksKeys'; import useLinksState from './useLinksState'; // Interval should not be too low to not cause spikes on the server but at the // same time not too high to not overflow available memory on the device. const FAILING_FETCH_BACKOFF_MS = 10 * 60 * 1000; // 10 minutes. 
const generateCorruptDecryptedLink = (encryptedLink: EncryptedLink, name: string): DecryptedLink => ({ encryptedName: encryptedLink.name, name, linkId: encryptedLink.linkId, createTime: encryptedLink.createTime, corruptedLink: true, activeRevision: encryptedLink.activeRevision, digests: { sha1: '' }, hash: encryptedLink.hash, size: encryptedLink.size, originalSize: encryptedLink.size, fileModifyTime: encryptedLink.metaDataModifyTime, metaDataModifyTime: encryptedLink.metaDataModifyTime, isFile: encryptedLink.isFile, mimeType: encryptedLink.mimeType, hasThumbnail: encryptedLink.hasThumbnail, isShared: encryptedLink.isShared, parentLinkId: encryptedLink.parentLinkId, rootShareId: encryptedLink.rootShareId, signatureIssues: encryptedLink.signatureIssues, originalDimensions: { height: 0, width: 0, }, trashed: encryptedLink.trashed, }); export default function useLink() { const linksKeys = useLinksKeys(); const linksState = useLinksState(); const { getVerificationKey } = useDriveCrypto(); const { getSharePrivateKey, getShare } = useShare(); const debouncedRequest = useDebouncedRequest(); const fetchLink = async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<EncryptedLink> => { const { Link } = await debouncedRequest<LinkMetaResult>( { ...queryGetLink(shareId, linkId), // Ignore HTTP errors (e.g. "Not Found", "Unprocessable Entity" // etc). Not every `fetchLink` call relates to a user action // (it might be a helper function for a background job). Hence, // there are potential cases when displaying such messages will // confuse the user. Every higher-level caller should handle it // based on the context. 
silence: true, }, abortSignal ); return linkMetaToEncryptedLink(Link, shareId); }; return useLinkInner( fetchLink, linksKeys, linksState, getVerificationKey, getSharePrivateKey, getShare, CryptoProxy.importPrivateKey ); } export function useLinkInner( fetchLink: (abortSignal: AbortSignal, shareId: string, linkId: string) => Promise<EncryptedLink>, linksKeys: Pick< ReturnType<typeof useLinksKeys>, | 'getPassphrase' | 'setPassphrase' | 'getPassphraseSessionKey' | 'setPassphraseSessionKey' | 'getPrivateKey' | 'setPrivateKey' | 'getSessionKey' | 'setSessionKey' | 'getHashKey' | 'setHashKey' >, linksState: Pick<ReturnType<typeof useLinksState>, 'getLink' | 'setLinks' | 'setCachedThumbnail'>, getVerificationKey: ReturnType<typeof useDriveCrypto>['getVerificationKey'], getSharePrivateKey: ReturnType<typeof useShare>['getSharePrivateKey'], getShare: ReturnType<typeof useShare>['getShare'], importPrivateKey: typeof CryptoProxy.importPrivateKey // passed as arg for easier mocking when testing ) { const debouncedFunction = useDebouncedFunction(); const debouncedRequest = useDebouncedRequest(); // Cache certain API errors in order to avoid sending multiple requests to // the same failing link. For example, trying to fetch the same missing // parent link for multiple descendants (when processing already outdated // events). 
const linkFetchErrors = useRef<{ [key: string]: any }>({}); const fetchLinkDONOTUSE = fetchLink; fetchLink = async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<EncryptedLink> => { const err = linkFetchErrors.current[shareId + linkId]; if (err) { throw err; } return fetchLinkDONOTUSE(abortSignal, shareId, linkId).catch((err) => { if ( [RESPONSE_CODE.NOT_FOUND, RESPONSE_CODE.NOT_ALLOWED, RESPONSE_CODE.INVALID_ID].includes(err?.data?.Code) ) { linkFetchErrors.current[shareId + linkId] = err; setTimeout(() => { delete linkFetchErrors.current[shareId + linkId]; }, FAILING_FETCH_BACKOFF_MS); } throw err; }); }; const handleSignatureCheck = ( shareId: string, encryptedLink: EncryptedLink, location: SignatureIssueLocation, verified: VERIFICATION_STATUS ) => { if (verified !== VERIFICATION_STATUS.SIGNED_AND_VALID) { const signatureIssues: SignatureIssues = {}; signatureIssues[location] = verified; linksState.setLinks(shareId, [ { encrypted: { ...encryptedLink, signatureIssues, }, }, ]); } }; /** * debouncedFunctionDecorator wraps original callback with debouncedFunction * to ensure that if even two or more calls with the same parameters are * executed only once. E.g., to not decrypt the same link keys twice. 
*/ const debouncedFunctionDecorator = <T>( cacheKey: string, callback: (abortSignal: AbortSignal, shareId: string, linkId: string) => Promise<T> ): ((abortSignal: AbortSignal, shareId: string, linkId: string) => Promise<T>) => { const wrapper = async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<T> => { return debouncedFunction( async (abortSignal: AbortSignal) => { return callback(abortSignal, shareId, linkId); }, [cacheKey, shareId, linkId], abortSignal ); }; return wrapper; }; const getEncryptedLink = debouncedFunctionDecorator( 'getEncryptedLink', async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<EncryptedLink> => { const cachedLink = linksState.getLink(shareId, linkId); if (cachedLink) { return cachedLink.encrypted; } const link = await fetchLink(abortSignal, shareId, linkId); linksState.setLinks(shareId, [{ encrypted: link }]); return link; } ); /** * getLinkPassphraseAndSessionKey returns the passphrase with session key * used for locking the private key. */ const getLinkPassphraseAndSessionKey = debouncedFunctionDecorator( 'getLinkPassphraseAndSessionKey', async ( abortSignal: AbortSignal, shareId: string, linkId: string ): Promise<{ passphrase: string; passphraseSessionKey: SessionKey }> => { const passphrase = linksKeys.getPassphrase(shareId, linkId); const sessionKey = linksKeys.getPassphraseSessionKey(shareId, linkId); if (passphrase && sessionKey) { return { passphrase, passphraseSessionKey: sessionKey }; } const encryptedLink = await getEncryptedLink(abortSignal, shareId, linkId); const parentPrivateKeyPromise = encryptedLink.parentLinkId ? 
// eslint-disable-next-line @typescript-eslint/no-use-before-define getLinkPrivateKey(abortSignal, shareId, encryptedLink.parentLinkId) : getSharePrivateKey(abortSignal, shareId); const [parentPrivateKey, addressPublicKey] = await Promise.all([ parentPrivateKeyPromise, getVerificationKey(encryptedLink.signatureAddress), ]); try { const { decryptedPassphrase, sessionKey: passphraseSessionKey, verified, } = await decryptPassphrase({ armoredPassphrase: encryptedLink.nodePassphrase, armoredSignature: encryptedLink.nodePassphraseSignature, privateKeys: [parentPrivateKey], publicKeys: addressPublicKey, validateSignature: false, }); handleSignatureCheck(shareId, encryptedLink, 'passphrase', verified); linksKeys.setPassphrase(shareId, linkId, decryptedPassphrase); linksKeys.setPassphraseSessionKey(shareId, linkId, passphraseSessionKey); return { passphrase: decryptedPassphrase, passphraseSessionKey, }; } catch (e) { throw new Error('Failed to decrypt link passphrase', { cause: { e, shareId, linkId, }, }); } } ); /** * getLinkPrivateKey returns the private key used for link meta data encryption. */ const getLinkPrivateKey = debouncedFunctionDecorator( 'getLinkPrivateKey', async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<PrivateKeyReference> => { let privateKey = linksKeys.getPrivateKey(shareId, linkId); if (privateKey) { return privateKey; } const encryptedLink = await getEncryptedLink(abortSignal, shareId, linkId); const { passphrase } = await getLinkPassphraseAndSessionKey(abortSignal, shareId, linkId); try { privateKey = await importPrivateKey({ armoredKey: encryptedLink.nodeKey, passphrase }); } catch (e) { throw new Error('Failed to import link private key', { cause: { e, shareId, linkId, }, }); } linksKeys.setPrivateKey(shareId, linkId, privateKey); return privateKey; } ); /** * getLinkSessionKey returns the session key used for block encryption. 
*/ const getLinkSessionKey = debouncedFunctionDecorator( 'getLinkSessionKey', async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<SessionKey> => { let sessionKey = linksKeys.getSessionKey(shareId, linkId); if (sessionKey) { return sessionKey; } const encryptedLink = await getEncryptedLink(abortSignal, shareId, linkId); if (!encryptedLink.contentKeyPacket) { // This is dev error, should not happen in the wild. throw new Error('Content key is available only in file context'); } const privateKey = await getLinkPrivateKey(abortSignal, shareId, linkId); const blockKeys = base64StringToUint8Array(encryptedLink.contentKeyPacket); try { sessionKey = await getDecryptedSessionKey({ data: blockKeys, privateKeys: privateKey, }); } catch (e) { throw new Error('Failed to decrypt link session key', { cause: { e, shareId, linkId, }, }); } if (encryptedLink.contentKeyPacketSignature) { const publicKeys = [privateKey, ...(await getVerificationKey(encryptedLink.signatureAddress))]; const { verified } = await CryptoProxy.verifyMessage({ binaryData: sessionKey.data, verificationKeys: publicKeys, armoredSignature: encryptedLink.contentKeyPacketSignature, }); // iOS signed content key instead of session key in the past. // Therefore we need to check that as well until we migrate // old files. if (verified !== VERIFICATION_STATUS.SIGNED_AND_VALID) { const { verified: blockKeysVerified } = await CryptoProxy.verifyMessage({ binaryData: blockKeys, verificationKeys: publicKeys, armoredSignature: encryptedLink.contentKeyPacketSignature, }); if (blockKeysVerified !== VERIFICATION_STATUS.SIGNED_AND_VALID) { // If even fall back solution does not succeed, report // the original verified status of the session key as // that one is the one we want to verify here. 
handleSignatureCheck(shareId, encryptedLink, 'contentKeyPacket', verified); } } } linksKeys.setSessionKey(shareId, linkId, sessionKey); return sessionKey; } ); /** * getLinkHashKey returns the hash key used for checking name collisions. */ const getLinkHashKey = debouncedFunctionDecorator( 'getLinkHashKey', async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<Uint8Array> => { let cachedHashKey = linksKeys.getHashKey(shareId, linkId); if (cachedHashKey) { return cachedHashKey; } const encryptedLink = await getEncryptedLink(abortSignal, shareId, linkId); if (!encryptedLink.nodeHashKey) { // This is dev error, should not happen in the wild. throw new Error('Hash key is available only in folder context'); } const [privateKey, addressPrivateKey] = await Promise.all([ getLinkPrivateKey(abortSignal, shareId, linkId), getVerificationKey(encryptedLink.signatureAddress), ]); // In the past we had misunderstanding what key is used to sign // hash key. Originally it meant to be node key, which web used // for all links besides the root one, where address key was // used instead. Similarly, iOS or Android used address key for // all links. Latest versions should use node key in all cases // but we accept also address key. Its still signed with valid // key. In future we might re-sign bad links so we can get rid // of this. const publicKey = [privateKey, ...addressPrivateKey]; try { const { data: hashKey, verified } = await decryptSigned({ armoredMessage: encryptedLink.nodeHashKey, privateKey, publicKey, format: 'binary', }); if ( verified === VERIFICATION_STATUS.SIGNED_AND_INVALID || // The hash was not signed until Beta 17 (DRVWEB-1219). 
(verified === VERIFICATION_STATUS.NOT_SIGNED && isAfter(fromUnixTime(encryptedLink.createTime), new Date(2021, 7, 1))) ) { handleSignatureCheck(shareId, encryptedLink, 'hash', verified); } linksKeys.setHashKey(shareId, linkId, hashKey); return hashKey; } catch (e) { throw new Error('Failed to decrypt link hash key', { cause: { e, shareId, linkId, }, }); } } ); const getLinkRevision = async ( abortSignal: AbortSignal, { shareId, linkId, revisionId }: { shareId: string; linkId: string; revisionId: string } ) => { const { Revision } = await debouncedRequest<DriveFileRevisionResult>( queryFileRevision(shareId, linkId, revisionId), abortSignal ); return revisionPayloadToRevision(Revision); }; /** * decryptLink decrypts provided `encryptedLink`. The result is not stored * anywhere, only returned back. */ const decryptLink = async ( abortSignal: AbortSignal, shareId: string, encryptedLink: EncryptedLink, revisionId?: string ): Promise<DecryptedLink> => { return debouncedFunction( async (abortSignal: AbortSignal): Promise<DecryptedLink> => { const namePromise = decryptSigned({ armoredMessage: encryptedLink.name, privateKey: !encryptedLink.parentLinkId ? await getSharePrivateKey(abortSignal, shareId) : await getLinkPrivateKey(abortSignal, shareId, encryptedLink.parentLinkId), // nameSignatureAddress is missing for some old files. // Fallback to signatureAddress might result in failed // signature check, but no one reported it so far so // we should be good. Important is that user can access // the file and the verification do not hard fail. // If we find out that it doesnt work for some user, // we could skip the verification instead. But the best // would be to fix it properly in the database. publicKey: await getVerificationKey( encryptedLink.nameSignatureAddress || encryptedLink.signatureAddress ), }).then(({ data, verified }) => ({ name: data, nameVerified: verified })); const revision = !!revisionId ? 
await getLinkRevision(abortSignal, { shareId, linkId: encryptedLink.linkId, revisionId }) : undefined; const xattrPromise = !encryptedLink.xAttr ? { fileModifyTime: encryptedLink.metaDataModifyTime, fileModifyTimeVerified: VERIFICATION_STATUS.SIGNED_AND_VALID, originalSize: undefined, originalDimensions: undefined, digests: undefined, duration: undefined, } : getLinkPrivateKey(abortSignal, shareId, encryptedLink.linkId) .then(async (privateKey) => decryptExtendedAttributes( encryptedLink.xAttr, privateKey, // Files have signature address on the revision. // Folders have signature address on the link itself. await getVerificationKey( revision?.signatureAddress || encryptedLink.activeRevision?.signatureAddress || encryptedLink.signatureAddress ) ) ) .then(({ xattrs, verified }) => ({ fileModifyTime: xattrs.Common.ModificationTime || encryptedLink.metaDataModifyTime, fileModifyTimeVerified: verified, originalSize: xattrs.Common?.Size, originalDimensions: xattrs.Media ? { width: xattrs.Media.Width, height: xattrs.Media.Height, } : undefined, duration: xattrs.Media?.Duration, digests: xattrs.Common?.Digests ? { sha1: xattrs.Common.Digests.SHA1, } : undefined, })); const [nameResult, xattrResult] = await Promise.allSettled([namePromise, xattrPromise]); if (nameResult.status === 'rejected') { return generateCorruptDecryptedLink(encryptedLink, '�'); } const { nameVerified, name } = nameResult.value; const signatureIssues: SignatureIssues = {}; if ( nameVerified === VERIFICATION_STATUS.SIGNED_AND_INVALID || // The name was not signed until Beta 3 (DRVWEB-673). 
(nameVerified === VERIFICATION_STATUS.NOT_SIGNED && isAfter(fromUnixTime(encryptedLink.createTime), new Date(2021, 0, 1))) ) { signatureIssues.name = nameVerified; } if (xattrResult.status === 'rejected') { return generateCorruptDecryptedLink(encryptedLink, name); } const { fileModifyTimeVerified, fileModifyTime, originalSize, originalDimensions, digests, duration } = xattrResult.value; if (fileModifyTimeVerified !== VERIFICATION_STATUS.SIGNED_AND_VALID) { signatureIssues.xattrs = fileModifyTimeVerified; } // Share will already be in cache due to getSharePrivateKey above const share = !encryptedLink.parentLinkId ? await getShare(abortSignal, shareId) : undefined; let displayName = name; if (share?.type === ShareType.default) { displayName = c('Title').t`My files`; } else if (share?.type === ShareType.photos) { displayName = c('Title').t`Photos`; } return { ...encryptedLink, encryptedName: encryptedLink.name, name: displayName, fileModifyTime: fileModifyTime, originalSize, originalDimensions, duration, signatureIssues: Object.keys(signatureIssues).length > 0 ? signatureIssues : undefined, digests, }; }, ['decryptLink', shareId, encryptedLink.linkId], abortSignal ); }; /** * getLInk provides decrypted link. If the cached link is available, it is * returned right away. In other cases it might first fetch link from API, * or just decrypt the encrypted cached one. If the decrypted link is stale * (that means new version of encrypted link was fetched but not decrypted * yet), it is first re-decrypted. 
*/ const getLink = debouncedFunctionDecorator( 'getLink', async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<DecryptedLink> => { const cachedLink = linksState.getLink(shareId, linkId); if (cachedLink && cachedLink.decrypted && !cachedLink.decrypted.isStale) { return cachedLink.decrypted; } const encrypted = await getEncryptedLink(abortSignal, shareId, linkId); try { const decrypted = await decryptLink(abortSignal, shareId, encrypted); linksState.setLinks(shareId, [{ encrypted, decrypted }]); return decrypted; } catch (e) { throw new Error('Failed to decrypt link', { cause: { e, shareId, linkId, }, }); } } ); /** * loadFreshLink always fetches the fresh link meta data from API, but * the decryption is done only when its needed. Anyway, this should be * used only when really needed, for example, if we need to make sure if * the link doesn't have any shared link already before creating new one. */ const loadFreshLink = async (abortSignal: AbortSignal, shareId: string, linkId: string): Promise<DecryptedLink> => { const cachedLink = linksState.getLink(shareId, linkId); const encryptedLink = await fetchLink(abortSignal, shareId, linkId); try { const decryptedLink = cachedLink && isDecryptedLinkSame(cachedLink.encrypted, encryptedLink) ? undefined : await decryptLink(abortSignal, shareId, encryptedLink); linksState.setLinks(shareId, [{ encrypted: encryptedLink, decrypted: decryptedLink }]); return linksState.getLink(shareId, linkId)?.decrypted as DecryptedLink; } catch (e) { throw new Error('Failed to decrypt link', { cause: { e, shareId, linkId, }, }); } }; /** * loadLinkThumbnail gets thumbnail URL either from cached link or fetches * it from API, then downloads the thumbnail block and decrypts it using * `downloadCallback`, and finally creates local URL to it which is set to * the cached link. 
*/ const loadLinkThumbnail = async ( abortSignal: AbortSignal, shareId: string, linkId: string, downloadCallback: ( downloadUrl: string, downloadToken: string ) => Promise<{ contents: Promise<Uint8Array[]>; verifiedPromise: Promise<VERIFICATION_STATUS> }> ): Promise<string | undefined> => { const link = await getLink(abortSignal, shareId, linkId); if (link.cachedThumbnailUrl || !link.hasThumbnail || !link.activeRevision) { return link.cachedThumbnailUrl; } let downloadInfo = { isFresh: false, downloadUrl: link.activeRevision.thumbnail?.bareUrl, downloadToken: link.activeRevision.thumbnail?.token, }; const loadDownloadUrl = async (activeRevisionId: string) => { const res = (await debouncedRequest( queryFileRevisionThumbnail(shareId, linkId, activeRevisionId) )) as DriveFileRevisionThumbnailResult; return { isFresh: true, downloadUrl: res.ThumbnailBareURL, downloadToken: res.ThumbnailToken, }; }; const loadThumbnailUrl = async (downloadUrl: string, downloadToken: string): Promise<string> => { const { contents, verifiedPromise } = await downloadCallback(downloadUrl, downloadToken); const data = await contents; const url = URL.createObjectURL(new Blob(data, { type: 'image/jpeg' })); linksState.setCachedThumbnail(shareId, linkId, url); const cachedLink = linksState.getLink(shareId, linkId); if (cachedLink) { const verified = await verifiedPromise; handleSignatureCheck(shareId, cachedLink.encrypted, 'thumbnail', verified); } return url; }; if (!downloadInfo.downloadUrl || !downloadInfo.downloadToken) { downloadInfo = await loadDownloadUrl(link.activeRevision.id); } if (!downloadInfo.downloadUrl || !downloadInfo.downloadToken) { return; } try { return await loadThumbnailUrl(downloadInfo.downloadUrl, downloadInfo.downloadToken); } catch (err) { // Download URL and token can be expired if we used cached version. 
// We get thumbnail info with the link, but if user don't scroll // to the item before cached version expires, we need to try again // with a loading the new URL and token. if (downloadInfo.isFresh) { throw err; } downloadInfo = await loadDownloadUrl(link.activeRevision.id); if (!downloadInfo.downloadUrl || !downloadInfo.downloadToken) { return; } return await loadThumbnailUrl(downloadInfo.downloadUrl, downloadInfo.downloadToken); } }; const setSignatureIssues = async ( abortSignal: AbortSignal, shareId: string, linkId: string, signatureIssues: SignatureIssues ) => { const link = await getEncryptedLink(abortSignal, shareId, linkId); linksState.setLinks(shareId, [ { encrypted: { ...link, signatureIssues, }, }, ]); }; return { getLinkPassphraseAndSessionKey, getLinkPrivateKey, getLinkSessionKey, getLinkHashKey, decryptLink, getLink, loadFreshLink, loadLinkThumbnail, setSignatureIssues, }; }
3,079
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinkActions.ts
import { usePreventLeave } from '@proton/components'; import { CryptoProxy } from '@proton/crypto'; import { queryCreateFolder } from '@proton/shared/lib/api/drive/folder'; import { queryRenameLink } from '@proton/shared/lib/api/drive/share'; import { encryptName, generateLookupHash, generateNodeHashKey, generateNodeKeys, } from '@proton/shared/lib/keys/driveKeys'; import { getDecryptedSessionKey } from '@proton/shared/lib/keys/drivePassphrase'; import getRandomString from '@proton/utils/getRandomString'; import { ValidationError } from '../../utils/errorHandling/ValidationError'; import { useDebouncedRequest } from '../_api'; import { useDriveEventManager } from '../_events'; import { useShare } from '../_shares'; import { useVolumesState } from '../_volumes'; import { encryptFolderExtendedAttributes } from './extendedAttributes'; import useLink from './useLink'; import { validateLinkName } from './validation'; /** * useLinkActions provides actions for manipulating with individual link. */ export default function useLinkActions() { const { preventLeave } = usePreventLeave(); const debouncedRequest = useDebouncedRequest(); const events = useDriveEventManager(); const { getLink, getLinkPrivateKey, getLinkSessionKey, getLinkHashKey } = useLink(); const { getSharePrivateKey, getShareCreatorKeys } = useShare(); const volumeState = useVolumesState(); const createFolder = async ( abortSignal: AbortSignal, shareId: string, parentLinkId: string, name: string, modificationTime?: Date ) => { // Name Hash is generated from LC, for case-insensitive duplicate detection. 
const error = validateLinkName(name); if (error) { throw new ValidationError(error); } const [parentPrivateKey, parentHashKey, { privateKey: addressKey, address }] = await Promise.all([ getLinkPrivateKey(abortSignal, shareId, parentLinkId), getLinkHashKey(abortSignal, shareId, parentLinkId), getShareCreatorKeys(abortSignal, shareId), ]); const [Hash, { NodeKey, NodePassphrase, privateKey, NodePassphraseSignature }, encryptedName] = await Promise.all([ generateLookupHash(name, parentHashKey).catch((e) => Promise.reject( new Error('Failed to generate folder link lookup hash during folder creation', { cause: { e, shareId, parentLinkId, }, }) ) ), generateNodeKeys(parentPrivateKey, addressKey).catch((e) => Promise.reject( new Error('Failed to generate folder link node keys during folder creation', { cause: { e, shareId, parentLinkId, }, }) ) ), encryptName(name, parentPrivateKey, addressKey).catch((e) => Promise.reject( new Error('Failed to encrypt folder link name during folder creation', { cause: { e, shareId, parentLinkId, }, }) ) ), ]); // We use private key instead of address key to sign the hash key // because its internal property of the folder. We use address key for // name or content to have option to trust some users more or less. const { NodeHashKey } = await generateNodeHashKey(privateKey, privateKey).catch((e) => Promise.reject( new Error('Failed to encrypt node hash key during folder creation', { cause: { e, shareId, parentLinkId, }, }) ) ); const xattr = !modificationTime ? 
undefined : await encryptFolderExtendedAttributes(modificationTime, privateKey, addressKey); const { Folder } = await preventLeave( debouncedRequest<{ Folder: { ID: string } }>( queryCreateFolder(shareId, { Hash, NodeHashKey, Name: encryptedName, NodeKey, NodePassphrase, NodePassphraseSignature, SignatureAddress: address.Email, ParentLinkID: parentLinkId, XAttr: xattr, }) ) ); const volumeId = volumeState.findVolumeId(shareId); if (volumeId) { await events.pollEvents.volumes(volumeId); } return Folder.ID; }; const renameLink = async (abortSignal: AbortSignal, shareId: string, linkId: string, newName: string) => { const error = validateLinkName(newName); if (error) { throw new ValidationError(error); } const [meta, { privateKey: addressKey, address }] = await Promise.all([ getLink(abortSignal, shareId, linkId), getShareCreatorKeys(abortSignal, shareId), ]); if (meta.corruptedLink) { throw new Error('Cannot rename corrupted file'); } const [parentPrivateKey, parentHashKey] = await Promise.all([ meta.parentLinkId ? getLinkPrivateKey(abortSignal, shareId, meta.parentLinkId) : getSharePrivateKey(abortSignal, shareId), meta.parentLinkId ? getLinkHashKey(abortSignal, shareId, meta.parentLinkId) : null, ]); const sessionKey = await getDecryptedSessionKey({ data: meta.encryptedName, privateKeys: parentPrivateKey, }).catch((e) => Promise.reject( new Error('Failed to decrypt link name session key during rename', { cause: { e, shareId, linkId, }, }) ) ); const [Hash, { message: encryptedName }] = await Promise.all([ parentHashKey ? 
generateLookupHash(newName, parentHashKey).catch((e) => Promise.reject( new Error('Failed to generate link lookup hash during rename', { cause: { e, shareId, linkId, }, }) ) ) : getRandomString(64), CryptoProxy.encryptMessage({ textData: newName, stripTrailingSpaces: true, sessionKey, signingKeys: addressKey, }).catch((e) => Promise.reject( new Error('Failed to encrypt link name during rename', { cause: { e, shareId, linkId, }, }) ) ), ]); await preventLeave( debouncedRequest( queryRenameLink(shareId, linkId, { Name: encryptedName, Hash, SignatureAddress: address.Email, OriginalHash: meta.hash, }) ) ); const volumeId = volumeState.findVolumeId(shareId); if (volumeId) { await events.pollEvents.volumes(volumeId); } }; /** * checkLinkMetaSignatures checks for all signatures of various attributes: * passphrase, hash key, name or xattributes. It does not check content, * that is file blocks including thumbnail block. */ const checkLinkMetaSignatures = async (abortSignal: AbortSignal, shareId: string, linkId: string) => { const [link] = await Promise.all([ // Decrypts name and xattributes. getLink(abortSignal, shareId, linkId), // Decrypts passphrase. getLinkPrivateKey(abortSignal, shareId, linkId), ]); if (link.isFile) { await getLinkSessionKey(abortSignal, shareId, linkId); } else { await getLinkHashKey(abortSignal, shareId, linkId); } // Get latest link with signature updates. return (await getLink(abortSignal, shareId, linkId)).signatureIssues; }; return { createFolder, renameLink, checkLinkMetaSignatures, }; }
3,080
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinks.ts
import { MAX_THREADS_PER_REQUEST } from '@proton/shared/lib/drive/constants'; import runInQueue from '@proton/shared/lib/helpers/runInQueue'; import isTruthy from '@proton/utils/isTruthy'; import { isIgnoredError } from '../../utils/errorHandling'; import { DecryptedLink, EncryptedLink } from './interface'; import useLink from './useLink'; export default function useLinks() { const { decryptLink, getLink } = useLink(); const decryptLinks = async ( abortSignal: AbortSignal, shareId: string, encryptedLinks: EncryptedLink[] ): Promise<{ links: { encrypted: EncryptedLink; decrypted: DecryptedLink }[]; errors: any[]; }> => { let errors: any[] = []; const queue = encryptedLinks.map((encrypted) => async () => { if (abortSignal.aborted) { return; } return decryptLink(abortSignal, shareId, encrypted) .then((decrypted) => { if (decrypted.corruptedLink) { errors.push(new Error('Failed to decrypt link')); } return { encrypted, decrypted }; }) .catch((err) => { if (!isIgnoredError(err)) { errors.push(err); } }); }); // Limit the decryption so the app does not freeze when loading big page. const results = await runInQueue(queue, MAX_THREADS_PER_REQUEST); const links = results.filter(isTruthy); return { links, errors }; }; const getLinks = async ( abortSignal: AbortSignal, ids: { linkId: string; shareId: string }[] ): Promise<DecryptedLink[]> => { const queue = ids.map( ({ linkId, shareId }) => async () => getLink(abortSignal, shareId, linkId) ); return runInQueue(queue, MAX_THREADS_PER_REQUEST); }; return { decryptLinks, getLinks, }; }
3,081
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksActions.test.tsx
import { renderHook } from '@testing-library/react-hooks'; import { RESPONSE_CODE } from '@proton/shared/lib/drive/constants'; import isTruthy from '@proton/utils/isTruthy'; import { VolumesStateProvider } from '../_volumes/useVolumesState'; import { useLinksActions } from './useLinksActions'; jest.mock('@proton/components/hooks/usePreventLeave', () => { const usePreventLeave = () => { return { preventLeave: (fn: any) => fn, }; }; return usePreventLeave; }); jest.mock('../_events/useDriveEventManager', () => { const useDriveEventManager = () => { return { pollEvents: { volumes: () => Promise.resolve(), }, }; }; return { useDriveEventManager, }; }); jest.mock('./useLinksState', () => { const useLinksState = () => { return { lockLinks: () => {}, lockTrash: () => {}, unlockLinks: () => {}, }; }; return useLinksState; }); jest.mock('../_crypto/useDriveCrypto', () => { const useDriveCrypto = () => { return { getPrimaryAddressKey: () => {}, getOwnAddressAndPrimaryKeys: () => {}, }; }; return useDriveCrypto; }); jest.mock('./useLink', () => { const useLink = () => { return { getLink: () => {}, getLinkPassphraseAndSessionKey: () => {}, getLinkPrivateKey: () => {}, getLinkHashKey: () => {}, }; }; return useLink; }); const mockRequest = jest.fn(); const mockGetLinks = jest.fn(); const mockQueryTrashLinks = jest.fn(); const mockQueryRestoreLinks = jest.fn(); const mockQueryDeleteTrashedLinks = jest.fn(); const mockGetShare = jest.fn(); const mockGetDefaultShare = jest.fn(); jest.mock('./useLinks', () => { const useLink = () => { return { getLinks: mockGetLinks, }; }; return useLink; }); jest.mock('../_api/useDebouncedRequest', () => { const useDebouncedRequest = () => { return mockRequest; }; return useDebouncedRequest; }); jest.mock('../_shares/useShare', () => { const useShare = () => { return { getShare: mockGetShare, }; }; return useShare; }); jest.mock('../_shares/useDefaultShare', () => { const useDefaultShare = () => { return { getDefaultShare: mockGetDefaultShare, }; 
}; return useDefaultShare; }); const SHARE_ID_0 = 'shareId00'; const SHARE_ID_1 = 'shareId01'; const VOLUME_ID = 'volumeId00'; // TODO: Test suite incomplete // covers operations allowing using links from multiple shares describe('useLinksActions', () => { let hook: { current: ReturnType<typeof useLinksActions>; }; beforeEach(() => { jest.resetAllMocks(); mockRequest.mockImplementation((linkIds: string[]) => { return Promise.resolve({ Responses: linkIds.map(() => ({ Response: { Code: RESPONSE_CODE.SUCCESS, }, })), }); }); mockQueryTrashLinks.mockImplementation((shareId, parentLinkId, linkIds) => linkIds); mockQueryRestoreLinks.mockImplementation((shareId, linkIds) => linkIds); mockQueryDeleteTrashedLinks.mockImplementation((shareId, linkIds) => linkIds); mockGetShare.mockImplementation((ac, shareId) => ({ shareId })); mockGetDefaultShare.mockImplementation(() => ({ volumeId: VOLUME_ID })); const wrapper = ({ children }: { children: React.ReactNode }) => ( <VolumesStateProvider>{children}</VolumesStateProvider> ); const { result } = renderHook( () => useLinksActions({ queries: { queryTrashLinks: mockQueryTrashLinks, queryRestoreLinks: mockQueryRestoreLinks, queryDeleteTrashedLinks: mockQueryDeleteTrashedLinks, queryEmptyTrashOfShare: jest.fn(), queryDeleteChildrenLinks: jest.fn(), }, }), { wrapper } ); hook = result; }); it('trashes links from different shares', async () => { /* shareId00 └── link00 ├── link01 │ ├── link03 x │ └── link04 x └── link02 └── link05 x shareId01 └── link10 ├── link01 | └── link12 x <-- non-unique parent link id └── link11 ├── link13 └── link14 x */ const ac = new AbortController(); const result = await hook.current.trashLinks(ac.signal, [ { shareId: SHARE_ID_0, linkId: 'linkId03', parentLinkId: 'linkId01' }, { shareId: SHARE_ID_0, linkId: 'linkId04', parentLinkId: 'linkId01' }, { shareId: SHARE_ID_0, linkId: 'linkId05', parentLinkId: 'linkId02' }, { shareId: SHARE_ID_1, linkId: 'linkId14', parentLinkId: 'linkId11' }, { shareId: 
SHARE_ID_1, linkId: 'linkId12', parentLinkId: 'linkId01' }, ]); // ensure api requests are invoked by correct groups expect(mockQueryTrashLinks).toBeCalledWith(SHARE_ID_0, 'linkId01', ['linkId03', 'linkId04']); expect(mockQueryTrashLinks).toBeCalledWith(SHARE_ID_0, 'linkId02', ['linkId05']); expect(mockQueryTrashLinks).toBeCalledWith(SHARE_ID_1, 'linkId01', ['linkId12']); expect(mockQueryTrashLinks).toBeCalledWith(SHARE_ID_1, 'linkId11', ['linkId14']); // verify all requested links were processed expect(result.successes.sort()).toEqual(['linkId03', 'linkId04', 'linkId05', 'linkId12', 'linkId14'].sort()); }); it('restores links from different shares', async () => { /* shareId00 └── link00 ├── linkId01 < │ ├── linkId03 │ └── linkId04 < shareId01 └── linkId10 └── linkId11 < */ // emulate partial state const state: Record<string, any> = { linkId01: { rootShareId: SHARE_ID_0, linkId: 'linkId01', trashed: 3, }, linkId04: { rootShareId: SHARE_ID_0, linkId: 'linkId04', trashed: 1, }, linkId11: { rootShareId: SHARE_ID_1, linkId: 'linkId11', trashed: 3, }, }; mockGetLinks.mockImplementation(async (signal, ids: { linkId: string }[]) => { return ids.map((idGroup) => state[idGroup.linkId]).filter(isTruthy); }); const ac = new AbortController(); const result = await hook.current.restoreLinks(ac.signal, [ { shareId: SHARE_ID_0, linkId: 'linkId01' }, { shareId: SHARE_ID_0, linkId: 'linkId04' }, { shareId: SHARE_ID_1, linkId: 'linkId11' }, ]); expect(mockQueryRestoreLinks).toBeCalledWith(SHARE_ID_0, [ 'linkId01', 'linkId04', // this link has been deleted before link linkId, thus restored last ]); expect(mockQueryRestoreLinks).toBeCalledWith(SHARE_ID_1, ['linkId11']); expect(result.successes.sort()).toEqual(['linkId01', 'linkId04', 'linkId11'].sort()); }); it('deletes trashed links from different shares', async () => { /* shareId00 └── link00 ├── linkId01 x │ ├── linkId03 │ └── linkId04 x shareId01 └── linkId10 └── linkId11 x */ const ac = new AbortController(); const result = await 
hook.current.deleteTrashedLinks(ac.signal, [ { shareId: SHARE_ID_0, linkId: 'linkId01' }, { shareId: SHARE_ID_0, linkId: 'linkId04' }, { shareId: SHARE_ID_1, linkId: 'linkId11' }, ]); expect(mockQueryDeleteTrashedLinks).toBeCalledWith(SHARE_ID_0, ['linkId01', 'linkId04']); expect(mockQueryDeleteTrashedLinks).toBeCalledWith(SHARE_ID_1, ['linkId11']); expect(result.successes.sort()).toEqual(['linkId01', 'linkId04', 'linkId11'].sort()); }); });
3,082
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksActions.ts
import { usePreventLeave } from '@proton/components'; import { CryptoProxy } from '@proton/crypto'; import { queryDeleteChildrenLinks, queryDeleteTrashedLinks, queryEmptyTrashOfShare, queryRestoreLinks, queryTrashLinks, } from '@proton/shared/lib/api/drive/link'; import { queryMoveLink } from '@proton/shared/lib/api/drive/share'; import { queryVolumeEmptyTrash } from '@proton/shared/lib/api/drive/volume'; import { BATCH_REQUEST_SIZE, MAX_THREADS_PER_REQUEST, RESPONSE_CODE } from '@proton/shared/lib/drive/constants'; import runInQueue from '@proton/shared/lib/helpers/runInQueue'; import { encryptPassphrase, generateLookupHash } from '@proton/shared/lib/keys/driveKeys'; import { getDecryptedSessionKey } from '@proton/shared/lib/keys/drivePassphrase'; import chunk from '@proton/utils/chunk'; import groupWith from '@proton/utils/groupWith'; import { ValidationError } from '../../utils/errorHandling/ValidationError'; import { useDebouncedRequest } from '../_api'; import { useDriveEventManager } from '../_events'; import { useDefaultShare, useShare } from '../_shares'; import { useVolumesState } from '../_volumes'; import useLink from './useLink'; import useLinks from './useLinks'; import useLinksState from './useLinksState'; const INVALID_REQUEST_ERROR_CODES = [RESPONSE_CODE.ALREADY_EXISTS, RESPONSE_CODE.INVALID_REQUIREMENT]; interface APIResponses { Responses: { Response: { Code: RESPONSE_CODE; Error?: string; }; }[]; } /** * useLinksActions provides actions for manipulating with links in batches. 
 */
export function useLinksActions({
    queries,
}: {
    queries: {
        queryDeleteChildrenLinks: typeof queryDeleteChildrenLinks;
        queryDeleteTrashedLinks: typeof queryDeleteTrashedLinks;
        queryEmptyTrashOfShare: typeof queryEmptyTrashOfShare;
        queryRestoreLinks: typeof queryRestoreLinks;
        queryTrashLinks: typeof queryTrashLinks;
    };
}) {
    const { preventLeave } = usePreventLeave();
    const debouncedRequest = useDebouncedRequest();
    const events = useDriveEventManager();
    const { getLink, getLinkPassphraseAndSessionKey, getLinkPrivateKey, getLinkHashKey } = useLink();
    const { getLinks } = useLinks();
    const { lockLinks, lockTrash, unlockLinks } = useLinksState();
    const { getDefaultShare } = useDefaultShare();
    const { getShareCreatorKeys } = useShare();
    const volumeState = useVolumesState();

    /**
     * withLinkLock is helper to lock provided `linkIds` before the action done
     * using `callback`, and ensure links are unlocked after its done no matter
     * the result of the action.
     */
    const withLinkLock = async <T>(shareId: string, linkIds: string[], callback: () => Promise<T>): Promise<T> => {
        lockLinks(shareId, linkIds);
        try {
            return await callback();
        } finally {
            // Poll events for the whole volume first so the state is fresh
            // before the links become actionable again.
            const volumeId = volumeState.findVolumeId(shareId);
            if (volumeId) {
                await events.pollEvents.volumes(volumeId);
            }
            unlockLinks(shareId, linkIds);
        }
    };

    /**
     * Moves a single link to `newParentLinkId` (optionally on a different
     * share via `newShareId`), re-encrypting its name and passphrase for the
     * new parent.
     *
     * Returns the link's original parent link ID so callers can undo the move.
     * Throws ValidationError for API validation failures, plain Error for
     * crypto failures (with diagnostic data in the `cause`).
     */
    const moveLink = async (
        abortSignal: AbortSignal,
        {
            shareId,
            newParentLinkId,
            linkId,
            newShareId = shareId,
        }: {
            shareId: string;
            newParentLinkId: string;
            linkId: string;
            newShareId?: string;
        }
    ) => {
        // Fetch the link, its keys, the destination parent's keys, and the
        // signing address in parallel.
        const [
            link,
            { passphrase, passphraseSessionKey },
            newParentPrivateKey,
            newParentHashKey,
            { privateKey: addressKey, address },
        ] = await Promise.all([
            getLink(abortSignal, shareId, linkId),
            getLinkPassphraseAndSessionKey(abortSignal, shareId, linkId),
            getLinkPrivateKey(abortSignal, newShareId, newParentLinkId),
            getLinkHashKey(abortSignal, newShareId, newParentLinkId),
            getShareCreatorKeys(abortSignal, newShareId),
        ]);

        if (link.corruptedLink) {
            throw new Error('Cannot move corrupted file');
        }

        // Re-derive everything that depends on the destination parent:
        // lookup hash, optional content hash, and the re-wrapped passphrase.
        const [currentParentPrivateKey, Hash, ContentHash, { NodePassphrase, NodePassphraseSignature }] =
            await Promise.all([
                getLinkPrivateKey(abortSignal, shareId, link.parentLinkId),
                generateLookupHash(link.name, newParentHashKey).catch((e) =>
                    Promise.reject(
                        new Error('Failed to generate lookup hash during move', {
                            cause: {
                                e,
                                shareId,
                                newParentLinkId,
                                newShareId: newShareId === shareId ? undefined : newShareId,
                                linkId,
                            },
                        })
                    )
                ),
                // ContentHash is only computed when the link has a SHA-1
                // digest; otherwise this entry resolves to a falsy value.
                link.digests?.sha1 &&
                    generateLookupHash(link.digests.sha1, newParentHashKey).catch((e) =>
                        Promise.reject(
                            new Error('Failed to generate content hash during move', {
                                cause: {
                                    e,
                                    shareId,
                                    newParentLinkId,
                                    newShareId: newShareId === shareId ? undefined : newShareId,
                                    linkId,
                                },
                            })
                        )
                    ),
                encryptPassphrase(newParentPrivateKey, addressKey, passphrase, passphraseSessionKey).catch((e) =>
                    Promise.reject(
                        new Error('Failed to encrypt link passphrase during move', {
                            cause: {
                                e,
                                shareId,
                                newParentLinkId,
                                newShareId: newShareId === shareId ? undefined : newShareId,
                                linkId,
                            },
                        })
                    )
                ),
            ]);

        // The name session key is decrypted with the CURRENT parent's key...
        const sessionKeyName = await getDecryptedSessionKey({
            data: link.encryptedName,
            privateKeys: currentParentPrivateKey,
        }).catch((e) =>
            Promise.reject(
                new Error('Failed to decrypt link name session key during move', {
                    cause: {
                        e,
                        shareId,
                        newParentLinkId,
                        newShareId: newShareId === shareId ? undefined : newShareId,
                        linkId,
                    },
                })
            )
        );

        // ...and the name is re-encrypted for the NEW parent, signed by the
        // share creator's address key.
        const { message: encryptedName } = await CryptoProxy.encryptMessage({
            textData: link.name,
            stripTrailingSpaces: true,
            sessionKey: sessionKeyName,
            encryptionKeys: newParentPrivateKey,
            signingKeys: addressKey,
        }).catch((e) =>
            Promise.reject(
                new Error('Failed to encrypt link name during move', {
                    cause: {
                        e,
                        shareId,
                        newParentLinkId,
                        newShareId: newShareId === shareId ? undefined : newShareId,
                        linkId,
                    },
                })
            )
        );

        await debouncedRequest(
            queryMoveLink(shareId, linkId, {
                Name: encryptedName,
                Hash,
                ParentLinkID: newParentLinkId,
                NodePassphrase,
                NodePassphraseSignature,
                SignatureAddress: address.Email,
                // NewShareID is only sent for cross-share moves.
                NewShareID: newShareId === shareId ? undefined : newShareId,
                ContentHash,
            })
        ).catch((err) => {
            if (INVALID_REQUEST_ERROR_CODES.includes(err?.data?.Code)) {
                throw new ValidationError(err.data.Error);
            }
            throw err;
        });
        const originalParentId = link.parentLinkId;
        return originalParentId;
    };

    /**
     * Moves many links under `newParentLinkId`, locking them for the
     * duration. Individual failures are collected per link rather than
     * aborting the whole batch; `onMoved`/`onError` report progress.
     * Returns successes, failures, and a map of each moved link's original
     * parent (for undo).
     */
    const moveLinks = async (
        abortSignal: AbortSignal,
        {
            shareId,
            linkIds,
            newParentLinkId,
            newShareId,
            onMoved,
            onError,
        }: {
            shareId: string;
            linkIds: string[];
            newParentLinkId: string;
            newShareId?: string;
            onMoved?: (linkId: string) => void;
            onError?: (linkId: string) => void;
        }
    ) => {
        return withLinkLock(shareId, linkIds, async () => {
            const originalParentIds: { [linkId: string]: string } = {};
            const successes: string[] = [];
            const failures: { [linkId: string]: any } = {};
            const moveQueue = linkIds.map((linkId) => async () => {
                return moveLink(abortSignal, { shareId, newParentLinkId, linkId, newShareId })
                    .then((originalParentId) => {
                        successes.push(linkId);
                        originalParentIds[linkId] = originalParentId;
                        onMoved?.(linkId);
                    })
                    .catch((error) => {
                        failures[linkId] = error;
                        onError?.(linkId);
                    });
            });

            await preventLeave(runInQueue(moveQueue, MAX_THREADS_PER_REQUEST));
            return { successes, failures, originalParentIds };
        });
    };

    /**
     * batchHelper makes easier to do any action with many links in several
     * batches to make sure API can handle it (to not send thousands of links
     * in one request), all run in parallel (up to a reasonable limit).
     */
    const batchHelper = async <T>(
        abortSignal: AbortSignal,
        shareId: string,
        linkIds: string[],
        query: (batchLinkIds: string[], shareId: string) => any,
        maxParallelRequests = MAX_THREADS_PER_REQUEST
    ) => {
        return withLinkLock(shareId, linkIds, async () => {
            const responses: { batchLinkIds: string[]; response: T }[] = [];
            const successes: string[] = [];
            const failures: { [linkId: string]: any } = {};

            const batches = chunk(linkIds, BATCH_REQUEST_SIZE);

            const queue = batches.map(
                (batchLinkIds) => () =>
                    debouncedRequest<T>(query(batchLinkIds, shareId), abortSignal)
                        .then((response) => {
                            responses.push({ batchLinkIds, response });
                            batchLinkIds.forEach((linkId) => successes.push(linkId));
                        })
                        .catch((error) => {
                            // Request-level failure marks every link of the
                            // batch as failed.
                            batchLinkIds.forEach((linkId) => (failures[linkId] = error));
                        })
            );
            await preventLeave(runInQueue(queue, maxParallelRequests));
            return {
                responses,
                successes,
                failures,
            };
        });
    };

    /**
     * Same as batchHelper, but for links spread across several shares:
     * groups the ids by share, runs batchHelper per group, and additionally
     * unpacks per-link results from the API's multi-response payload.
     */
    const batchHelperMultipleShares = async (
        abortSignal: AbortSignal,
        ids: { shareId: string; linkId: string }[],
        query: (batchLinkIds: string[], shareId: string) => any,
        maxParallelRequests = MAX_THREADS_PER_REQUEST
    ) => {
        const groupedByShareId = groupWith((a, b) => a.shareId === b.shareId, ids);

        const results = await Promise.all(
            groupedByShareId.map((group) => {
                return batchHelper<APIResponses>(
                    abortSignal,
                    group[0].shareId,
                    group.map(({ linkId }) => linkId),
                    query,
                    maxParallelRequests
                );
            })
        );

        const { responses, failures } = accumulateResults(results);
        // Recompute successes from the per-link API responses: the request
        // can succeed while individual links inside it fail.
        const successes: string[] = [];
        responses.forEach(({ batchLinkIds, response }) => {
            response.Responses.forEach(({ Response }, index) => {
                const linkId = batchLinkIds[index];
                if (!Response.Error) {
                    successes.push(linkId);
                } else if (INVALID_REQUEST_ERROR_CODES.includes(Response.Code)) {
                    failures[linkId] = new ValidationError(Response.Error);
                } else {
                    failures[linkId] = Response.Error;
                }
            });
        });
        return { responses, successes, failures };
    };

    /**
     * Sends the given links to trash. The trash endpoint is scoped to a
     * parent folder, so the ids are grouped by parent before batching.
     */
    const trashLinks = async (
        abortSignal: AbortSignal,
        ids: { shareId: string; linkId: string; parentLinkId: string }[]
    ) => {
        const linksByParentIds = groupWith((a, b) => a.parentLinkId === b.parentLinkId, ids);

        const results = await Promise.all(
            linksByParentIds.map((linksGroup) => {
                const groupParentLinkId = linksGroup[0].parentLinkId;

                return batchHelperMultipleShares(abortSignal, linksGroup, (batchLinkIds, shareId) => {
                    return queries.queryTrashLinks(shareId, groupParentLinkId, batchLinkIds);
                });
            })
        );

        return accumulateResults(results);
    };

    /**
     * Restores trashed links back to their original parents.
     */
    const restoreLinks = async (abortSignal: AbortSignal, ids: { shareId: string; linkId: string }[]) => {
        /*
            Make sure to restore the most freshly trashed links first to ensure
            the potential parents are restored first because it is not possible
            to restore child if the parent stays in the trash.
            If user does not select the parent anyway, it is fine, it will just
            show error notification that some link(s) were not restored.
        */
        const links = await getLinks(abortSignal, ids);
        const sortedLinks = links.sort((a, b) => (b.trashed || 0) - (a.trashed || 0));
        const sortedLinkIds = sortedLinks.map(({ linkId, rootShareId }) => ({ linkId, shareId: rootShareId }));

        // Limit restore to one thread at a time only to make sure links are
        // restored in proper order (parents need to be restored before childs).
        const maxParallelRequests = 1;

        const results = await batchHelperMultipleShares(
            abortSignal,
            sortedLinkIds,
            (batchLinkIds, shareId) => {
                return queries.queryRestoreLinks(shareId, batchLinkIds);
            },
            maxParallelRequests
        );

        return results;
    };

    /**
     * Permanently deletes children links of one parent folder.
     */
    const deleteChildrenLinks = async (
        abortSignal: AbortSignal,
        shareId: string,
        parentLinkId: string,
        linkIds: string[]
    ) => {
        // NOTE(review): this calls the module-level queryDeleteChildrenLinks
        // directly instead of queries.queryDeleteChildrenLinks, unlike the
        // other actions — presumably unintended, as it bypasses the injected
        // query (breaks query injection/mocking); confirm and align.
        return batchHelper(abortSignal, shareId, linkIds, (batchLinkIds) =>
            queryDeleteChildrenLinks(shareId, parentLinkId, batchLinkIds)
        );
    };

    /**
     * Permanently deletes links that are already in the trash.
     */
    const deleteTrashedLinks = async (abortSignal: AbortSignal, ids: { linkId: string; shareId: string }[]) => {
        return batchHelperMultipleShares(abortSignal, ids, (batchLinkIds, shareId) => {
            return queries.queryDeleteTrashedLinks(shareId, batchLinkIds);
        });
    };

    /**
     * Empties the whole trash of the default share's volume.
     * Note: locks the trash but does not unlock it here; unlocking is
     * presumably driven by the subsequent volume events — confirm.
     */
    const emptyTrash = async (abortSignal: AbortSignal) => {
        // NOTE(review): the injected queries.queryEmptyTrashOfShare is never
        // used; this calls queryVolumeEmptyTrash instead — confirm whether
        // the injected query is dead code.
        const { volumeId } = await getDefaultShare();
        lockTrash();

        await debouncedRequest(queryVolumeEmptyTrash(volumeId), abortSignal);

        await events.pollEvents.volumes(volumeId);
    };

    return {
        moveLinks,
        trashLinks,
        restoreLinks,
        deleteChildrenLinks,
        deleteTrashedLinks,
        emptyTrash,
    };
}

// Default hook wiring the real API queries into useLinksActions.
// (Note: "Quieries" is a typo for "Queries"; kept because renaming is out of
// scope for a documentation-only change.)
export default function useLinksActionsWithQuieries() {
    return useLinksActions({
        queries: {
            queryTrashLinks,
            queryDeleteChildrenLinks,
            queryDeleteTrashedLinks,
            queryEmptyTrashOfShare,
            queryRestoreLinks,
        },
    });
}

// Aggregated outcome of one or more batched requests.
interface Result<T> {
    responses: {
        batchLinkIds: string[];
        response: T;
    }[];
    successes: string[];
    failures: {
        [linkId: string]: any;
    };
}

// Merges several batch results into one: responses and successes are
// concatenated, failures are merged by link ID (later results win on clash).
function accumulateResults<T>(results: Result<T>[]): Result<T> {
    return results.reduce(
        (acc, result) => {
            acc.responses.push(...result.responses);
            acc.successes.push(...result.successes);
            acc.failures = { ...acc.failures, ...result.failures };
            return acc;
        },
        {
            responses: [],
            successes: [],
            failures: {},
        }
    );
}
3,083
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksKeys.test.tsx
import { LinksKeys } from './useLinksKeys';

describe('useLinksKeys', () => {
    // Fresh cache instance for every test so state cannot leak between them.
    let cache: LinksKeys;

    beforeEach(() => {
        cache = new LinksKeys();
    });

    it('returns empty passphrase when not set', () => {
        cache.setPassphrase('shareId', 'linkId', 'pass');

        // Looking up a different link must not hit the cached value.
        expect(cache.getPassphrase('shareId', 'missingLinkId')).toBe(undefined);
    });

    it('returns the cached passphrase', () => {
        cache.setPassphrase('shareId', 'linkId', 'pass');

        expect(cache.getPassphrase('shareId', 'linkId')).toBe('pass');
    });

    it('setting another key for the same link does not remove the other key', () => {
        const hashKey = new Uint8Array([1, 2]);

        cache.setPassphrase('shareId', 'linkId', 'pass');
        cache.setHashKey('shareId', 'linkId', hashKey);

        // Both keys must coexist on the same link entry.
        expect(cache.getPassphrase('shareId', 'linkId')).toBe('pass');
        expect(cache.getHashKey('shareId', 'linkId')).toBe(hashKey);
    });

    it('setting the key again overrides the original value', () => {
        cache.setPassphrase('shareId', 'linkId', 'pass');
        cache.setPassphrase('shareId', 'linkId', 'newpass');

        expect(cache.getPassphrase('shareId', 'linkId')).toBe('newpass');
    });
});
3,084
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksKeys.tsx
import { createContext, useContext, useRef } from 'react';

import { PrivateKeyReference, SessionKey } from '@proton/crypto';

type LinksKeysByShare = {
    [shareId: string]: {
        [linkId: string]: LinkKeys;
    };
};

type LinkKeys = FileLinkKeys & FolderLinkKeys;

type FileLinkKeys = {
    passphrase?: string;
    passphraseSessionKey?: SessionKey;
    privateKey?: PrivateKeyReference;
    sessionKey?: SessionKey;
};

type FolderLinkKeys = {
    passphrase?: string;
    passphraseSessionKey?: SessionKey;
    privateKey?: PrivateKeyReference;
    hashKey?: Uint8Array;
};

/**
 * LinksKeys provides a simple storage to cache link keys.
 * Ideally, there should be only one instance in the whole app.
 *
 * Keys are stored per share, per link; getters return `undefined` when the
 * requested key was never cached.
 */
export class LinksKeys {
    private keys: LinksKeysByShare;

    constructor() {
        this.keys = {};
    }

    getPassphrase(shareId: string, linkId: string): string | undefined {
        return this.keys[shareId]?.[linkId]?.passphrase;
    }

    getPassphraseSessionKey(shareId: string, linkId: string): SessionKey | undefined {
        return this.keys[shareId]?.[linkId]?.passphraseSessionKey;
    }

    getPrivateKey(shareId: string, linkId: string): PrivateKeyReference | undefined {
        return this.keys[shareId]?.[linkId]?.privateKey;
    }

    getSessionKey(shareId: string, linkId: string): SessionKey | undefined {
        return this.keys[shareId]?.[linkId]?.sessionKey;
    }

    getHashKey(shareId: string, linkId: string): Uint8Array | undefined {
        return this.keys[shareId]?.[linkId]?.hashKey;
    }

    setPassphrase(shareId: string, linkId: string, passphrase: string) {
        this.setKey(shareId, linkId, (keys: LinkKeys) => {
            keys.passphrase = passphrase;
        });
    }

    setPassphraseSessionKey(shareId: string, linkId: string, sessionKey: SessionKey) {
        this.setKey(shareId, linkId, (keys: LinkKeys) => {
            keys.passphraseSessionKey = sessionKey;
        });
    }

    setPrivateKey(shareId: string, linkId: string, privateKey: PrivateKeyReference) {
        this.setKey(shareId, linkId, (keys: LinkKeys) => {
            keys.privateKey = privateKey;
        });
    }

    setSessionKey(shareId: string, linkId: string, sessionKey: SessionKey) {
        this.setKey(shareId, linkId, (keys: LinkKeys) => {
            keys.sessionKey = sessionKey;
        });
    }

    setHashKey(shareId: string, linkId: string, hashKey: Uint8Array) {
        this.setKey(shareId, linkId, (keys: LinkKeys) => {
            keys.hashKey = hashKey;
        });
    }

    // Lazily creates the share/link entry, then lets `setter` mutate it.
    // Setting one key never clobbers the other cached keys of the same link.
    private setKey(shareId: string, linkId: string, setter: (keys: LinkKeys) => void) {
        if (!this.keys[shareId]) {
            this.keys[shareId] = {};
        }
        if (!this.keys[shareId][linkId]) {
            this.keys[shareId][linkId] = {};
        }
        setter(this.keys[shareId][linkId]);
    }
}

const LinksKeysContext = createContext<LinksKeys | null>(null);

/**
 * Provides a single LinksKeys cache instance to the subtree.
 */
export function LinksKeysProvider({ children }: { children: React.ReactNode }) {
    // Keep ONE LinksKeys instance for the provider's whole lifetime.
    // The previous implementation created `new LinksKeys()` on every render,
    // which silently wiped the cached keys and changed the context value's
    // identity on each re-render.
    const valueRef = useRef<LinksKeys>();
    if (!valueRef.current) {
        valueRef.current = new LinksKeys();
    }
    return <LinksKeysContext.Provider value={valueRef.current}>{children}</LinksKeysContext.Provider>;
}

/**
 * Returns the shared LinksKeys cache.
 * @throws Error when used outside of LinksKeysProvider.
 */
export default function useLinksKeys() {
    const state = useContext(LinksKeysContext);
    if (!state) {
        throw new Error('Trying to use uninitialized LinksKeysProvider');
    }
    return state;
}
3,085
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksQueue.test.tsx
import { act, renderHook } from '@testing-library/react-hooks';

import { useLinksQueue } from './useLinksQueue';

// Mock the links-listing hook so we can observe/fail the batched metadata load.
const mockedLoadLinksMeta = jest.fn();
jest.mock('./useLinksListing', () => ({
    useLinksListing: () => ({
        loadLinksMeta: mockedLoadLinksMeta,
    }),
}));

// Mock the links state so tests control whether a link is "already loaded".
const mockedGetLink = jest.fn();
jest.mock('./useLinksState', () => ({
    __esModule: true,
    default: () => ({
        getLink: mockedGetLink,
    }),
}));

// Fake timers: the queue debounces with setTimeout, which we also spy on to
// count how often addToQueue scheduled processing.
jest.useFakeTimers();
jest.spyOn(global, 'setTimeout');

const mockedAbort = jest.fn();
let mockedAbortSignal = { aborted: false };

// @ts-ignore - not mocking the entire AbortSignal, sorry =)
global.AbortController = jest.fn(() => ({
    signal: mockedAbortSignal,
    abort: mockedAbort,
}));

const SHARE_ID = 'shareId';

// Builds `count` distinct link IDs: linkId-0 .. linkId-(count-1).
const getLinkIds = (count: number) => [...Array(count).keys()].map((id) => `linkId-${id}`);
// Minimal stand-in for a React ref, used for the visibility check.
const getFakeRef: <T>(value: T) => React.MutableRefObject<T> = (value) => ({ current: value });

describe('useLinksQueue', () => {
    let hook: {
        current: ReturnType<typeof useLinksQueue>;
    };
    let unmountHook: () => void;

    beforeEach(() => {
        const { result, unmount } = renderHook(() => useLinksQueue());
        hook = result;
        unmountHook = unmount;

        jest.clearAllTimers();
        jest.clearAllMocks();
        mockedGetLink.mockReset();
        mockedLoadLinksMeta.mockReset();
        mockedAbort.mockReset();
        mockedAbortSignal.aborted = false;

        // Default: no link is in state, so everything gets queued.
        mockedGetLink.mockImplementation(() => undefined);
    });

    it('should not add to queue if link is already in state', async () => {
        mockedGetLink.mockImplementation((shareId, linkId) => ({
            shareId,
            linkId,
        }));

        await act(async () => {
            hook.current.addToQueue(SHARE_ID, 'linkId');
        });

        // No timer scheduled means nothing was enqueued.
        expect(setTimeout).not.toHaveBeenCalled();
    });

    it('should not add to queue if linkId is already in queue', async () => {
        await act(async () => {
            hook.current.addToQueue(SHARE_ID, 'linkId');
            hook.current.addToQueue(SHARE_ID, 'linkId');
        });

        // Second add is a no-op; only the first scheduled processing.
        expect(setTimeout).toHaveBeenCalledTimes(1);
    });

    it('should debounce processing to wait for the queue to fill up', async () => {
        const items = getLinkIds(7);

        await act(async () => {
            items.forEach((item) => hook.current.addToQueue(SHARE_ID, item));
        });

        jest.runAllTimers();

        // Each add schedules a timer, but all 7 links are batched into a
        // single metadata request.
        expect(setTimeout).toHaveBeenCalledTimes(items.length);
        expect(mockedLoadLinksMeta).toHaveBeenCalledTimes(1);
    });

    it('should not load links if the domRef is null', async () => {
        await act(async () => {
            // A ref with current === null marks the item as no longer
            // visible; it is dropped before loading.
            hook.current.addToQueue(SHARE_ID, 'linkId', getFakeRef(null));
        });

        jest.runAllTimers();

        expect(setTimeout).toHaveBeenCalledTimes(1);
        expect(mockedLoadLinksMeta).not.toHaveBeenCalled();
    });

    it('should abort when the hook is unmounted', async () => {
        await act(async () => {
            hook.current.addToQueue(SHARE_ID, 'linkId');
        });

        jest.runAllTimers();

        unmountHook();

        expect(mockedAbort).toHaveBeenCalled();
    });

    it('should not load links if aborted', async () => {
        mockedAbortSignal.aborted = true;

        await act(async () => {
            hook.current.addToQueue(SHARE_ID, 'linkId');
        });

        jest.runAllTimers();

        expect(setTimeout).toHaveBeenCalledTimes(1);
        expect(mockedLoadLinksMeta).not.toHaveBeenCalled();
    });

    it('should not infinite loop if loadLinksMeta fails', async () => {
        // Silence console errors
        const consoleErrorMock = jest.spyOn(console, 'error').mockImplementation(() => {});
        mockedLoadLinksMeta.mockRejectedValue(new Error('oh no'));

        await act(async () => {
            hook.current.addToQueue(SHARE_ID, 'linkId');
        });

        await jest.runAllTimersAsync();

        // The failing link must be dropped from the queue, not retried
        // forever; the error is logged exactly once.
        expect(setTimeout).toHaveBeenCalledTimes(1);
        expect(mockedLoadLinksMeta).toHaveBeenCalledTimes(1);
        expect(consoleErrorMock).toHaveBeenCalledTimes(1);

        consoleErrorMock.mockRestore();
    }, 1000);
});
3,086
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksQueue.ts
import { MutableRefObject, useEffect, useRef } from 'react'; import { useLinksListing } from './useLinksListing'; import useLinksState from './useLinksState'; type Props = { /** * Whether or not to load thumbnails with the links. */ loadThumbnails?: boolean; }; export const useLinksQueue = ({ loadThumbnails }: Props = {}) => { const { loadLinksMeta } = useLinksListing(); const linksState = useLinksState(); const queue = useRef(new Set<string>()); const domRefMap = useRef(new Map<string, MutableRefObject<unknown>>()); const controller = useRef<AbortController | null>(null); const promise = useRef<Promise<unknown> | null>(null); useEffect(() => { return () => { controller.current?.abort(); }; }, []); const processQueue = (shareId: string) => new Promise(async (resolve) => { controller.current = new AbortController(); while (queue.current.size > 0 && !controller.current.signal.aborted) { // Remove items from the queue which are no longer visible queue.current.forEach((item) => { let ref = domRefMap.current.get(item); if (ref && !ref.current) { queue.current.delete(item); domRefMap.current.delete(item); } }); if (queue.current.size === 0) { break; } const linkIds = Array.from(queue.current); try { await loadLinksMeta(controller.current.signal, `links-${shareId}`, shareId, linkIds, { loadThumbnails, }); } catch (e) { console.error(e); } linkIds.forEach((linkId) => { queue.current.delete(linkId); domRefMap.current.delete(linkId); }); } controller.current = null; resolve(null); }); const addToQueue = (shareId: string, linkId: string, domRef?: React.MutableRefObject<unknown>) => { if (linksState.getLink(shareId, linkId) || queue.current.has(linkId)) { return; } queue.current.add(linkId); if (domRef) { domRefMap.current.set(linkId, domRef); } // We'll debounce starting the queue for a bit, to collect items to batch setTimeout(() => { if (!promise.current) { promise.current = processQueue(shareId).then(() => { promise.current = null; }); } }, 10); }; return { addToQueue, }; 
}; export default useLinksQueue;
3,087
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksState.test.tsx
import { act, renderHook } from '@testing-library/react-hooks'; import { EVENT_TYPES } from '@proton/shared/lib/drive/constants'; import { DriveEvents } from '../_events'; import { DecryptedLink, EncryptedLink, LinkShareUrl } from './interface'; import { Link, LinksState, addOrUpdate, deleteLinks, setCachedThumbnailUrl, setLock, updateByEvents, useLinksStateProvider, } from './useLinksState'; jest.mock('../_events/useDriveEventManager', () => { const useDriveEventManager = () => { return { eventHandlers: { register: () => 'id', unregister: () => false, }, }; }; return { useDriveEventManager, }; }); function generateTestLink(id: string, parentId: string | undefined, decrypted = false): Link { return { encrypted: { linkId: id, name: id, parentLinkId: parentId, }, decrypted: decrypted ? { linkId: id, name: id, parentLinkId: parentId, } : undefined, } as Link; } function getLockedIds(state: LinksState): string[] { return Object.values(state.shareId.links) .filter(({ decrypted }) => decrypted?.isLocked) .map(({ encrypted }) => encrypted.linkId); } function generateEvents(events: any[]): DriveEvents { return { eventId: 'eventId', events: events.map(([eventType, encryptedLink]) => ({ eventType, encryptedLink })), refresh: false, }; } describe('useLinksState', () => { let state: LinksState; beforeEach(() => { state = { shareId: { links: { linkId0: generateTestLink('linkId0', undefined), linkId1: generateTestLink('linkId1', 'linkId0'), linkId2: generateTestLink('linkId2', 'linkId1'), linkId3: generateTestLink('linkId3', 'linkId1'), linkId4: generateTestLink('linkId4', 'linkId0'), linkId5: generateTestLink('linkId5', 'linkId4'), linkId6: generateTestLink('linkId6', 'linkId4'), linkId7: generateTestLink('linkId7', 'linkId0', true), linkId8: generateTestLink('linkId8', 'linkId7', true), linkId9: generateTestLink('linkId9', 'linkId7', true), }, tree: { linkId0: ['linkId1', 'linkId4', 'linkId7'], linkId1: ['linkId2', 'linkId3'], linkId4: ['linkId5', 'linkId6'], linkId7: 
['linkId8', 'linkId9'], }, }, }; }); it('deletes links', () => { const result = deleteLinks(state, 'shareId', ['linkId1', 'linkId2', 'linkId6', 'linkId8', 'linkId9']); // Removed links from links. expect(Object.keys(result.shareId.links)).toMatchObject(['linkId0', 'linkId4', 'linkId5', 'linkId7']); // Removed parent from tree. expect(Object.keys(result.shareId.tree)).toMatchObject(['linkId0', 'linkId4', 'linkId7']); // Removed children from tree. expect(result.shareId.tree.linkId4).toMatchObject(['linkId5']); }); it('adds new encrypted link', () => { const result = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'newLink', name: 'newLink', parentLinkId: 'linkId1', } as EncryptedLink, }, ]); // Added to links. expect(result.shareId.links.newLink).toMatchObject({ encrypted: { linkId: 'newLink' }, }); // Added to tree as child to its parent. expect(result.shareId.tree.linkId1).toMatchObject(['linkId2', 'linkId3', 'newLink']); }); it('adds link to new share', () => { const result = addOrUpdate(state, 'shareId2', [ { encrypted: { linkId: 'newLink', name: 'newLink', parentLinkId: 'linkId1', } as EncryptedLink, }, ]); // Added new link to links. expect(result.shareId2.links.newLink).toMatchObject({ encrypted: { linkId: 'newLink' }, }); // Added parent to tree. 
expect(Object.keys(result.shareId2.tree)).toMatchObject(['linkId1']); expect(result.shareId2.tree.linkId1).toMatchObject(['newLink']); }); it('updates encrypted link', () => { const result = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', } as EncryptedLink, }, ]); expect(result.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', name: 'linkId7', isStale: true }, encrypted: { linkId: 'linkId7' }, }); }); it('updates encrypted link without need to re-decrypt', () => { const result = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', } as EncryptedLink, }, ]); expect(result.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', name: 'linkId7', isStale: false }, encrypted: { linkId: 'linkId7' }, }); }); it('updates encrypted link with different parent', () => { const result = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId6', } as EncryptedLink, }, ]); // Updated link in links. expect(result.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', isStale: true }, // Changing parent requires to re-decrypt again. encrypted: { linkId: 'linkId7' }, }); // Updated original parent tree. expect(result.shareId.tree.linkId0).toMatchObject(['linkId1', 'linkId4']); // Updated new parent tree. 
expect(result.shareId.tree.linkId6).toMatchObject(['linkId7']); }); it('updates decrypted link', () => { const result = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', } as EncryptedLink, decrypted: { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', } as DecryptedLink, }, ]); expect(result.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', name: 'new name' }, encrypted: { linkId: 'linkId7' }, }); }); it('updates trashed link', () => { const result1 = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', trashed: 12345678, } as EncryptedLink, }, { encrypted: { linkId: 'linkId8', name: 'linkId8', parentLinkId: 'linkId7', trashed: 123456789, } as EncryptedLink, }, ]); expect(result1.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', name: 'linkId7', isStale: false }, encrypted: { linkId: 'linkId7' }, }); // Trashed link is removed from the parent. expect(result1.shareId.tree.linkId0).toMatchObject(['linkId1', 'linkId4']); // Trashed parent trashes automatically also children. expect(result1.shareId.links.linkId7.encrypted.trashed).toBe(12345678); expect(result1.shareId.links.linkId7.encrypted.trashedByParent).toBeFalsy(); expect(result1.shareId.links.linkId8.encrypted.trashed).toBe(123456789); expect(result1.shareId.links.linkId8.encrypted.trashedByParent).toBeFalsy(); expect(result1.shareId.links.linkId9.encrypted.trashed).toBe(12345678); expect(result1.shareId.links.linkId9.encrypted.trashedByParent).toBeTruthy(); // Restoring from trash re-adds link back to its parent. 
const result2 = addOrUpdate(result1, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', trashed: null, } as EncryptedLink, }, ]); expect(result2.shareId.tree.linkId0).toMatchObject(['linkId1', 'linkId4', 'linkId7']); // Restoring from trash removes also trashed flag to its children which were trashed with it. expect(result1.shareId.links.linkId7.encrypted.trashed).toBe(null); expect(result1.shareId.links.linkId7.encrypted.trashedByParent).toBeFalsy(); expect(result1.shareId.links.linkId8.encrypted.trashed).toBe(123456789); expect(result1.shareId.links.linkId8.encrypted.trashedByParent).toBeFalsy(); expect(result1.shareId.links.linkId9.encrypted.trashed).toBe(null); expect(result1.shareId.links.linkId9.encrypted.trashedByParent).toBeFalsy(); }); it('updates trashed folder and adds files to it', () => { const result1 = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', trashed: 12345678, } as EncryptedLink, }, { encrypted: { linkId: 'linkId7a', name: 'linkId7a', parentLinkId: 'linkId7', } as EncryptedLink, }, ]); // Children of trashed parent is added to tree structure. expect(result1.shareId.tree.linkId7).toMatchObject(['linkId8', 'linkId9', 'linkId7a']); // Trashed parent trashes automatically also children. expect(result1.shareId.links.linkId7.encrypted.trashed).toBe(12345678); expect(result1.shareId.links.linkId7.encrypted.trashedByParent).toBeFalsy(); expect(result1.shareId.links.linkId7a.encrypted.trashed).toBe(12345678); expect(result1.shareId.links.linkId7a.encrypted.trashedByParent).toBeTruthy(); // Restoring from trash re-adds link back to its parent. const result2 = addOrUpdate(result1, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', trashed: null, } as EncryptedLink, }, ]); // Trashed parent trashes automatically also children. 
expect(result2.shareId.links.linkId7.encrypted.trashed).toBe(null); expect(result2.shareId.links.linkId7.encrypted.trashedByParent).toBeFalsy(); expect(result2.shareId.links.linkId7a.encrypted.trashed).toBe(null); expect(result2.shareId.links.linkId7a.encrypted.trashedByParent).toBeFalsy(); }); it('updates encrypted link with signature issue', () => { // First, it sets signature issue. const result1 = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', signatureIssues: { name: 2 }, } as unknown as EncryptedLink, }, ]); expect(result1.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', signatureIssues: { name: 2 } }, encrypted: { linkId: 'linkId7', signatureIssues: { name: 2 } }, }); // Second, it keeps it even if we do another update which doesnt change // how the link is encrypted (keys and encrypted data are the same). const result2 = addOrUpdate(result1, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', } as unknown as EncryptedLink, }, ]); expect(result2.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', signatureIssues: { name: 2 } }, encrypted: { linkId: 'linkId7', signatureIssues: { name: 2 } }, }); // Third, signature issue is cleared if keys or encrypted data is changed. 
const result3 = addOrUpdate(result2, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', nodeKey: 'anotherKey', } as unknown as EncryptedLink, }, ]); expect(result3.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', signatureIssues: undefined }, encrypted: { linkId: 'linkId7', signatureIssues: undefined }, }); }); it('updates decrypted link with signature issue', () => { const result = addOrUpdate(state, 'shareId', [ { encrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', } as unknown as EncryptedLink, decrypted: { linkId: 'linkId7', name: 'linkId7', parentLinkId: 'linkId0', signatureIssues: { name: 2 }, } as unknown as DecryptedLink, }, ]); expect(result.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', signatureIssues: { name: 2 } }, encrypted: { linkId: 'linkId7', signatureIssues: { name: 2 } }, }); }); it('locks and unlocks links', () => { const result1 = setLock(state, 'shareId', ['linkId7', 'linkId8'], true); expect(getLockedIds(result1)).toMatchObject(['linkId7', 'linkId8']); const result2 = setLock(state, 'shareId', ['linkId8'], false); expect(getLockedIds(result2)).toMatchObject(['linkId7']); }); it('locks and unlocks trashed links', () => { (state.shareId.links.linkId7.decrypted as DecryptedLink).trashed = 1234; (state.shareId.links.linkId8.decrypted as DecryptedLink).trashed = 5678; const result1 = setLock(state, 'shareId', 'trash', true); expect(getLockedIds(result1)).toMatchObject(['linkId7', 'linkId8']); const result2 = setLock(state, 'shareId', 'trash', false); expect(getLockedIds(result2)).toMatchObject([]); }); it('preserves lock for newly added trashed link', () => { const result1 = setLock(state, 'shareId', 'trash', true); const result2 = addOrUpdate(result1, 'shareId', [ { encrypted: { linkId: 'linkId100', name: 'linkId100', parentLinkId: 'linkId0', trashed: 12345678, } as EncryptedLink, decrypted: { linkId: 'linkId100', name: 'linkId100', 
parentLinkId: 'linkId0', trashed: 12345678, } as DecryptedLink, }, { encrypted: { linkId: 'linkId101', name: 'linkId101', parentLinkId: 'linkId0', trashed: 12345678900, // Way in future after setLock was called. } as EncryptedLink, decrypted: { linkId: 'linkId101', name: 'linkId101', parentLinkId: 'linkId0', trashed: 12345678900, } as DecryptedLink, }, ]); // linkId101 was deleted after our empty action, so is not locked. expect(getLockedIds(result2)).toMatchObject(['linkId100']); }); it('sets cached thumbnail', () => { const result = setCachedThumbnailUrl(state, 'shareId', 'linkId7', 'cachedurl'); expect(result.shareId.links.linkId7.decrypted).toMatchObject({ cachedThumbnailUrl: 'cachedurl' }); }); it('preserves cached lock flag', () => { const state2 = setLock(state, 'shareId', ['linkId7'], true); const link = { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', }; const result = addOrUpdate(state2, 'shareId', [ { encrypted: link as EncryptedLink, decrypted: link as DecryptedLink, }, ]); expect(result.shareId.links.linkId7.decrypted).toMatchObject({ isLocked: true }); }); it('preserves cached thumbnail', () => { const state2 = setCachedThumbnailUrl(state, 'shareId', 'linkId7', 'cachedurl'); const link = { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', }; const result = addOrUpdate(state2, 'shareId', [ { encrypted: link as EncryptedLink, decrypted: link as DecryptedLink, }, ]); expect(result.shareId.links.linkId7.decrypted).toMatchObject({ cachedThumbnailUrl: 'cachedurl' }); }); it('does not preserve cached thumbnail when revision changed', () => { const state2 = setCachedThumbnailUrl(state, 'shareId', 'linkId7', 'cachedurl'); const link = { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', activeRevision: { id: 'newId' }, }; const result = addOrUpdate(state2, 'shareId', [ { encrypted: link as EncryptedLink, decrypted: link as DecryptedLink, }, ]); expect(result.shareId.links.linkId7.decrypted).toMatchObject({ 
cachedThumbnailUrl: undefined }); }); it('preserves latest share url num accesses', () => { expect(state.shareId.links.linkId7.decrypted?.shareUrl?.numAccesses).toBe(undefined); // First set the numAccesses and check its set. const linkWithAccesses = { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', shareUrl: { id: 'shareUrlId', numAccesses: 0, // Test with zero to make sure zero is also well handled. }, }; const result1 = addOrUpdate(state, 'shareId', [ { encrypted: linkWithAccesses as EncryptedLink, decrypted: linkWithAccesses as DecryptedLink, }, ]); expect(result1.shareId.links.linkId7.decrypted?.shareUrl?.numAccesses).toBe(0); // Then set newer link without numAccesses which stil preserves the previous value. const linkWithoutAccesses = { linkId: 'linkId7', name: 'newer name', parentLinkId: 'linkId0', shareUrl: { id: 'shareUrlId', }, }; const result2 = addOrUpdate(state, 'shareId', [ { encrypted: linkWithoutAccesses as EncryptedLink, decrypted: linkWithoutAccesses as DecryptedLink, }, ]); expect(result2.shareId.links.linkId7.decrypted?.shareUrl?.numAccesses).toBe(0); }); it('sets zero num accesses for fresh new share url', () => { (state.shareId.links.linkId7.decrypted as DecryptedLink).shareUrl = undefined; (state.shareId.links.linkId8.decrypted as DecryptedLink).shareUrl = { id: 'shareUrlId', } as LinkShareUrl; const link7 = { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', shareUrl: { id: 'shareUrlId1', }, }; const link8 = { linkId: 'linkId8', name: 'new name', parentLinkId: 'linkId7', shareUrl: { id: 'shareUrlId2', }, }; const result = addOrUpdate(state, 'shareId', [ { encrypted: link7 as EncryptedLink, decrypted: link7 as DecryptedLink, }, { encrypted: link8 as EncryptedLink, decrypted: link8 as DecryptedLink, }, ]); // Link 7 had no shareUrl before, that means it is freshly created, so set to 0. 
expect(result.shareId.links.linkId7.decrypted?.shareUrl?.numAccesses).toBe(0); // Whereas link 8 had shareUrl before, so the update is about something else, and we need to keep undefined. expect(result.shareId.links.linkId8.decrypted?.shareUrl?.numAccesses).toBe(undefined); }); it('processes events', () => { const result = updateByEvents( state, generateEvents([ [ EVENT_TYPES.CREATE, { linkId: 'newLink', name: 'newLink', parentLinkId: 'linkId0', rootShareId: 'shareId' }, ], [EVENT_TYPES.DELETE, { linkId: 'linkId1' }], [EVENT_TYPES.DELETE, { linkId: 'linkId4' }], [ EVENT_TYPES.UPDATE, { linkId: 'linkId7', name: 'new name', parentLinkId: 'linkId0', rootShareId: 'shareId' }, ], ]) ); expect(Object.keys(result.shareId.links)).toMatchObject([ 'linkId0', 'linkId7', 'linkId8', 'linkId9', 'newLink', ]); expect(Object.keys(result.shareId.tree)).toMatchObject(['linkId0', 'linkId7']); expect(result.shareId.links.linkId7).toMatchObject({ decrypted: { linkId: 'linkId7', name: 'linkId7', isStale: true }, encrypted: { linkId: 'linkId7' }, }); }); it('skips events from non-present share', () => { const result = updateByEvents( state, generateEvents([ [ EVENT_TYPES.CREATE, { linkId: 'newLink', name: 'newLink', parentLinkId: 'linkId0', rootShareId: 'shareId2' }, ], ]) ); expect(Object.keys(result)).toMatchObject(['shareId']); }); describe('hook', () => { let hook: { current: ReturnType<typeof useLinksStateProvider>; }; beforeEach(() => { const { result } = renderHook(() => useLinksStateProvider()); hook = result; act(() => { state.shareId.links.linkId6.encrypted.shareUrl = { id: 'shareUrlId', token: 'token', isExpired: false, createTime: 12345, expireTime: null, }; state.shareId.links.linkId7.encrypted.trashed = 12345; state.shareId.links.linkId8.encrypted.trashed = 12345; state.shareId.links.linkId8.encrypted.trashedByParent = true; state.shareId.links.linkId9.encrypted.trashed = 12345; state.shareId.links.linkId9.encrypted.trashedByParent = true; hook.current.setLinks('shareId', 
Object.values(state.shareId.links)); }); }); it('returns children of the parent', () => { const links = hook.current.getChildren('shareId', 'linkId1'); expect(links.map((link) => link.encrypted.linkId)).toMatchObject(['linkId2', 'linkId3']); }); it('returns trashed links', () => { const links = hook.current.getTrashed('shareId'); expect(links.map((link) => link.encrypted.linkId)).toMatchObject(['linkId7']); }); it('returns shared links', () => { const links = hook.current.getSharedByLink('shareId'); expect(links.map((link) => link.encrypted.linkId)).toMatchObject(['linkId6']); }); }); });
3,088
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksState.tsx
import { createContext, useCallback, useContext, useEffect, useState } from 'react';

import { EVENT_TYPES } from '@proton/shared/lib/drive/constants';
import isTruthy from '@proton/utils/isTruthy';

import { DriveEvents, useDriveEventManager } from '../_events';
import { DecryptedLink, EncryptedLink, LinkShareUrl, SignatureIssues } from './interface';
import { isDecryptedLinkSame, isEncryptedLinkSame } from './link';

/**
 * In-memory cache of links, keyed by share ID.
 * Each share holds a flat map of links plus a parent->children tree used
 * for fast children lookups and for propagating the trashed flag.
 */
export type LinksState = {
    [shareId: string]: {
        links: Links;
        tree: Tree;
        // Timestamp of the last "Empty Trash" action to properly compute
        // isLocked flag for newly added links. Links added later need
        // to have isLocked based on the information if API will delete them
        // or not.
        latestTrashEmptiedAt?: number;
    };
};

// Flat map of link ID -> cached link (encrypted, optionally decrypted).
type Links = {
    [linkId: string]: Link;
};

// A cached link. `decrypted` is present only once decryption was attempted;
// it may be stale (see `isStale`) and need re-decryption.
export type Link = {
    encrypted: EncryptedLink;
    decrypted?: DecryptedLink;
};

// Parent link ID -> IDs of its cached children.
type Tree = {
    [parentLinkId: string]: string[];
};

/**
 * Returns whether or not a `Link` is decrypted.
 */
export function isLinkDecrypted(link: Link | undefined): link is Required<Link> {
    return !!link && !!link.decrypted && !link.decrypted.isStale;
}

/**
 * useLinksStateProvider provides a storage to cache links.
 */
export function useLinksStateProvider() {
    const events = useDriveEventManager();

    const [state, setState] = useState<LinksState>({});

    // Keep the cache in sync with server-side events for as long as the
    // provider is mounted. Note the inner `events` parameter shadows the
    // event manager above — it is the DriveEvents payload of the callback.
    useEffect(() => {
        const callbackId = events.eventHandlers.register((_volumeId, events) =>
            setState((state) => updateByEvents(state, events))
        );
        return () => {
            events.eventHandlers.unregister(callbackId);
        };
    }, []);

    // Adds or updates the given links in the cache for one share.
    const setLinks = useCallback((shareId: string, links: Link[]) => {
        setState((state) => addOrUpdate(state, shareId, links));
    }, []);

    // Marks the given decrypted links as locked (e.g. while an async
    // operation such as move or delete is running on them).
    const lockLinks = useCallback((shareId: string, linkIds: string[]) => {
        setState((state) => setLock(state, shareId, linkIds, true));
    }, []);

    // Clears the locked flag set by `lockLinks`.
    const unlockLinks = useCallback((shareId: string, linkIds: string[]) => {
        setState((state) => setLock(state, shareId, linkIds, false));
    }, []);

    // Locks every trashed link in every cached share and records the time
    // of the action (used by addOrUpdate for links loaded afterwards).
    const lockTrash = useCallback(() => {
        setState((state) =>
            Object.keys(state).reduce((acc, shareId) => {
                return setLock(acc, shareId, 'trash', true);
            }, state)
        );
    }, []);

    // Stores a locally generated thumbnail blob URL on a decrypted link.
    const setCachedThumbnail = useCallback((shareId: string, linkId: string, url: string) => {
        setState((state) => setCachedThumbnailUrl(state, shareId, linkId, url));
    }, []);

    // Returns the cached link, or undefined when not cached.
    const getLink = useCallback(
        (shareId: string, linkId: string): Link | undefined => {
            return state[shareId]?.links[linkId];
        },
        [state]
    );

    // Returns cached children of the given parent, optionally folders only.
    const getChildren = useCallback(
        (shareId: string, parentLinkId: string, foldersOnly: boolean = false): Link[] => {
            const childrenLinkIds = state[shareId]?.tree[parentLinkId] || [];
            return childrenLinkIds
                .map((linkId) => state[shareId].links[linkId])
                .filter(isTruthy)
                .filter((link) => !foldersOnly || !link.encrypted.isFile);
        },
        [state]
    );

    // All cached links of one share, in no particular order.
    const getAllShareLinks = (shareId: string): Link[] => {
        return Object.values(state[shareId]?.links || []);
    };

    // Returns links trashed directly (children trashed only via their
    // parent are excluded — the trash view shows the trashed roots only).
    const getTrashed = useCallback(
        (shareId: string): Link[] => {
            return getAllShareLinks(shareId).filter(
                (link) => !!link.encrypted.trashed && !link.encrypted.trashedByParent
            );
        },
        [state]
    );

    // Returns non-trashed links that have a share URL (shared via link).
    const getSharedByLink = useCallback(
        (shareId: string): Link[] => {
            return getAllShareLinks(shareId).filter(({ encrypted }) => !encrypted.trashed && !!encrypted.shareUrl);
        },
        [state]
    );

    return {
        setLinks,
        lockLinks,
        unlockLinks,
        lockTrash,
        setCachedThumbnail,
        getLink,
        getChildren,
        getTrashed,
        getSharedByLink,
    };
}

/**
 * Applies a batch of drive events (create/update/delete) to the cache and
 * returns the updated state. Events targeting shares that are not cached
 * are skipped.
 */
export function updateByEvents(state: LinksState, { events }: DriveEvents): LinksState {
    events.forEach((event) => {
        if (event.eventType === EVENT_TYPES.DELETE) {
            // Delete event does not contain context share ID because
            // the link is already deleted and backend might not know
            // it anymore. There might be two links in multiple shares
            // with the same link IDs, but it is very rare, and it can
            // happen in user cache only with direct sharing. Because
            // the risk is almost zero, it is simply deleting all the
            // links with the given ID. In future when we have full sync
            // we will have storage mapped with volumes instead removing
            // the problem altogether.
            Object.keys(state).forEach((shareId) => {
                state = deleteLinks(state, shareId, [event.encryptedLink.linkId]);
            });
            return;
        }

        // If link is moved from one share to the another one, we need
        // to delete it from the original one too. It is not very efficient
        // as it will delete the decrypted content. But this is rare and
        // it will be solved in the future with volume-centric approach
        // as described above.
        if (
            event.originShareId &&
            event.encryptedLink.rootShareId !== event.originShareId &&
            state[event.originShareId] &&
            state[event.originShareId]
        ) {
            state = deleteLinks(state, event.originShareId, [event.encryptedLink.linkId]);
        }

        // Skip events for shares we don't have cached. (The returned value
        // is ignored by forEach; `return state` only ends this iteration.)
        if (!state[event.encryptedLink.rootShareId]) {
            return state;
        }

        state = addOrUpdate(state, event.encryptedLink.rootShareId, [{ encrypted: event.encryptedLink }]);
    });
    return state;
}

/**
 * Removes the given links (and their cached direct children) from one
 * share's cache. Returns a new state reference only when something was
 * actually removed.
 *
 * NOTE(review): only direct children listed in `tree[linkId]` are removed
 * from `links`; deeper descendants and the children's own tree entries are
 * left behind — confirm whether that is intentional (e.g. relying on later
 * event processing) or a slow cache leak.
 */
export function deleteLinks(state: LinksState, shareId: string, linkIds: string[]): LinksState {
    if (!state[shareId]) {
        return state;
    }

    let updated = false;
    linkIds.forEach((linkId) => {
        const original = state[shareId].links[linkId];
        if (!original) {
            return;
        }

        updated = true;

        // Delete the link itself from links and tree.
        delete state[shareId].links[linkId];
        const originalParentChildren = state[shareId].tree[original.encrypted.parentLinkId];
        if (originalParentChildren) {
            state[shareId].tree[original.encrypted.parentLinkId] = originalParentChildren.filter(
                (childLinkId) => childLinkId !== linkId
            );
        }

        // Delete the root and children of the deleting link.
        state[shareId].tree[linkId]?.forEach((childLinkId) => {
            delete state[shareId].links[childLinkId];
        });
        delete state[shareId].tree[linkId];
    });

    return updated ? { ...state } : state;
}

/**
 * Adds new links or merges updates into existing cached links for one share.
 * Besides storing the links it:
 *  - derives the `trashed`/`trashedByParent` flags from cached parents,
 *  - preserves locally computed data (thumbnail URL, lock flag, numAccesses),
 *  - keeps the parent->children tree consistent, including recursive
 *    trash/restore propagation to cached children,
 *  - re-applies the trash lock for links loaded after an "Empty Trash".
 * Mutates the per-share structures in place and returns a shallow copy of
 * the top-level state so React picks up the change.
 */
export function addOrUpdate(state: LinksState, shareId: string, links: Link[]): LinksState {
    if (!links.length) {
        return state;
    }

    if (!state[shareId]) {
        state[shareId] = {
            links: {},
            tree: {},
        };
    }

    links.forEach((link) => {
        const { linkId, parentLinkId } = link.encrypted;

        const original = state[shareId].links[linkId];
        const originalTrashed = original?.encrypted.trashed;

        // Backend does not return trashed property set for children of trashed
        // parent. For example, file can have trashed equal to null even if it's
        // in a folder which is trashed. It's a heavy operation on backend and
        // because client needs to load all the parents to get keys anyway, we
        // can calculate it here.
        // Note this can be problematic in the future once we don't keep the full
        // cache for memory consumption reasons. That will need more thoughts
        // how to tackle this problem to keep the trashed property just fine.
        if (!link.encrypted.trashed) {
            const parentLinkTrashed = getParentTrashed(state, shareId, parentLinkId);
            let trashedProps;
            if (parentLinkTrashed) {
                trashedProps = {
                    trashed: parentLinkTrashed,
                    trashedByParent: true,
                };
            } else if (original?.encrypted.trashedByParent) {
                // If the link does not belong under trashed tree anymore, and
                // the link is trashed by parent, we can reset it back.
                trashedProps = {
                    trashed: null,
                    trashedByParent: false,
                };
            }
            if (trashedProps) {
                link = {
                    encrypted: { ...link.encrypted, ...trashedProps },
                    decrypted: link.decrypted ? { ...link.decrypted, ...trashedProps } : undefined,
                };
            }
        }

        if (original) {
            // If the link moved, detach it from its previous parent's
            // children list before re-attaching below.
            const originalParentId = original.encrypted.parentLinkId;
            if (originalParentId !== parentLinkId) {
                const originalParentChildren = state[shareId].tree[originalParentId];
                if (originalParentChildren) {
                    state[shareId].tree[originalParentId] = originalParentChildren.filter(
                        (childLinkId) => childLinkId !== linkId
                    );
                }
            }

            // Compute merged signature issues before overwriting `encrypted`,
            // as the merge compares old vs. new encrypted payloads.
            const newSignatureIssues = getNewSignatureIssues(original.encrypted, link);

            original.decrypted = getNewDecryptedLink(original, link);
            original.encrypted = link.encrypted;

            original.encrypted.signatureIssues = newSignatureIssues;
            if (original.decrypted) {
                original.decrypted.signatureIssues = newSignatureIssues;
            }
        } else {
            state[shareId].links[linkId] = link;
        }

        // Lock newly loaded trashed link if the whole trash is locked.
        // For example, when trash is being emptied but at the same time
        // the next page is loaded.
        const lastTrashed = state[shareId].latestTrashEmptiedAt;
        const cachedLink = state[shareId].links[linkId].decrypted;
        if (cachedLink?.trashed && lastTrashed && cachedLink.trashed < lastTrashed) {
            cachedLink.isLocked = true;
        }

        // Only root link has no parent ID.
        if (parentLinkId) {
            const parentChildIds = state[shareId].tree[parentLinkId];
            if (parentChildIds) {
                // If the parent is trashed, we keep the tree structure, so we
                // can update properly trashed flag for all children after
                // parent is restored.
                if (link.encrypted.trashedByParent) {
                    if (!parentChildIds.includes(linkId)) {
                        state[shareId].tree[parentLinkId] = [...parentChildIds, linkId];
                    }
                } else if (link.encrypted.trashed) {
                    // Directly trashed: remove from parent's children and
                    // mark all cached descendants as trashed-by-parent.
                    state[shareId].tree[parentLinkId] = parentChildIds.filter((childId) => childId !== linkId);
                    recursivelyTrashChildren(state, shareId, linkId, link.encrypted.trashed);
                } else {
                    // Not trashed: (re-)attach to parent; if it was trashed
                    // before, restore descendants trashed together with it.
                    if (!parentChildIds.includes(linkId)) {
                        state[shareId].tree[parentLinkId] = [...parentChildIds, linkId];
                    }
                    if (originalTrashed) {
                        recursivelyRestoreChildren(state, shareId, linkId, originalTrashed);
                    }
                }
            } else {
                state[shareId].tree[parentLinkId] = [linkId];
            }
        }
    });

    return { ...state };
}

/**
 * getParentTrashed finds closest parent which is trashed and returns its
 * trashed property, or returns null if link is not belonging under trashed
 * folder.
 */
function getParentTrashed(state: LinksState, shareId: string, linkId: string): number | null {
    while (linkId) {
        const link = state[shareId].links[linkId];
        if (!link) {
            return null;
        }
        if (link.encrypted.trashed) {
            return link.encrypted.trashed;
        }
        linkId = link.encrypted.parentLinkId;
    }
    return null;
}

/**
 * recursivelyTrashChildren sets trashed flag to all children of the parent.
 * When parent is trashed, API does not create event for every child, therefore
 * we need to update trashed flag the same way for all of them in our cache.
 */
function recursivelyTrashChildren(state: LinksState, shareId: string, linkId: string, trashed: number) {
    recursivelyUpdateLinks(state, shareId, linkId, (link) => {
        return {
            encrypted: {
                ...link.encrypted,
                // Keep a child's own (earlier) trash time if it has one.
                trashed: link.encrypted.trashed || trashed,
                trashedByParent: true,
            },
            decrypted: link.decrypted
                ? {
                      ...link.decrypted,
                      trashed: link.decrypted.trashed || trashed,
                      trashedByParent: true,
                  }
                : undefined,
        };
    });
}

/**
 * recursivelyRestoreChildren unsets trashed flag to children of the parent.
 * It's similar to trashing: API does not create event for the children, therefore
 * we need to remove trashed flag from children but only the ones which have
 * the same value because if the child was trashed first and then parent, user
 * will restore only parent and the previously trashed child still needs to stay
 * in trash.
 */
function recursivelyRestoreChildren(state: LinksState, shareId: string, linkId: string, originalTrashed: number) {
    recursivelyUpdateLinks(state, shareId, linkId, (link) => {
        if (link.encrypted.trashed !== originalTrashed) {
            return link;
        }
        return {
            encrypted: {
                ...link.encrypted,
                trashed: null,
                trashedByParent: false,
            },
            decrypted: link.decrypted
                ? {
                      ...link.decrypted,
                      trashed: null,
                      trashedByParent: false,
                  }
                : undefined,
        };
    });
}

/**
 * recursivelyUpdateLinks recursively calls updateCallback for every cached
 * child of the provided linkId in scope of shareId.
 */
function recursivelyUpdateLinks(
    state: LinksState,
    shareId: string,
    linkId: string,
    updateCallback: (link: Link) => Link
) {
    state[shareId].tree[linkId]?.forEach((linkId) => {
        const child = state[shareId].links[linkId];
        if (!child) {
            return;
        }
        state[shareId].links[linkId] = updateCallback(child);
        recursivelyUpdateLinks(state, shareId, child.encrypted.linkId, updateCallback);
    });
}

/**
 * Merges signature issues of the cached link with the incoming update.
 * Issues are kept across updates as long as the encrypted payload (keys and
 * encrypted values) stays the same; otherwise the incoming issues win.
 */
function getNewSignatureIssues(original: EncryptedLink, newLink: Link): SignatureIssues | undefined {
    const newSignatureIssues = newLink.decrypted?.signatureIssues || newLink.encrypted.signatureIssues;
    const isSame = isEncryptedLinkSame(original, newLink.encrypted);
    // If the link is different (different keys or new version of encrypted
    // values), we need to forget all previous signature issues and try decrypt
    // them again, or accept new issues if it was already tried.
    if (!isSame) {
        return newSignatureIssues;
    }
    if (original.signatureIssues || newSignatureIssues) {
        return { ...original.signatureIssues, ...newSignatureIssues };
    }
    return undefined;
}

/**
 * getNewDecryptedLink returns new version of decrypted link. It tries to
 * preserve the locally cached data, such as thumbnail or isLocked flag.
 * If the `newLink` has decrypted version, it is used directly and enhanced
 * with `getDecryptedLinkComputedData`.
 * If the `original` link has decrypted version, the new decrypted link
 * is combination of `newLink` encrypted version, `original` decrypted
 * values (such as name or fileModifyTime), and locally computed data.
 * In the case the new decrypted link doesn't match with previous encrypted
 * data and needs re-decryption, `isStale` is set for later decryption.
 * Decryption is not done right away, because the link might not be needed;
 * any view which needs data needs to make sure to run code to re-decrypt
 * stale links. The link is not cleared to not cause blinks in the UI.
 */
function getNewDecryptedLink(original: Link, newLink: Link): DecryptedLink | undefined {
    if (newLink.decrypted) {
        return {
            ...newLink.decrypted,
            ...getDecryptedLinkComputedData(
                original.decrypted,
                newLink.decrypted.activeRevision?.id,
                newLink.decrypted.shareUrl
            ),
        };
    }
    if (original.decrypted) {
        return {
            ...newLink.encrypted,
            encryptedName: original.decrypted.encryptedName,
            name: original.decrypted.name,
            fileModifyTime: original.decrypted.fileModifyTime,
            duration: original.decrypted.duration,
            corruptedLink: original.decrypted.corruptedLink,
            ...getDecryptedLinkComputedData(
                original.decrypted,
                newLink.encrypted.activeRevision?.id,
                newLink.encrypted.shareUrl
            ),
            isStale: !isDecryptedLinkSame(original.encrypted, newLink.encrypted),
        };
    }
    return undefined;
}

/**
 * getDecryptedLinkComputedData returns locally computed data.
 * The list includes:
 *  - numAccesses from shareUrl,
 *  - isLocked,
 *  - and cachedThumbnailUrl (kept only while the revision is unchanged).
 */
function getDecryptedLinkComputedData(link?: DecryptedLink, newRevisionId?: string, newShareUrl?: LinkShareUrl) {
    return !link
        ? {}
        : {
              shareUrl: newShareUrl
                  ? {
                        ...newShareUrl,
                        numAccesses: getNewNumAccesses(newShareUrl, link),
                    }
                  : undefined,
              isLocked: link.isLocked,
              cachedThumbnailUrl: link.activeRevision?.id === newRevisionId ? link.cachedThumbnailUrl : undefined,
          };
}

/**
 * Picks the best-known share URL access count: incoming value first, then
 * the previously cached one, zero for a freshly created share URL, and
 * undefined when there is no way to know.
 */
function getNewNumAccesses(newShareUrl: LinkShareUrl, oldLink?: DecryptedLink): number | undefined {
    // Prefer the one coming from the new share URL info if set.
    if (newShareUrl.numAccesses !== undefined) {
        return newShareUrl.numAccesses;
    }
    // If not set, but we have it cached from before, use that.
    // This information is not part of every response.
    if (oldLink?.shareUrl?.numAccesses !== undefined) {
        return oldLink.shareUrl.numAccesses;
    }
    // If there is no old share URL, but there is incoming one, that
    // means it is freshly created share URL. In other words, it was
    // not shared yet. We can safely set zero in such case so we don't
    // have to do extra request to get zero.
    if (oldLink && !oldLink.shareUrl) {
        return 0;
    }
    // In all other cases keep undefined. We just don't know.
    return undefined;
}

/**
 * Sets or clears the `isLocked` flag on decrypted links.
 * Pass an array of link IDs to lock specific links, or the literal
 * 'trash' to lock every trashed decrypted link of the share — in that
 * case `latestTrashEmptiedAt` is also recorded so links loaded later can
 * be locked retroactively by `addOrUpdate`.
 */
export function setLock(
    state: LinksState,
    shareId: string,
    linkIdsOrTrash: string[] | 'trash',
    isLocked: boolean
): LinksState {
    if (!state[shareId]) {
        return state;
    }

    if (Array.isArray(linkIdsOrTrash)) {
        linkIdsOrTrash.forEach((linkId) => {
            // Links without decrypted payload cannot carry the flag.
            if (!state[shareId].links[linkId]?.decrypted) {
                return;
            }

            state[shareId].links[linkId].decrypted = {
                ...(state[shareId].links[linkId].decrypted as DecryptedLink),
                isLocked,
            };
        });
    } else {
        state[shareId].latestTrashEmptiedAt = Date.now() / 1000; // From ms to sec.
        Object.entries(state[shareId].links)
            .filter(([, link]) => link.decrypted && link.decrypted.trashed)
            .forEach(([linkId, link]) => {
                state[shareId].links[linkId].decrypted = {
                    ...(link.decrypted as DecryptedLink),
                    isLocked,
                };
            });
    }
    return { ...state };
}

/**
 * Stores a locally generated thumbnail URL on the decrypted link.
 * No-op when the share or the decrypted link is not cached.
 */
export function setCachedThumbnailUrl(
    state: LinksState,
    shareId: string,
    linkId: string,
    cachedThumbnailUrl: string
): LinksState {
    if (!state[shareId]) {
        return state;
    }

    const link = state[shareId].links[linkId];
    if (!link?.decrypted) {
        return state;
    }

    link.decrypted = {
        ...link.decrypted,
        cachedThumbnailUrl,
    };
    return { ...state };
}

const LinksStateContext = createContext<ReturnType<typeof useLinksStateProvider> | null>(null);

// Context provider wiring useLinksStateProvider into the React tree.
// NOTE(review): `React.ReactNode` is used without an explicit
// `import * as React` in this file — presumably resolved via the React UMD
// global typings; confirm against the project's tsconfig.
export function LinksStateProvider({ children }: { children: React.ReactNode }) {
    const value = useLinksStateProvider();
    return <LinksStateContext.Provider value={value}>{children}</LinksStateContext.Provider>;
}

/**
 * Accessor for the links state; throws when used outside LinksStateProvider.
 */
export default function useLinksState() {
    const state = useContext(LinksStateContext);
    if (!state) {
        throw new Error('Trying to use uninitialized LinksStateProvider');
    }
    return state;
}
3,089
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/validation.ts
import { c, msgid } from 'ttag'; import { MAX_NAME_LENGTH } from '@proton/shared/lib/drive/constants'; const composeValidators = <T>(validators: ((value: T) => string | undefined)[]) => (value: T) => { for (const validator of validators) { const result = validator(value); if (result) { return result; } } return undefined; }; const validateNameLength = (str: string) => { return str.length > MAX_NAME_LENGTH ? c('Validation Error').ngettext( msgid`Name must be ${MAX_NAME_LENGTH} character long at most`, `Name must be ${MAX_NAME_LENGTH} characters long at most`, MAX_NAME_LENGTH ) : undefined; }; const validateNameEmpty = (str: string) => { return !str ? c('Validation Error').t`Name must not be empty` : undefined; }; export const validateLinkName = composeValidators([validateNameEmpty, validateNameLength]); export const validateLinkNameField = composeValidators([validateNameEmpty, validateNameLength]);
3,090
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/index.ts
export { default as useLinksListing, LinksListingProvider } from './useLinksListing'; export { default as usePublicLinksListing, PublicLinksListingProvider } from './usePublicLinksListing';
3,091
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/interface.ts
import { DecryptedLink } from '../interface'; type LoadLinksMetaOptions = { /** * Whether or not to request thumbnail tokens from the API */ loadThumbnails?: boolean; }; export type FetchLoadLinksMeta = ( abortSignal: AbortSignal, query: string, shareId: string, linkIds: string[], options?: LoadLinksMetaOptions ) => Promise<{ links: DecryptedLink[]; parents: DecryptedLink[]; errors: any[]; }>;
3,092
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useLinksListing.test.tsx
import { act, renderHook } from '@testing-library/react-hooks';

import { SORT_DIRECTION } from '@proton/shared/lib/constants';

import { VolumesStateProvider } from '../../_volumes/useVolumesState';
import { EncryptedLink } from '../interface';
import { LinksStateProvider } from '../useLinksState';
import { useLinksListingProvider } from './useLinksListing';
import { PAGE_SIZE } from './useLinksListingHelpers';

// Almost two full pages of fake children: the last page being shorter than
// PAGE_SIZE lets the provider detect that everything was fetched.
const LINKS = [...Array(PAGE_SIZE * 2 - 1)].map((_, x) => ({ linkId: `children${x}`, parentLinkId: 'parentLinkId' }));

// Converts test links into the capitalized shape returned by the API.
function linksToApiLinks(links: { linkId: string; parentLinkId: string }[]) {
    return links.map(({ linkId, parentLinkId }) => ({ LinkID: linkId, ParentLinkID: parentLinkId }));
}

// Error handler is mocked out so notifications don't interfere with assertions.
jest.mock('../../_utils/errorHandler', () => {
    return {
        useErrorHandler: () => ({
            showErrorNotification: jest.fn(),
            showAggregatedErrorNotification: jest.fn(),
        }),
    };
});

// Network layer: every API request goes through this mock.
const mockRequest = jest.fn();
jest.mock('../../_api/useDebouncedRequest', () => {
    const useDebouncedRequest = () => {
        return mockRequest;
    };
    return useDebouncedRequest;
});

jest.mock('../../_events/useDriveEventManager', () => {
    const useDriveEventManager = () => {
        return {
            eventHandlers: {
                register: () => 'id',
                unregister: () => false,
            },
        };
    };
    return {
        useDriveEventManager,
    };
});

jest.mock('../../_shares/useShare', () => {
    const useLink = () => {
        return {};
    };
    return useLink;
});

// Decryption layer: tracked so tests can assert which links were decrypted.
const mockDecrypt = jest.fn();
jest.mock('../useLink', () => {
    const useLink = () => {
        return {
            decryptLink: mockDecrypt,
        };
    };
    return useLink;
});

describe('useLinksListing', () => {
    const abortSignal = new AbortController().signal;

    let hook: {
        current: ReturnType<typeof useLinksListingProvider>;
    };

    beforeEach(() => {
        jest.resetAllMocks();

        // Decryption is the identity, so assertions can compare against the
        // encrypted inputs directly.
        mockDecrypt.mockImplementation((_abortSignal: AbortSignal, _shareId: string, encrypted: EncryptedLink) =>
            Promise.resolve(encrypted)
        );

        const wrapper = ({ children }: { children: React.ReactNode }) => (
            <VolumesStateProvider>
                <LinksStateProvider>{children}</LinksStateProvider>
            </VolumesStateProvider>
        );
        const { result } = renderHook(() => useLinksListingProvider(), { wrapper });
        hook = result;
    });

    it('fetches children all pages with the same sorting', async () => {
        mockRequest.mockReturnValueOnce({ Links: linksToApiLinks(LINKS.slice(0, PAGE_SIZE)) });
        mockRequest.mockReturnValueOnce({ Links: linksToApiLinks(LINKS.slice(PAGE_SIZE)) });

        await act(async () => {
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId', {
                sortField: 'createTime',
                sortOrder: SORT_DIRECTION.ASC,
            });
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId', {
                sortField: 'createTime',
                sortOrder: SORT_DIRECTION.ASC,
            });
        });

        // Check fetch calls - two pages.
        expect(mockRequest.mock.calls.map(([{ params }]) => params)).toMatchObject([
            { Page: 0, Sort: 'CreateTime', Desc: 0 },
            { Page: 1, Sort: 'CreateTime', Desc: 0 },
        ]);
        expect(hook.current.getCachedChildren(abortSignal, 'shareId', 'parentLinkId')).toMatchObject({
            links: LINKS,
            isDecrypting: false,
        });
        // Check decrypt calls - all links were decrypted.
        expect(mockDecrypt.mock.calls.map(([, , { linkId }]) => linkId)).toMatchObject(
            LINKS.map(({ linkId }) => linkId)
        );
    });

    it('fetches from the beginning when sorting changes', async () => {
        const links = LINKS.slice(0, PAGE_SIZE);
        mockRequest.mockReturnValue({ Links: linksToApiLinks(links) });

        await act(async () => {
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId', {
                sortField: 'createTime',
                sortOrder: SORT_DIRECTION.ASC,
            });
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId', {
                sortField: 'createTime',
                sortOrder: SORT_DIRECTION.DESC,
            });
        });

        // Check fetch calls - twice starting from the first page.
        expect(mockRequest.mock.calls.map(([{ params }]) => params)).toMatchObject([
            { Page: 0, Sort: 'CreateTime', Desc: 0 },
            { Page: 0, Sort: 'CreateTime', Desc: 1 },
        ]);
        expect(hook.current.getCachedChildren(abortSignal, 'shareId', 'parentLinkId')).toMatchObject({
            links,
            isDecrypting: false,
        });
        // Check decrypt calls - the second call returned the same links, no need to decrypt them twice.
        expect(mockDecrypt.mock.calls.map(([, , { linkId }]) => linkId)).toMatchObject(
            links.map(({ linkId }) => linkId)
        );
    });

    it('skips fetch if all was fetched', async () => {
        const links = LINKS.slice(0, 5);
        mockRequest.mockReturnValue({ Links: linksToApiLinks(links) });

        await act(async () => {
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId');
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId');
        });

        // Check fetch calls - first call fetched all, no need to call the second.
        expect(mockRequest).toBeCalledTimes(1);
        expect(hook.current.getCachedChildren(abortSignal, 'shareId', 'parentLinkId')).toMatchObject({
            links,
            isDecrypting: false,
        });
    });

    it('loads the whole folder', async () => {
        mockRequest.mockReturnValueOnce({ Links: linksToApiLinks(LINKS.slice(0, PAGE_SIZE)) });
        mockRequest.mockReturnValueOnce({ Links: linksToApiLinks(LINKS.slice(PAGE_SIZE)) });

        await act(async () => {
            await hook.current.loadChildren(abortSignal, 'shareId', 'parentLinkId');
        });

        expect(mockRequest.mock.calls.map(([{ params }]) => params)).toMatchObject([
            { Page: 0, Sort: 'CreateTime', Desc: 0 },
            { Page: 1, Sort: 'CreateTime', Desc: 0 },
        ]);
    });

    it('continues the load of the whole folder where it ended', async () => {
        mockRequest.mockReturnValueOnce({ Links: linksToApiLinks(LINKS.slice(0, PAGE_SIZE)) });
        mockRequest.mockReturnValueOnce({ Links: linksToApiLinks(LINKS.slice(PAGE_SIZE)) });

        await act(async () => {
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId', {
                sortField: 'metaDataModifyTime', // Make sure it is not default.
                sortOrder: SORT_DIRECTION.ASC,
            });
            await hook.current.loadChildren(abortSignal, 'shareId', 'parentLinkId');
        });

        expect(mockRequest.mock.calls.map(([{ params }]) => params)).toMatchObject([
            { Page: 0, Sort: 'ModifyTime', Desc: 0 }, // Done by fetchChildrenNextPage.
            { Page: 1, Sort: 'ModifyTime', Desc: 0 }, // Done by loadChildren, continues with the same sorting.
        ]);
    });

    it("can count link's children", async () => {
        const PAGE_LENGTH = 5;
        const links = LINKS.slice(0, PAGE_LENGTH);
        mockRequest.mockReturnValueOnce({ Links: linksToApiLinks(links) });

        await act(async () => {
            await hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId');
        });

        expect(mockRequest).toBeCalledTimes(1);
        expect(hook.current.getCachedChildrenCount('shareId', 'parentLinkId')).toBe(PAGE_LENGTH);
    });
});
3,093
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useLinksListing.tsx
import { createContext, useCallback, useContext, useRef } from 'react';

import { queryFolderChildren } from '@proton/shared/lib/api/drive/folder';
import { queryLinkMetaBatch } from '@proton/shared/lib/api/drive/link';
import { BATCH_REQUEST_SIZE } from '@proton/shared/lib/drive/constants';
import { LinkChildrenResult, LinkMetaBatchPayload } from '@proton/shared/lib/interfaces/drive/link';
import chunk from '@proton/utils/chunk';
import isTruthy from '@proton/utils/isTruthy';

import { linkMetaToEncryptedLink, useDebouncedRequest } from '../../_api';
import { waitFor } from '../../_utils';
import { DecryptedLink } from './../interface';
import useLinksState, { isLinkDecrypted } from './../useLinksState';
import { FetchLoadLinksMeta } from './interface';
import {
    FetchMeta,
    FetchResponse,
    PAGE_SIZE,
    SortParams,
    sortParamsToServerSortArgs,
    useLinksListingHelpers,
} from './useLinksListingHelpers';
import { useSharedLinksListing } from './useSharedLinksListing';
import { useTrashedLinksListing } from './useTrashedLinksListing';

// Per-share fetch bookkeeping, keyed by share ID.
type FetchState = {
    [shareId: string]: FetchShareState;
};

type FetchShareState = {
    folders: {
        [linkId: string]: {
            // all represents version for all files in the folder, whereas
            // foldersOnly is state of requesting only folders for the given
            // folder. In case `all` is ongoing, `foldersOnly` version waits
            // till that is done. See `fetchChildrenNextPage` for more info.
            all: FetchMeta;
            foldersOnly: FetchMeta;
        };
    };
    // Fetch state for loadLinksMeta calls, keyed by the caller-chosen query string.
    links: {
        [key: string]: FetchMeta;
    };
};

/**
 * Provides way to list links, such as folder links or
 * trashed links or shared links, and ensure the links are decrypted.
 * The typical usage should be as follow:
 *
 *      const listing = useLinksListingProvider();
 *
 *      // getCachedChildren returns links right away.
 *      // abortSignal is used for background decryption of stale links
 *      // (links not loaded by listing, but using events, for example).
 *      const children = listing.getCachedChildren(abortSignal, shareId, linkId);
 *
 *      useEffect(() => {
 *          const ac = new AbortController();
 *          // Load and decrypt all children for given folder.
 *          linksListing.loadChildren(ac.signal, shareId, linkId)
 *          return () => {
 *              // Stop the load operation when its not needed anymore.
 *              // E.g., different folder was requested.
 *              ac.abort();
 *          };
 *      }, [shareId, linkId]);
 */
export function useLinksListingProvider() {
    const debouncedRequest = useDebouncedRequest();
    const linksState = useLinksState();
    const trashedLinksListing = useTrashedLinksListing();
    const sharedLinksListing = useSharedLinksListing();

    const { cacheLoadedLinks, fetchNextPageWithSortingHelper, loadFullListing, getDecryptedLinksAndDecryptRest } =
        useLinksListingHelpers();
    // Mutable fetch bookkeeping; a ref so updates don't trigger re-renders.
    const state = useRef<FetchState>({});

    /**
     * getShareFetchState returns state for given `shareId`.
     * It ensures that the share is present in the state.
     */
    const getShareFetchState = (shareId: string): FetchShareState => {
        if (state.current[shareId]) {
            return state.current[shareId];
        }
        state.current[shareId] = {
            folders: {},
            links: {},
        };
        return state.current[shareId];
    };

    // Fetches one page of folder children from the API and converts the
    // result into encrypted links.
    const fetchChildrenPage = async (
        abortSignal: AbortSignal,
        shareId: string,
        parentLinkId: string,
        sorting: SortParams,
        page: number,
        foldersOnly?: boolean,
        showNotification = true
    ): Promise<FetchResponse> => {
        const { Links } = await debouncedRequest<LinkChildrenResult>(
            {
                ...queryFolderChildren(shareId, parentLinkId, {
                    ...sortParamsToServerSortArgs(sorting),
                    PageSize: PAGE_SIZE,
                    Page: page,
                    FoldersOnly: foldersOnly ? 1 : 0,
                }),
                silence: !showNotification,
            },
            abortSignal
        );
        return { links: Links.map((link) => linkMetaToEncryptedLink(link, shareId)), parents: [] };
    };

    /**
     * Fetches next page for the given folder. If request for `foldersOnly`
     * is made and there is already ongoing request for all files for the same folder,
     * it waits till its finished to not ask for the same links twice.
     */
    const fetchChildrenNextPage = async (
        abortSignal: AbortSignal,
        shareId: string,
        parentLinkId: string,
        sorting?: SortParams,
        foldersOnly?: boolean,
        showNotification = true
    ): Promise<boolean> => {
        const shareState = getShareFetchState(shareId);
        let linkFetchMeta = shareState.folders[parentLinkId];
        if (!linkFetchMeta) {
            linkFetchMeta = {
                all: {},
                foldersOnly: {},
            };
            shareState.folders[parentLinkId] = linkFetchMeta;
        }
        if (foldersOnly) {
            // If request to query all items is in progress, lets wait
            // as that might fetch all folder children as well.
            await waitFor(() => !linkFetchMeta.all.isInProgress, { abortSignal });
            // If all items were downloaded, no need to perform fetch
            // for folders only.
            if (linkFetchMeta.all.isEverythingFetched) {
                return false;
            }
        }

        const fetchMeta = foldersOnly ? linkFetchMeta.foldersOnly : linkFetchMeta.all;
        return fetchNextPageWithSortingHelper(
            abortSignal,
            shareId,
            sorting,
            fetchMeta,
            (sorting: SortParams, page: number) => {
                return fetchChildrenPage(
                    abortSignal,
                    shareId,
                    parentLinkId,
                    sorting,
                    page,
                    foldersOnly,
                    showNotification
                );
            },
            showNotification
        );
    };

    // Fetches metadata for an explicit batch of link IDs; the API may also
    // return parent links needed for decryption.
    const fetchLinksMeta = async (
        abortSignal: AbortSignal,
        shareId: string,
        linkIds: string[],
        loadThumbnails: boolean = false
    ): Promise<FetchResponse> => {
        const { Links, Parents } = await debouncedRequest<LinkMetaBatchPayload>(
            queryLinkMetaBatch(shareId, linkIds, loadThumbnails),
            abortSignal
        );
        return {
            links: Links.map((link) => linkMetaToEncryptedLink(link, shareId)),
            parents: Parents ? Object.values(Parents).map((link) => linkMetaToEncryptedLink(link, shareId)) : [],
        };
    };

    // Loads (and decrypts) metadata for the given link IDs. Concurrent calls
    // with the same `query` key are serialized via waitFor on the fetch meta.
    const loadLinksMeta: FetchLoadLinksMeta = async (abortSignal, query, shareId, linkIds, options = {}) => {
        const shareState = getShareFetchState(shareId);
        let fetchMeta = shareState.links[query];
        if (!fetchMeta) {
            fetchMeta = {};
            state.current[shareId].links[query] = fetchMeta;
        }
        await waitFor(() => !fetchMeta.isInProgress, { abortSignal });
        fetchMeta.isInProgress = true;

        const linksAcc: DecryptedLink[] = [];
        const parentsAcc: DecryptedLink[] = [];
        const errorsAcc: any[] = [];

        const load = async () => {
            const missingLinkIds: string[] = [];
            // Read cache to avoid unnecessary queries
            linkIds.forEach((linkId) => {
                const link = linksState.getLink(shareId, linkId);
                if (isLinkDecrypted(link)) {
                    linksAcc.push(link.decrypted);
                } else {
                    missingLinkIds.push(linkId);
                }
            });

            for (const pageLinkIds of chunk(missingLinkIds, BATCH_REQUEST_SIZE)) {
                const { links, parents } = await fetchLinksMeta(
                    abortSignal,
                    shareId,
                    pageLinkIds,
                    options.loadThumbnails
                );
                const cached = await cacheLoadedLinks(abortSignal, shareId, links, parents);
                if (cached.errors.length > 0) {
                    errorsAcc.push(...cached.errors);
                }
                for (const { decrypted } of cached.links) {
                    // Links should not include parents because parents need to be
                    // processed first otherwise links would do fetch automatically
                    // again before parents are properly handled. Normally loading
                    // should focus on links only, but for example, not all endpoints
                    // gives us clear separation (like listing per shared links where
                    // we don't have info what link is parent and what is child).
                    if (parents.find((link) => link.linkId === decrypted.linkId)) {
                        parentsAcc.push(decrypted);
                    } else {
                        linksAcc.push(decrypted);
                    }
                }
            }
        };

        await load().finally(() => {
            // Always release the lock, even on failure, so later calls can run.
            fetchMeta.isInProgress = false;
        });

        return {
            links: linksAcc,
            parents: parentsAcc,
            errors: errorsAcc,
        };
    };

    // Loads all pages of children for the given folder.
    const loadChildren = async (
        abortSignal: AbortSignal,
        shareId: string,
        linkId: string,
        foldersOnly?: boolean,
        showNotification = true
    ): Promise<void> => {
        // undefined means keep the sorting used the last time = lets reuse what we loaded so far.
        const sorting = undefined;
        return loadFullListing(() =>
            fetchChildrenNextPage(abortSignal, shareId, linkId, sorting, foldersOnly, showNotification)
        );
    };

    // Returns already-cached children synchronously; kicks off background
    // decryption of any stale or not-yet-decrypted links.
    const getCachedChildren = useCallback(
        (
            abortSignal: AbortSignal,
            shareId: string,
            parentLinkId: string,
            foldersOnly: boolean = false
        ): { links: DecryptedLink[]; isDecrypting: boolean } => {
            return getDecryptedLinksAndDecryptRest(
                abortSignal,
                shareId,
                linksState.getChildren(shareId, parentLinkId, foldersOnly),
                getShareFetchState(shareId).folders[parentLinkId]?.all
            );
        },
        [linksState.getChildren]
    );

    // Number of currently cached children of the given folder (cached only,
    // not necessarily the full server-side count).
    const getCachedChildrenCount = useCallback(
        (shareId: string, parentLinkId: string): number => {
            const links = linksState.getChildren(shareId, parentLinkId);
            return links.length;
        },
        [linksState.getChildren]
    );

    // Returns cached links for explicit IDs; `fetchKey` matches the `query`
    // used by loadLinksMeta so in-progress loads are respected.
    const getCachedLinks = useCallback(
        (
            abortSignal: AbortSignal,
            fetchKey: string,
            shareId: string,
            linkIds: string[]
        ): { links: DecryptedLink[]; isDecrypting: boolean } => {
            const links = linkIds.map((linkId) => linksState.getLink(shareId, linkId)).filter(isTruthy);
            return getDecryptedLinksAndDecryptRest(
                abortSignal,
                shareId,
                links,
                getShareFetchState(shareId).links[fetchKey]
            );
        },
        [linksState.getLink]
    );

    return {
        fetchChildrenNextPage,
        loadChildren,
        loadTrashedLinks: (signal: AbortSignal, volumeId: string) => {
            return trashedLinksListing.loadTrashedLinks(signal, volumeId, loadLinksMeta);
        },
        loadLinksSharedByLink: (signal: AbortSignal, volumeId: string) => {
            return sharedLinksListing.loadSharedLinks(signal, volumeId, loadLinksMeta);
        },
        loadLinksMeta,
        getCachedChildren,
        getCachedChildrenCount,
        getCachedTrashed: trashedLinksListing.getCachedTrashed,
        getCachedSharedByLink: sharedLinksListing.getCachedSharedLinks,
        getCachedLinks,
    };
}

const LinksListingContext = createContext<ReturnType<typeof useLinksListingProvider> | null>(null);

export function LinksListingProvider({ children }: { children: React.ReactNode }) {
    const value = useLinksListingProvider();
    return <LinksListingContext.Provider value={value}>{children}</LinksListingContext.Provider>;
}

export default function useLinksListing() {
    const state = useContext(LinksListingContext);
    if (!state) {
        throw new Error('Trying to use uninitialized LinksListingProvider');
    }
    return state;
}
3,094
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useLinksListingGetter.test.tsx
import { act, renderHook } from '@testing-library/react-hooks';

import { wait } from '@proton/shared/lib/helpers/promise';

import { VolumesStateProvider } from '../../_volumes/useVolumesState';
import { EncryptedLink } from '../interface';
import { useLinksListingProvider } from './useLinksListing';

// Error handler is mocked out so notifications don't interfere with assertions.
jest.mock('../../_utils/errorHandler', () => {
    return {
        useErrorHandler: () => ({
            showErrorNotification: jest.fn(),
            showAggregatedErrorNotification: jest.fn(),
        }),
    };
});

// Network layer: every API request goes through this mock.
const mockRequest = jest.fn();
jest.mock('../../_api/useDebouncedRequest', () => {
    const useDebouncedRequest = () => {
        return mockRequest;
    };
    return useDebouncedRequest;
});

jest.mock('../../_events/useDriveEventManager', () => {
    const useDriveEventManager = () => {
        return {
            eventHandlers: {
                register: () => 'id',
                unregister: () => false,
            },
        };
    };
    return {
        useDriveEventManager,
    };
});

jest.mock('../../_shares/useShare', () => {
    const useLink = () => {
        return {};
    };
    return useLink;
});

// Decryption layer: tracked so tests can assert which links were decrypted.
const mockDecrypt = jest.fn();
jest.mock('../useLink', () => {
    const useLink = () => {
        return {
            decryptLink: mockDecrypt,
        };
    };
    return useLink;
});

// Links state is mocked here (unlike useLinksListing.test) so the cache can
// be seeded with links in three states: encrypted-only, decrypted, and stale.
const mockGetChildren = jest.fn();
jest.mock('../useLinksState', () => {
    const useLinksState = () => {
        return {
            setLinks: jest.fn(),
            getChildren: mockGetChildren,
        };
    };
    return useLinksState;
});

describe('useLinksListing', () => {
    const abortSignal = new AbortController().signal;

    let hook: {
        current: ReturnType<typeof useLinksListingProvider>;
    };

    beforeEach(() => {
        jest.resetAllMocks();

        mockDecrypt.mockImplementation((abortSignal: AbortSignal, shareId: string, encrypted: EncryptedLink) =>
            Promise.resolve(encrypted)
        );
        mockGetChildren.mockReturnValue([
            { encrypted: { linkId: 'onlyEncrypted' } },
            { encrypted: { linkId: 'decrypted' }, decrypted: { isStale: false } },
            { encrypted: { linkId: 'stale' }, decrypted: { isStale: true } },
        ]);

        const wrapper = ({ children }: { children: React.ReactNode }) => (
            <VolumesStateProvider>{children}</VolumesStateProvider>
        );
        const { result } = renderHook(() => useLinksListingProvider(), { wrapper });
        hook = result;
    });

    it('decrypts all non-decrypted links if listing is not fetching', async () => {
        act(() => {
            hook.current.getCachedChildren(abortSignal, 'shareId', 'parentLinkId');
        });
        expect(mockDecrypt.mock.calls.map(([, , { linkId }]) => linkId)).toMatchObject(['onlyEncrypted', 'stale']);
    });

    it('re-decrypts only stale links if listing is fetching (and decrypting)', async () => {
        // Make some delay to make sure fetching is in progress when
        // getCachedChildren is called.
        mockRequest.mockImplementation(async () => {
            await wait(200);
            return { Links: [] };
        });

        await act(async () => {
            const fetchPromise = hook.current.fetchChildrenNextPage(abortSignal, 'shareId', 'parentLinkId');
            await wait(100); // Wait till previous call sets inProgress.
            hook.current.getCachedChildren(abortSignal, 'shareId', 'parentLinkId');
            await fetchPromise;
        });
        expect(mockDecrypt.mock.calls.map(([, , { linkId }]) => linkId)).toMatchObject(['stale']);
    });
});
3,095
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useLinksListingHelpers.tsx
import { c, msgid } from 'ttag';

import { SORT_DIRECTION } from '@proton/shared/lib/constants';
import { RESPONSE_CODE } from '@proton/shared/lib/drive/constants';
import isTruthy from '@proton/utils/isTruthy';

import { sendErrorReport } from '../../../utils/errorHandling';
import { useErrorHandler, waitFor } from '../../_utils';
import { DecryptedLink, EncryptedLink } from '../interface';
import useLinks from '../useLinks';
import useLinksState, { Link, isLinkDecrypted } from '../useLinksState';

/**
 * Mutable bookkeeping for one listing: whether a fetch is running, whether
 * all pages were loaded, and which sorting/page was used last.
 */
export type FetchMeta = {
    isEverythingFetched?: boolean;
    isInProgress?: boolean;
    lastSorting?: SortParams;
    lastPage?: number;
};

/**
 * Available sorting methods for listing.
 */
export type SortParams = {
    sortField: 'size' | 'createTime' | 'metaDataModifyTime';
    sortOrder: SORT_DIRECTION;
};

/**
 * FetchResponse is internal data holder of results from API.
 */
export type FetchResponse = {
    // links contain all requests links (that is links in specified folder
    // in case folder children were requested).
    links: EncryptedLink[];
    // parents contain links not directly requested but needed so they can
    // be decrypted (useful for shared links which don't have the same
    // parent, for example).
    parents: EncryptedLink[];
};

// API supports up to 150 but we hardly fit 150 items on the page anyway.
// Because decrypting takes time, lets do it in smaller batches. We could
// optimise it to do it in smaller batches for listings when we can use
// sorting on API, but the maximum what API allows for cases when we need
// to load everything anyway. That is a bit tricky, as it needs more complex
// paging algorithm (to properly compute page when page size differs).
// Therefore, lets keep it simple for now unless it is really needed.
export const PAGE_SIZE = 50;
export const DEFAULT_SORTING: SortParams = {
    sortField: 'createTime',
    sortOrder: SORT_DIRECTION.ASC,
};

/**
 * Provides helpers to list links.
 */
export function useLinksListingHelpers() {
    const { showErrorNotification, showAggregatedErrorNotification } = useErrorHandler();
    const linksState = useLinksState();
    const { decryptLinks } = useLinks();

    /**
     * Decrypts links in parallel and caches them.
     */
    const decryptAndCacheLinks = async (abortSignal: AbortSignal, shareId: string, links: EncryptedLink[]) => {
        if (!links.length) {
            return {
                links: [],
                errors: [],
            };
        }

        const result = await decryptLinks(abortSignal, shareId, links);
        if (result.links.length) {
            linksState.setLinks(shareId, result.links);
        }

        if (result.errors.length) {
            // One aggregated notification instead of a toast per failed link.
            showAggregatedErrorNotification(result.errors, (errors: any[]) => {
                const count = errors.length;
                return c('Notification').ngettext(
                    msgid`${count} item failed to be decrypted`,
                    `${count} items failed to be decrypted`,
                    count
                );
            });
        }

        return result;
    };

    /**
     * Caches encrypted links and decrypts them.
     */
    const cacheLoadedLinks = async (
        abortSignal: AbortSignal,
        shareId: string,
        links: EncryptedLink[],
        parents: EncryptedLink[]
    ) => {
        // Set encrypted data right away because it is needed for decryption
        const allEncryptedLinks = [...parents, ...links].map((encrypted) => ({ encrypted }));
        linksState.setLinks(shareId, allEncryptedLinks);

        // Decrypt only links which are not decrypted yet or need re-decryption
        const decryptedLinks: Required<Link>[] = [];
        const encryptedLinks: EncryptedLink[] = [];
        links.forEach((link) => {
            const cachedLink = linksState.getLink(shareId, link.linkId);
            if (isLinkDecrypted(cachedLink)) {
                decryptedLinks.push(cachedLink);
            } else {
                encryptedLinks.push(link);
            }
        });

        // Merge results to return all provided links
        const result = await decryptAndCacheLinks(abortSignal, shareId, encryptedLinks);
        return {
            links: [...decryptedLinks, ...result.links],
            errors: result.errors,
        };
    };

    /**
     * Ensures only one fetch for the given
     * `fetchMeta` is in progress, and it never runs if all was already fetched
     * before.
     * The algorithm also ensures proper paging; e.g., if first page used sort
     * by create time, and next page uses the same sorting, paging can continue,
     * but if the sort is different, we need to start from page one again, so
     * we don't miss any link.
     * In case no sorting is provided, the previously used one is used; that is
     * useful for example when some pages were already loaded and then we just
     * don't care about sorting but we want to load everything.
     * The return value is boolean whether there is next page.
     */
    const fetchNextPageWithSortingHelper = async (
        abortSignal: AbortSignal,
        shareId: string,
        sorting: SortParams | undefined,
        fetchMeta: FetchMeta,
        fetchLinks: (sorting: SortParams, page: number) => Promise<FetchResponse>,
        showNotification = true
    ): Promise<boolean> => {
        await waitFor(() => !fetchMeta.isInProgress, { abortSignal });
        if (fetchMeta.isEverythingFetched) {
            return false;
        }
        fetchMeta.isInProgress = true;

        const currentSorting = sorting || fetchMeta.lastSorting || DEFAULT_SORTING;
        // Same sorting as last time continues paging; a new sorting restarts at page 0.
        const currentPage =
            isSameSorting(fetchMeta.lastSorting, currentSorting) && fetchMeta.lastPage !== undefined
                ? fetchMeta.lastPage + 1
                : 0;
        const hasNextPage = await fetchLinks(currentSorting, currentPage)
            .then(async ({ links, parents }) => {
                fetchMeta.lastSorting = currentSorting;
                fetchMeta.lastPage = currentPage;
                // A page shorter than PAGE_SIZE means this was the last one.
                fetchMeta.isEverythingFetched = links.length < PAGE_SIZE;

                await cacheLoadedLinks(abortSignal, shareId, links, parents);

                return !fetchMeta.isEverythingFetched;
            })
            .catch((err) => {
                if (err?.data?.Code === RESPONSE_CODE.INVALID_LINK_TYPE) {
                    throw err;
                }
                // If you do bigger changes around, consider this:
                // It looked like a good idea to handle errors by showing
                // notification here to handle all places nicely on one
                // place without need to duplicate the code. However, for
                // download, we need to throw exception back so it can be
                // properly handled by transfer manager. But still, for all
                // other places its convenient to handle here. Maybe in the
                // future we could do another helper which would wrap the
                // logic with notifications, similarly like we have hook
                // useActions, to have better freedom to chose what to use.
                if (showNotification) {
                    showErrorNotification(err, c('Notification').t`Cannot load next page`);
                    // Very probably the next page is still there, but to not cause
                    // infinite loop requesting next page, lets return false.
                    return false;
                }
                throw err;
            })
            .finally(() => {
                // Make sure isInProgress is always unset, even during failure,
                // and that it is the last thing after everything else is set.
                fetchMeta.isInProgress = false;
            });
        return hasNextPage;
    };

    /**
     * A wrapper around fetchNextPageWithSortingHelper.
     * Basically the same thing, just for cases when sorting is not available
     * (for example, listing trash or shared links).
     */
    const fetchNextPageHelper = async (
        abortSignal: AbortSignal,
        shareId: string,
        fetchMeta: FetchMeta,
        fetchLinks: (page: number) => Promise<FetchResponse>
    ): Promise<boolean> => {
        return fetchNextPageWithSortingHelper(abortSignal, shareId, undefined, fetchMeta, (_, page: number) =>
            fetchLinks(page)
        );
    };

    /**
     * Invokes a callback function until all pages are loaded. The callback function must
     * return a boolean value representing a presence of the next page in listing.
     */
    const loadFullListing = async (callback: () => Promise<boolean>): Promise<void> => {
        const hasNextPage = await callback();
        if (hasNextPage) {
            await loadFullListing(callback);
        }
    };

    /**
     * Returns cached decrypted links (including stale), decrypts
     * all encrypted or stale links in the background.
     */
    const getDecryptedLinksAndDecryptRest = (
        abortSignal: AbortSignal,
        shareId: string,
        links: Link[],
        fetchMeta?: FetchMeta
    ): { links: DecryptedLink[]; isDecrypting: boolean } => {
        // Return decrypted links right away.
        // Those links that have been updated in the background by an event
        // still need to be decrypted. We run decryption asynchronously.
        const linksToBeDecrypted = links
            .filter(
                ({ decrypted }) =>
                    decrypted?.isStale ||
                    // Link was added outside of this listing and thus we need to decrypt it now
                    // (if the listing is in progress, it might still be decrypted)
                    (!decrypted && !fetchMeta?.isInProgress) // Link was added not by listing.
            )
            .map(({ encrypted }) => encrypted);
        // Fire-and-forget: failures are reported, not surfaced to the caller.
        decryptAndCacheLinks(abortSignal, shareId, linksToBeDecrypted).catch(sendErrorReport);

        return {
            links: links.map(({ decrypted }) => decrypted).filter(isTruthy),
            isDecrypting: linksToBeDecrypted.length > 0,
        };
    };

    return {
        cacheLoadedLinks,
        fetchNextPageWithSortingHelper,
        fetchNextPageHelper,
        loadFullListing,
        getDecryptedLinksAndDecryptRest,
    };
}

// True only when both sortings are provided and identical.
function isSameSorting(one?: SortParams, other?: SortParams): boolean {
    return (
        one !== undefined &&
        other !== undefined &&
        one.sortField === other.sortField &&
        one.sortOrder === other.sortOrder
    );
}

// Maps client sort params to the capitalized server-side query arguments.
// NOTE(review): the `mimeType` entry is unreachable — SortParams.sortField
// does not include 'mimeType'; presumably a leftover — confirm before removing.
export function sortParamsToServerSortArgs({ sortField, sortOrder }: SortParams): { Sort: string; Desc: 0 | 1 } {
    const Sort = {
        mimeType: 'MIMEType',
        size: 'Size',
        createTime: 'CreateTime',
        metaDataModifyTime: 'ModifyTime',
    }[sortField];
    return {
        Sort,
        Desc: sortOrder === SORT_DIRECTION.ASC ? 0 : 1,
    };
}
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/usePublicLinksListing.tsx
import { createContext, useCallback, useContext, useRef } from 'react';

import { querySharedURLChildren } from '@proton/shared/lib/api/drive/sharing';
import { LinkChildrenResult } from '@proton/shared/lib/interfaces/drive/link';

import { linkMetaToEncryptedLink, usePublicSession } from '../../_api';
import { DecryptedLink } from '../interface';
import useLinksState from '../useLinksState';
import {
    FetchMeta,
    FetchResponse,
    PAGE_SIZE,
    SortParams,
    sortParamsToServerSortArgs,
    useLinksListingHelpers,
} from './useLinksListingHelpers';

// Per-token fetch bookkeeping: the public share URL token plays the role
// the share ID has in the authenticated listing.
type FetchState = {
    [token: string]: FetchTokenState;
};

type FetchTokenState = {
    [linkId: string]: FetchMeta;
};

/**
 * usePublicLinksListingProvider provides way to list publicly shared folder.
 * The typical usage should be similar as for useLinksListingProvider.
 */
export function usePublicLinksListingProvider() {
    const { request: publicRequest } = usePublicSession();
    const linksState = useLinksState();
    const { fetchNextPageWithSortingHelper, loadFullListing, getDecryptedLinksAndDecryptRest } =
        useLinksListingHelpers();
    // Mutable fetch bookkeeping; a ref so updates don't trigger re-renders.
    const state = useRef<FetchState>({});

    /**
     * getTokenFetchState returns state for given `token`.
     * It ensures that the token is present in the state.
     */
    const getTokenFetchState = (token: string): FetchTokenState => {
        if (state.current[token]) {
            return state.current[token];
        }
        state.current[token] = {};
        return state.current[token];
    };

    // Fetches one page of children of a publicly shared folder.
    const fetchPublicChildrenPage = async (
        abortSignal: AbortSignal,
        token: string,
        parentLinkId: string,
        sorting: SortParams,
        page: number,
        showNotification = true
    ): Promise<FetchResponse> => {
        const { Links } = await publicRequest<LinkChildrenResult>(
            {
                ...querySharedURLChildren(token, parentLinkId, {
                    ...sortParamsToServerSortArgs(sorting),
                    PageSize: PAGE_SIZE,
                    Page: page,
                }),
                silence: !showNotification,
            },
            abortSignal
        );
        // No share ID in the public context; an empty string is used in its place.
        return { links: Links.map((linkMeta) => linkMetaToEncryptedLink(linkMeta, '')), parents: [] };
    };

    const fetchPublicChildrenNextPage = async (
        abortSignal: AbortSignal,
        token: string,
        parentLinkId: string,
        sorting?: SortParams,
        showNotification = true
    ): Promise<boolean> => {
        const tokenState = getTokenFetchState(token);
        let linkFetchMeta = tokenState[parentLinkId];
        if (!linkFetchMeta) {
            linkFetchMeta = {};
            tokenState[parentLinkId] = linkFetchMeta;
        }

        // The token stands in for the share ID expected by the helper.
        return fetchNextPageWithSortingHelper(
            abortSignal,
            token,
            sorting,
            linkFetchMeta,
            (sorting: SortParams, page: number) => {
                return fetchPublicChildrenPage(abortSignal, token, parentLinkId, sorting, page, showNotification);
            },
            showNotification
        );
    };

    // Loads all pages of children for the given publicly shared folder.
    const loadChildren = async (
        abortSignal: AbortSignal,
        token: string,
        linkId: string,
        showNotification = true
    ): Promise<void> => {
        // undefined means keep the sorting used the last time = lets reuse what we loaded so far.
        const sorting = undefined;
        return loadFullListing(() =>
            fetchPublicChildrenNextPage(abortSignal, token, linkId, sorting, showNotification)
        );
    };

    // Returns already-cached children synchronously; kicks off background
    // decryption of any stale or not-yet-decrypted links.
    const getCachedChildren = useCallback(
        (
            abortSignal: AbortSignal,
            token: string,
            parentLinkId: string,
            foldersOnly: boolean = false
        ): { links: DecryptedLink[]; isDecrypting: boolean } => {
            return getDecryptedLinksAndDecryptRest(
                abortSignal,
                token,
                linksState.getChildren(token, parentLinkId, foldersOnly),
                getTokenFetchState(token)[parentLinkId]
            );
        },
        [linksState.getChildren]
    );

    return {
        loadChildren,
        getCachedChildren,
    };
}

const PublicLinksListingContext = createContext<ReturnType<typeof usePublicLinksListingProvider> | null>(null);

export function PublicLinksListingProvider({ children }: { children: React.ReactNode }) {
    const value = usePublicLinksListingProvider();
    return <PublicLinksListingContext.Provider value={value}>{children}</PublicLinksListingContext.Provider>;
}

export default function useLinksListing() {
    const state = useContext(PublicLinksListingContext);
    if (!state) {
        throw new Error('Trying to use uninitialized PublicLinksListingProvider');
    }
    return state;
}
3,097
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useSharedLinksListing.test.tsx
import { act, renderHook } from '@testing-library/react-hooks'; import { VolumesStateProvider } from '../../_volumes/useVolumesState'; import { LinksStateProvider } from '../useLinksState'; import { PAGE_SIZE } from './useLinksListingHelpers'; import { useSharedLinksListing } from './useSharedLinksListing'; jest.mock('@proton/shared/lib/api/drive/volume', () => ({ queryVolumeSharedLinks: jest.fn(), })); const mockRequest = jest.fn(); jest.mock('../../_api/useDebouncedRequest', () => { const useDebouncedRequest = () => { return mockRequest; }; return useDebouncedRequest; }); jest.mock('../../_utils/errorHandler', () => { return { useErrorHandler: () => ({ showErrorNotification: jest.fn(), showAggregatedErrorNotification: jest.fn(), }), }; }); jest.mock('../useLink', () => { const useLink = () => { return { decryptLink: jest.fn(), }; }; return useLink; }); const queryVolumeSharedLinksMock = require('@proton/shared/lib/api/drive/volume').queryVolumeSharedLinks as jest.Mock; const generateArrayOfRandomStrings = (size: number): string[] => { return Array.from({ length: size }, () => Math.random().toString(36).substring(2)); }; describe('useSharedLinksListing', () => { let hook: { current: ReturnType<typeof useSharedLinksListing>; }; beforeEach(() => { jest.resetAllMocks(); const wrapper = ({ children }: { children: React.ReactNode }) => ( <VolumesStateProvider> <LinksStateProvider>{children}</LinksStateProvider> </VolumesStateProvider> ); const { result } = renderHook(() => useSharedLinksListing(), { wrapper }); hook = result; jest.resetAllMocks(); }); it('should fetch the first page of shared links for a given volume', async () => { const volumeId = '1'; const page = 0; const response = { ShareURLContexts: [ { ContextShareID: '1', LinkIDs: generateArrayOfRandomStrings(10), ShareURLs: generateArrayOfRandomStrings(10), }, ], }; mockRequest.mockResolvedValue(response); await act(async () => { await hook.current.loadSharedLinks(new AbortController().signal, volumeId, () => 
Promise.resolve({ links: [], parents: [], errors: [] }) ); }); expect(queryVolumeSharedLinksMock).toHaveBeenCalledWith(volumeId, { Page: page, PageSize: PAGE_SIZE }); }); it('should increment the page count when fetching the next page of shared links', async () => { const volumeId = '1'; const page = 0; let firstResponse = { ShareURLContexts: [ { ContextShareID: '1', LinkIDs: generateArrayOfRandomStrings(PAGE_SIZE), ShareURLs: generateArrayOfRandomStrings(PAGE_SIZE), }, ], }; let secondResponse = { ShareURLContexts: [ { ContextShareID: '1', LinkIDs: generateArrayOfRandomStrings(1), ShareURLs: generateArrayOfRandomStrings(1), }, ], }; mockRequest.mockResolvedValueOnce(firstResponse).mockResolvedValueOnce(secondResponse); const { loadSharedLinks } = hook.current; await act(async () => { await loadSharedLinks(new AbortController().signal, volumeId, () => Promise.resolve({ links: [], parents: [], errors: [] }) ); }); expect(queryVolumeSharedLinksMock).toHaveBeenCalledWith(volumeId, { Page: page + 1, PageSize: PAGE_SIZE }); // verify that the script terminates successfully expect(queryVolumeSharedLinksMock).toBeCalledTimes(2); }); });
3,098
0
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links
petrpan-code/ProtonMail/WebClients/applications/drive/src/app/store/_links/useLinksListing/useSharedLinksListing.tsx
import { useCallback, useRef } from 'react';

import { queryVolumeSharedLinks } from '@proton/shared/lib/api/drive/volume';
import { ListDriveVolumeSharedLinksPayload } from '@proton/shared/lib/interfaces/drive/volume';

import { useDebouncedRequest } from '../../_api';
import useVolumesState from '../../_volumes/useVolumesState';
import { DecryptedLink } from '../interface';
import useLinksState from '../useLinksState';
import { FetchLoadLinksMeta } from './interface';
import { DEFAULT_SORTING, FetchMeta, PAGE_SIZE, SortParams, useLinksListingHelpers } from './useLinksListingHelpers';

interface FetchSharedLinksMeta extends FetchMeta {
    lastPage: number;
    lastSorting: SortParams;
}

// Fetch progress per volume ID.
type SharedLinksFetchState = {
    [volumeId: string]: FetchSharedLinksMeta;
};

/**
 * Custom hook for managing and fetching shared links for a given volume.
 */
export function useSharedLinksListing() {
    const debouncedRequest = useDebouncedRequest();
    const linksState = useLinksState();
    const volumesState = useVolumesState();
    const { loadFullListing, getDecryptedLinksAndDecryptRest } = useLinksListingHelpers();

    // Ref, not state: pagination bookkeeping must not cause re-renders.
    const sharedLinksFetchState = useRef<SharedLinksFetchState>({});

    // Returns the fetch state for `volumeId`, creating it on first access.
    const getSharedLinksFetchState = useCallback((volumeId: string) => {
        if (sharedLinksFetchState.current[volumeId]) {
            return sharedLinksFetchState.current[volumeId];
        }

        sharedLinksFetchState.current[volumeId] = {
            lastPage: 0,
            lastSorting: DEFAULT_SORTING,
        };

        return sharedLinksFetchState.current[volumeId];
    }, []);

    /**
     * Fetches one page of shared links for `volumeId`. A page is considered
     * to have a successor when it is full (total links >= PAGE_SIZE).
     */
    const queryVolumeSharedLinksPage = async (
        volumeId: string,
        page: number
    ): Promise<{ response: ListDriveVolumeSharedLinksPayload; hasNextPage: boolean }> => {
        const response = await debouncedRequest<ListDriveVolumeSharedLinksPayload>(
            queryVolumeSharedLinks(volumeId, { Page: page, PageSize: PAGE_SIZE })
        );

        const totalLinks = Object.values(response.ShareURLContexts).reduce(
            (acc, share) => acc + share.ShareURLs.length,
            0
        );
        const hasNextPage = totalLinks >= PAGE_SIZE;

        return {
            response,
            hasNextPage,
        };
    };

    /**
     * Loads link metadata for every share in the map, share by share.
     * Sequential on purpose: each call may issue its own batched requests.
     */
    const loadSharedLinksMeta = async (
        signal: AbortSignal,
        transformedResponse: {
            [shareId: string]: string[];
        },
        loadLinksMeta: FetchLoadLinksMeta
    ) => {
        for (const [shareId, linkIds] of Object.entries(transformedResponse)) {
            await loadLinksMeta(signal, 'sharedByLink', shareId, linkIds);
        }
    };

    /**
     * Fetches the next page of shared links for `volumeId` and loads the
     * metadata of the returned links. Returns whether more pages remain.
     */
    const fetchSharedLinksNextPage = async (
        signal: AbortSignal,
        volumeId: string,
        loadLinksMeta: FetchLoadLinksMeta
    ): Promise<boolean> => {
        const sharedLinksFetchMeta = getSharedLinksFetchState(volumeId);

        if (sharedLinksFetchMeta.isEverythingFetched) {
            return false;
        }

        const { response, hasNextPage } = await queryVolumeSharedLinksPage(volumeId, sharedLinksFetchMeta.lastPage);
        const volumeShareIds = response.ShareURLContexts.map((share) => share.ContextShareID);
        volumesState.setVolumeShareIds(volumeId, volumeShareIds);

        const transformedResponse = transformSharedLinksResponseToLinkMap(response);
        await loadSharedLinksMeta(signal, transformedResponse, loadLinksMeta);

        sharedLinksFetchMeta.lastPage++;
        sharedLinksFetchMeta.isEverythingFetched = !hasNextPage;

        return hasNextPage;
    };

    /**
     * Loads shared links for a given volume.
     */
    const loadSharedLinks = async (
        signal: AbortSignal,
        volumeId: string,
        loadLinksMeta: FetchLoadLinksMeta
    ): Promise<void> => {
        return loadFullListing(() => fetchSharedLinksNextPage(signal, volumeId, loadLinksMeta));
    };

    /**
     * Gets shared links that have already been fetched and cached.
     */
    const getCachedSharedLinks = useCallback(
        (abortSignal: AbortSignal, volumeId?: string): { links: DecryptedLink[]; isDecrypting: boolean } => {
            if (!volumeId) {
                return {
                    links: [],
                    isDecrypting: false,
                };
            }
            const associatedShareIds = volumesState.getVolumeShareIds(volumeId);
            const result = associatedShareIds.map((shareId) => {
                return getDecryptedLinksAndDecryptRest(
                    abortSignal,
                    shareId,
                    linksState.getSharedByLink(shareId),
                    getSharedLinksFetchState(volumeId)
                );
            });

            // Merge per-share results: concatenate links, and report
            // decrypting while any share is still decrypting.
            const links = result.reduce<DecryptedLink[]>((acc, element) => {
                return [...acc, ...element.links];
            }, []);

            const isDecrypting = result.some((element) => {
                return element.isDecrypting;
            });

            return {
                links,
                isDecrypting,
            };
        },
        [linksState.getSharedByLink]
    );

    return {
        loadSharedLinks,
        getCachedSharedLinks,
    };
}

/**
 * Transforms a shared links response from the API into an object with share IDs as keys,
 * and link IDs and parent IDs as values.
 */
function transformSharedLinksResponseToLinkMap(response: ListDriveVolumeSharedLinksPayload) {
    return response.ShareURLContexts.reduce<{
        [shareId: string]: string[];
    }>((acc, share) => {
        acc[share.ContextShareID] = share.LinkIDs;
        return acc;
    }, {});
}
3,099