diff --git a/web/apps/photos/package.json b/web/apps/photos/package.json
index 4f343147587..ba799e138a1 100644
--- a/web/apps/photos/package.json
+++ b/web/apps/photos/package.json
@@ -3,6 +3,8 @@
"version": "0.0.0",
"private": true,
"dependencies": {
+ "@hugeicons/core-free-icons": "^1.1.0",
+ "@hugeicons/react": "^1.1.4",
"chrono-node": "^2.9.0",
"debounce": "^2.2.0",
"ente-accounts": "*",
diff --git a/web/apps/photos/src/components/DownloadStatusNotifications.tsx b/web/apps/photos/src/components/DownloadStatusNotifications.tsx
index 88df1a27741..3004c21dab0 100644
--- a/web/apps/photos/src/components/DownloadStatusNotifications.tsx
+++ b/web/apps/photos/src/components/DownloadStatusNotifications.tsx
@@ -1,4 +1,12 @@
+import {
+ Download01Icon,
+ Loading03Icon,
+ Tick02Icon,
+} from "@hugeicons/core-free-icons";
+import { HugeiconsIcon } from "@hugeicons/react";
+import ErrorOutlineIcon from "@mui/icons-material/ErrorOutline";
import ReplayIcon from "@mui/icons-material/Replay";
+import { keyframes, styled, Typography } from "@mui/material";
import { useBaseContext } from "ente-base/context";
import {
isSaveComplete,
@@ -8,6 +16,15 @@ import {
import { Notification } from "ente-new/photos/components/Notification";
import { t } from "i18next";
+/** Maximum characters for album name before truncation */
+const MAX_ALBUM_NAME_LENGTH = 25;
+
+/** Truncate album name with ellipsis if it exceeds max length */
+const truncateAlbumName = (name: string): string => {
+ if (name.length <= MAX_ALBUM_NAME_LENGTH) return name;
+ return name.slice(0, MAX_ALBUM_NAME_LENGTH) + "...";
+};
+
interface DownloadStatusNotificationsProps {
/**
* A list of user-initiated downloads for which a status should be shown.
@@ -89,33 +106,116 @@ export const DownloadStatusNotifications: React.FC<
return saveGroups.map((group, index) => {
const hasErrors = isSaveCompleteWithErrors(group);
+ const isComplete = isSaveComplete(group);
const canRetry = hasErrors && !!group.retry;
- const failedTitle = `${t("download_failed")} (${group.failed}/${group.total})`;
+
+ // Determine if this is a batched ZIP download (a web download with multiple files)
+ const isZipDownload = !group.downloadDirPath && group.total > 1;
+ const isDesktopOrSingleFile =
+ !!group.downloadDirPath || group.total === 1;
+
+ // Build the status text for the caption
+ let statusText: React.ReactNode;
+ if (hasErrors) {
+ // Show specific error message based on failure reason
+ if (group.failureReason === "network_offline") {
+ statusText = t("download_failed_network_offline");
+ } else if (group.failureReason === "file_error") {
+ statusText = t("download_failed_file_error");
+ } else {
+ statusText = t("download_failed");
+ }
+ } else if (isComplete) {
+ statusText = t("download_complete");
+ } else if (isZipDownload) {
+ const part = group.currentPart ?? 1;
+ statusText = group.isDownloadingZip
+ ? t("downloading_part", { part })
+ : t("preparing_part", { part });
+ } else if (isDesktopOrSingleFile) {
+ statusText =
+ group.total === 1
+ ? t("downloading_file")
+ : t("downloading_files");
+ } else {
+ statusText = t("downloading");
+ }
+
+ // Build caption: "Status • X / Y files"
+ const progress = t("download_progress", {
+ count: group.success + group.failed,
+ total: group.total,
+ });
+ const caption = (
+     <Typography variant="small">
+         {statusText}
+         {!isComplete && <> • {progress}</>}
+     </Typography>
+ );
+
+ // Determine the start icon based on state
+ let startIcon: React.ReactNode;
+ if (hasErrors) {
+ startIcon = <ErrorOutlineIcon />;
+ } else if (isComplete) {
+ startIcon = (
+     <GlowingIconWrapper>
+         <HugeiconsIcon icon={Tick02Icon} />
+     </GlowingIconWrapper>
+ );
+ } else if (isZipDownload && !group.isDownloadingZip) {
+ // Preparing state - use loading icon
+ startIcon = <SpinningIcon />;
+ } else {
+ // Downloading state
+ startIcon = (
+     <DroppingIconWrapper>
+         <HugeiconsIcon icon={Download01Icon} />
+     </DroppingIconWrapper>
+ );
+ }
+
+ // Title is always the album name (truncated)
+ const truncatedName = truncateAlbumName(group.title);
return (
+ <Notification
+     key={index}
+     attributes={{
+         startIcon,
+         title: truncatedName,
+         caption,
endIcon: canRetry ? (
<ReplayIcon />
) : undefined,
onEndIconClick: canRetry
? () => group.retry?.()
@@ -125,3 +225,55 @@ export const DownloadStatusNotifications: React.FC<
);
});
};
+
+/** CSS keyframes for spinning animation */
+const spinAnimation = keyframes`
+ from { transform: rotate(0deg); }
+ to { transform: rotate(360deg); }
+`;
+
+/** CSS keyframes for drop from top animation */
+const dropAnimation = keyframes`
+ 0% { transform: translateY(-100%); opacity: 0.15; }
+ 50% { transform: translateY(10%); opacity: 0.6; }
+ 100% { transform: translateY(0); opacity: 1; }
+`;
+
+/** CSS keyframes for green glow animation */
+const glowAnimation = keyframes`
+ 0% { color: var(--mui-palette-fixed-success); }
+ 100% { color: inherit; }
+`;
+
+/** CSS keyframes for fade in animation */
+const fadeInAnimation = keyframes`
+ 0% { opacity: 0; }
+ 100% { opacity: 1; }
+`;
+
+/** Drop animation icon wrapper */
+const DroppingIconWrapper = styled("span")`
+ display: inline-flex;
+ animation: ${dropAnimation} 0.8s ease-out forwards;
+`;
+
+/** Glowing icon wrapper for success state */
+const GlowingIconWrapper = styled("span")`
+ display: inline-flex;
+ animation: ${glowAnimation} 2s ease-out forwards;
+`;
+
+/** Spinning loading icon wrapper */
+const SpinningIconWrapper = styled("span")`
+ display: inline-flex;
+ animation:
+ ${fadeInAnimation} 0.5s ease-out forwards,
+ ${spinAnimation} 3s linear infinite;
+`;
+
+/** Spinning loading icon */
+const SpinningIcon: React.FC = () => (
+    <SpinningIconWrapper>
+        <HugeiconsIcon icon={Loading03Icon} />
+    </SpinningIconWrapper>
+);
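For reference, a minimal sketch of the `HugeiconsIcon` API that the icons above rely on; only the `icon` prop is exercised by this diff, so `size` and `color` here are assumptions.

```tsx
import { Download01Icon } from "@hugeicons/core-free-icons";
import { HugeiconsIcon } from "@hugeicons/react";

// Renders a single Hugeicons glyph; the wrapper resolves the icon data from
// the free icon set added as a dependency in this change.
export const DownloadGlyph = () => (
    <HugeiconsIcon icon={Download01Icon} size={18} color="currentColor" />
);
```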
diff --git a/web/packages/base/locales/en-US/translation.json b/web/packages/base/locales/en-US/translation.json
index f6fbfaed797..cbd9c149388 100644
--- a/web/packages/base/locales/en-US/translation.json
+++ b/web/packages/base/locales/en-US/translation.json
@@ -657,7 +657,17 @@
"unpreviewable_file_message": "This file could not be previewed",
"download_complete": "Download complete",
"downloading_album": "Downloading {{name}}",
+ "creating_zip": "Creating ZIP for {{name}}",
+ "preparing": "Preparing",
+ "downloading": "Downloading",
+ "preparing_part": "Preparing Part {{part}}",
+ "downloading_part": "Downloading Part {{part}}",
+ "downloading_file": "Downloading file",
+ "downloading_files": "Downloading files",
"download_failed": "Download failed",
+ "download_failed_network_offline": "Connection lost",
+ "download_failed_file_error": "Some files failed",
+ "retry": "Retry",
"download_progress": "{{count, number}} / {{total, number}} files",
"christmas": "Christmas",
"christmas_eve": "Christmas Eve",
diff --git a/web/packages/gallery/components/utils/save-groups.ts b/web/packages/gallery/components/utils/save-groups.ts
index e944685282e..22a776bdf54 100644
--- a/web/packages/gallery/components/utils/save-groups.ts
+++ b/web/packages/gallery/components/utils/save-groups.ts
@@ -73,6 +73,26 @@ export interface SaveGroup {
* An {@link AbortController} that can be used to cancel the save.
*/
canceller: AbortController;
+ /**
+ * The reason for the failure, if any.
+ *
+ * This is used to show a more specific error message to the user.
+ * - "network_offline": The network went offline during download
+ * - "file_error": One or more individual files failed to download
+ * - undefined: No specific reason (generic error)
+ */
+ failureReason?: "network_offline" | "file_error";
+ /**
+ * `true` when the ZIP file is being downloaded from memory to the user's
+ * device. This is only relevant for web downloads where files are first
+ * collected into a ZIP in memory, then saved to the device.
+ */
+ isDownloadingZip?: boolean;
+ /**
+ * The current ZIP part number being processed. Only relevant for web
+ * downloads where files are batched into multiple ZIP parts.
+ */
+ currentPart?: number;
}
/**
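A minimal sketch of how the save flow is expected to set these optional fields through an `UpdateSaveGroup` updater; the `ente-gallery` import specifier is an assumption (the call sites in save.ts use a relative path).

```ts
import type { UpdateSaveGroup } from "ente-gallery/components/utils/save-groups";

// Flag the whole group as having failed because connectivity was lost.
const markOffline = (updateSaveGroup: UpdateSaveGroup) =>
    updateSaveGroup((g) => ({ ...g, failureReason: "network_offline" }));

// Record which ZIP part is active and whether its blob is currently saving.
const markZipPart = (
    updateSaveGroup: UpdateSaveGroup,
    currentPart: number,
    isDownloadingZip: boolean,
) => updateSaveGroup((g) => ({ ...g, currentPart, isDownloadingZip }));
```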
diff --git a/web/packages/gallery/package.json b/web/packages/gallery/package.json
index fe528a82605..496542b4149 100644
--- a/web/packages/gallery/package.json
+++ b/web/packages/gallery/package.json
@@ -9,6 +9,7 @@
"ente-utils": "*",
"exifreader": "^4.32.0",
"hls-video-element": "^1.5.8",
+ "jszip": "^3.10.1",
"leaflet": "^1.9.4",
"leaflet-defaulticon-compatibility": "^0.1.2",
"localforage": "^1.10.0",
diff --git a/web/packages/gallery/services/save.ts b/web/packages/gallery/services/save.ts
index a5d15d1339d..6554beb7714 100644
--- a/web/packages/gallery/services/save.ts
+++ b/web/packages/gallery/services/save.ts
@@ -1,10 +1,9 @@
import { assertionFailed } from "ente-base/assert";
-import { joinPath } from "ente-base/file-name";
+import { joinPath, nameAndExtension } from "ente-base/file-name";
import log from "ente-base/log";
import { type Electron } from "ente-base/types/ipc";
import { saveAsFileAndRevokeObjectURL } from "ente-base/utils/web";
import { downloadManager } from "ente-gallery/services/download";
-import { detectFileTypeInfo } from "ente-gallery/utils/detect-type";
import { writeStream } from "ente-gallery/utils/native-stream";
import type { EnteFile } from "ente-media/file";
import { fileFileName } from "ente-media/file-metadata";
@@ -14,12 +13,50 @@ import {
safeDirectoryName,
safeFileName,
} from "ente-new/photos/utils/native-fs";
-import { wait } from "ente-utils/promise";
+import JSZip from "jszip";
import type {
AddSaveGroup,
UpdateSaveGroup,
} from "../components/utils/save-groups";
+/**
+ * Download limits optimized for different devices and browsers.
+ */
+interface DownloadLimits {
+ /** Number of concurrent file downloads. */
+ concurrency: number;
+ /** Maximum size of a ZIP batch in bytes. */
+ maxZipSize: number;
+}
+
+let cachedLimits: DownloadLimits | undefined;
+
+/**
+ * Get download limits for the current device.
+ *
+ * - Mobile devices: 4 concurrent, 100MB max
+ * - Desktop: 8 concurrent, 250MB max
+ */
+const getDownloadLimits = (): DownloadLimits => {
+ if (cachedLimits) return cachedLimits;
+
+ const ua = navigator.userAgent.toLowerCase();
+ const isMobile =
+ ua.includes("iphone") ||
+ ua.includes("ipad") ||
+ ua.includes("ipod") ||
+ ua.includes("android") ||
+ ua.includes("mobile") ||
+ ua.includes("tablet") ||
+ (ua.includes("macintosh") && navigator.maxTouchPoints > 1);
+
+ cachedLimits = isMobile
+ ? { concurrency: 4, maxZipSize: 100 * 1024 * 1024 } // 100MB
+ : { concurrency: 8, maxZipSize: 250 * 1024 * 1024 }; // 250MB
+
+ return cachedLimits;
+};
+
/**
* Save the given {@link files} to the user's device.
*
@@ -117,12 +154,17 @@ const downloadAndSave = async (
const failedFiles: EnteFile[] = [];
let isDownloading = false;
let updateSaveGroup: UpdateSaveGroup = () => undefined;
+ // Track the next ZIP batch index across retries so part numbers continue
+ // sequentially (e.g., if initial download creates Parts 1-5 and fails,
+ // retry creates Part 6+ instead of starting over at Part 1).
+ let nextZipBatchIndex = 1;
- const downloadFiles = async (
+ const downloadFilesDesktop = async (
filesToDownload: EnteFile[],
resetFailedCount = false,
) => {
if (!filesToDownload.length || isDownloading) return;
+ if (!electron || !downloadDirPath) return;
isDownloading = true;
if (resetFailedCount) {
@@ -134,11 +176,7 @@ const downloadAndSave = async (
for (const file of filesToDownload) {
if (canceller.signal.aborted) break;
try {
- if (electron && downloadDirPath) {
- await saveFileDesktop(electron, file, downloadDirPath);
- } else {
- await saveAsFile(file);
- }
+ await saveFileDesktop(electron, file, downloadDirPath);
updateSaveGroup((g) => ({ ...g, success: g.success + 1 }));
} catch (e) {
log.error("File download failed", e);
@@ -155,6 +193,94 @@ const downloadAndSave = async (
}
};
+ const downloadFilesWeb = async (
+ filesToDownload: EnteFile[],
+ resetFailedCount = false,
+ ) => {
+ if (!filesToDownload.length || isDownloading) return;
+
+ // Reset counts first if this is a retry, before any other logic
+ if (resetFailedCount) {
+ updateSaveGroup((g) => ({
+ ...g,
+ failed: 0,
+ failureReason: undefined,
+ }));
+ failedFiles.length = 0;
+ }
+
+ // If already offline, mark all files as failed so retry is available
+ if (!navigator.onLine) {
+ log.info("Download skipped - network is offline");
+ for (const file of filesToDownload) {
+ failedFiles.push(file);
+ }
+ updateSaveGroup((g) => ({
+ ...g,
+ failed: filesToDownload.length,
+ failureReason: "network_offline",
+ }));
+ return;
+ }
+
+ isDownloading = true;
+ // Only clear on first download, not retry (already cleared above)
+ if (!resetFailedCount) {
+ failedFiles.length = 0;
+ }
+
+ try {
+ // Single non-live-photo file: download directly without zipping
+ const singleFile = filesToDownload[0];
+ if (
+ filesToDownload.length === 1 &&
+ singleFile &&
+ singleFile.metadata.fileType !== FileType.livePhoto
+ ) {
+ try {
+ const fileBlob = await downloadManager.fileBlob(singleFile);
+ const fileName = fileFileName(singleFile);
+ const url = URL.createObjectURL(fileBlob);
+ saveAsFileAndRevokeObjectURL(url, fileName);
+ updateSaveGroup((g) => ({ ...g, success: g.success + 1 }));
+ } catch (e) {
+ log.error("File download failed", e);
+ failedFiles.push(singleFile);
+ updateSaveGroup((g) => ({ ...g, failed: g.failed + 1 }));
+ }
+ } else {
+ // Multiple files or live photo: use ZIP
+ nextZipBatchIndex = await saveAsZip(
+ filesToDownload,
+ title,
+ () =>
+ updateSaveGroup((g) => ({
+ ...g,
+ success: g.success + 1,
+ })),
+ (file) => {
+ failedFiles.push(file);
+ updateSaveGroup((g) => ({
+ ...g,
+ failed: g.failed + 1,
+ }));
+ },
+ canceller,
+ updateSaveGroup,
+ nextZipBatchIndex,
+ );
+ }
+
+ if (!failedFiles.length) {
+ updateSaveGroup((g) => ({ ...g, retry: undefined }));
+ }
+ } finally {
+ isDownloading = false;
+ }
+ };
+
+ const downloadFiles = electron ? downloadFilesDesktop : downloadFilesWeb;
+
const retry = () => {
if (!failedFiles.length || isDownloading || canceller.signal.aborted)
return;
@@ -175,38 +301,352 @@ const downloadAndSave = async (
};
/**
- * Save the given {@link EnteFile} as a file in the user's download folder.
+ * A helper class to accumulate files into ZIP batches and download them when
+ * the batch size limit is reached.
*/
-const saveAsFile = async (file: EnteFile) => {
+class ZipBatcher {
+ private zip = new JSZip();
+ private currentBatchSize = 0;
+ private currentFileCount = 0;
+ private batchIndex: number;
+ private usedNames = new Set<string>();
+ private baseName: string;
+ private maxZipSize: number;
+ private onStateChange?: (
+ isDownloading: boolean,
+ partNumber: number,
+ ) => void;
+
+ constructor(
+ baseName: string,
+ maxZipSize: number,
+ startingBatchIndex = 1,
+ onStateChange?: (isDownloading: boolean, partNumber: number) => void,
+ ) {
+ this.baseName = baseName;
+ this.maxZipSize = maxZipSize;
+ this.batchIndex = startingBatchIndex;
+ this.onStateChange = onStateChange;
+ }
+
+ /**
+ * Get the next batch index that would be used for the next ZIP file.
+ * This is useful for tracking progress across retries.
+ */
+ getNextBatchIndex(): number {
+ return this.batchIndex;
+ }
+
+ /**
+ * Get the current batch index being processed.
+ */
+ getCurrentBatchIndex(): number {
+ return this.batchIndex;
+ }
+
+ /**
+ * Add file data to the current ZIP batch. If adding this file would exceed
+ * the batch size limit, the current batch is downloaded first.
+ */
+ async addFile(data: Uint8Array | Blob, fileName: string): Promise<void> {
+ const size = data instanceof Blob ? data.size : data.byteLength;
+
+ // If adding this file would exceed the limit and we have files in the
+ // batch, download the current batch first.
+ if (
+ this.currentBatchSize > 0 &&
+ this.currentBatchSize + size > this.maxZipSize
+ ) {
+ await this.downloadCurrentBatch();
+ // Notify that we're now preparing a new part
+ this.onStateChange?.(false, this.batchIndex);
+ }
+
+ // Ensure unique file names within the ZIP
+ const uniqueName = this.getUniqueName(fileName);
+ this.usedNames.add(uniqueName);
+ this.zip.file(uniqueName, data);
+ this.currentBatchSize += size;
+ this.currentFileCount++;
+ }
+
+ /**
+ * Download any remaining files in the current batch.
+ */
+ async flush(): Promise<void> {
+ if (this.currentBatchSize > 0) {
+ await this.downloadCurrentBatch();
+ }
+ }
+
+ private async downloadCurrentBatch(): Promise<void> {
+ this.onStateChange?.(true, this.batchIndex);
+ try {
+ const zipBlob = await this.zip.generateAsync({ type: "blob" });
+ const fileLabel =
+ this.currentFileCount === 1
+ ? "1 file"
+ : `${this.currentFileCount} files`;
+ const zipName = `${this.baseName} Part ${this.batchIndex} - ${fileLabel}.zip`;
+
+ const url = URL.createObjectURL(zipBlob);
+ saveAsFileAndRevokeObjectURL(url, zipName);
+ } finally {
+ this.onStateChange?.(false, this.batchIndex);
+ }
+
+ // Reset for next batch
+ this.zip = new JSZip();
+ this.currentBatchSize = 0;
+ this.currentFileCount = 0;
+ this.usedNames.clear();
+ this.batchIndex++;
+ }
+
+ /**
+ * Generate a unique file name within the ZIP by appending a suffix if the
+ * name already exists.
+ */
+ private getUniqueName(fileName: string): string {
+ if (!this.usedNames.has(fileName)) {
+ return fileName;
+ }
+
+ const [name, ext] = nameAndExtension(fileName);
+ let counter = 1;
+ let uniqueName: string;
+ do {
+ uniqueName = ext
+ ? `${name}(${counter}).${ext}`
+ : `${name}(${counter})`;
+ counter++;
+ } while (this.usedNames.has(uniqueName));
+
+ return uniqueName;
+ }
+}
+
+/** Result of downloading and processing a single file for ZIP inclusion. */
+type DownloadedFileData =
+ | { type: "regular"; fileName: string; data: Uint8Array }
+ | {
+ type: "livePhoto";
+ imageFileName: string;
+ imageData: Uint8Array;
+ videoFileName: string;
+ videoData: Uint8Array;
+ };
+
+/**
+ * Download and process a single file, returning the data ready for ZIP.
+ */
+const downloadFileForZip = async (
+ file: EnteFile,
+): Promise<DownloadedFileData> => {
const fileBlob = await downloadManager.fileBlob(file);
const fileName = fileFileName(file);
+
if (file.metadata.fileType == FileType.livePhoto) {
const { imageFileName, imageData, videoFileName, videoData } =
await decodeLivePhoto(fileName, fileBlob);
-
- await saveBlobPartAsFile(imageData, imageFileName);
-
- // Downloading multiple works everywhere except, you guessed it,
- // Safari. Make up for their incompetence by adding a setTimeout.
- await wait(300) /* arbitrary constant, 300ms */;
- await saveBlobPartAsFile(videoData, videoFileName);
+ return {
+ type: "livePhoto",
+ imageFileName,
+ imageData,
+ videoFileName,
+ videoData,
+ };
} else {
- await saveBlobPartAsFile(fileBlob, fileName);
+ const data = new Uint8Array(await fileBlob.arrayBuffer());
+ return { type: "regular", fileName, data };
}
};
/**
- * Save the given {@link blob} as a file in the user's download folder.
+ * Save multiple files as ZIP archives to the user's download folder.
+ *
+ * Files are batched into ZIP archives capped at the device-dependent size
+ * limit from {@link getDownloadLimits} (100 MB on mobile, 250 MB on desktop);
+ * if the total exceeds that limit, multiple ZIP parts are downloaded.
+ * Downloads are performed concurrently (up to the device-dependent
+ * concurrency limit) for better performance.
+ *
+ * @param files The files to download and add to the ZIP.
+ * @param baseName The base name for the ZIP file(s).
+ * @param onSuccess Callback invoked after each file is successfully added.
+ * @param onError Callback invoked when a file fails to download.
+ * @param canceller An AbortController to check for cancellation.
+ * @param startingBatchIndex The batch index to start from (for retries).
+ * @returns The next batch index to use for subsequent ZIPs (useful for retries).
*/
-const saveBlobPartAsFile = async (blobPart: BlobPart, fileName: string) =>
- createTypedObjectURL(blobPart, fileName).then((url) =>
- saveAsFileAndRevokeObjectURL(url, fileName),
+const saveAsZip = async (
+ files: EnteFile[],
+ baseName: string,
+ onSuccess: () => void,
+ onError: (file: EnteFile, error: unknown) => void,
+ canceller: AbortController,
+ updateSaveGroup: UpdateSaveGroup,
+ startingBatchIndex = 1,
+): Promise<number> => {
+ const { concurrency, maxZipSize } = getDownloadLimits();
+ const batcher = new ZipBatcher(
+ baseName,
+ maxZipSize,
+ startingBatchIndex,
+ (isDownloading, partNumber) =>
+ updateSaveGroup((g) => ({
+ ...g,
+ isDownloadingZip: isDownloading,
+ currentPart: partNumber,
+ })),
);
-const createTypedObjectURL = async (blobPart: BlobPart, fileName: string) => {
- const blob = blobPart instanceof Blob ? blobPart : new Blob([blobPart]);
- const { mimeType } = await detectFileTypeInfo(new File([blob], fileName));
- return URL.createObjectURL(new Blob([blob], { type: mimeType }));
+ // Set initial part number
+ updateSaveGroup((g) => ({ ...g, currentPart: startingBatchIndex }));
+
+ // Queue of files to process
+ let fileIndex = 0;
+
+ // Track if we've gone offline to stop processing immediately.
+ // Using an object so the value can be mutated by event handlers and
+ // checked synchronously by the async workers.
+ const networkState = { isOffline: !navigator.onLine };
+ const handleOffline = () => {
+ networkState.isOffline = true;
+ };
+ const handleOnline = () => {
+ networkState.isOffline = false;
+ };
+ window.addEventListener("offline", handleOffline);
+ window.addEventListener("online", handleOnline);
+
+ // Mutex for serializing ZIP additions (download is concurrent, but adding
+ // to the ZIP must be serialized to avoid race conditions with batching)
+ let zipMutex: Promise<void> = Promise.resolve();
+ const withZipLock = async <T>(fn: () => Promise<T>): Promise<T> => {
+ const prev = zipMutex;
+ let resolve: () => void;
+ zipMutex = new Promise((r) => (resolve = r));
+ await prev;
+ try {
+ return await fn();
+ } finally {
+ resolve!();
+ }
+ };
+
+ // Process a single file: download, then add to ZIP
+ const processFile = async (): Promise<boolean> => {
+ // Stop immediately if offline or cancelled
+ if (networkState.isOffline || canceller.signal.aborted) {
+ return false;
+ }
+
+ // Get next file to process
+ const currentIndex = fileIndex++;
+ if (currentIndex >= files.length) {
+ return false;
+ }
+
+ const file = files[currentIndex]!;
+ try {
+ // Check again before starting download (value can change via event handler)
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
+ if (networkState.isOffline) {
+ // Put this file back for retry
+ onError(file, new Error("Network offline"));
+ return false;
+ }
+
+ // Download happens concurrently
+ const downloadedData = await downloadFileForZip(file);
+
+ // Adding to ZIP is serialized via mutex
+ await withZipLock(async () => {
+ if (downloadedData.type === "livePhoto") {
+ await batcher.addFile(
+ downloadedData.imageData,
+ downloadedData.imageFileName,
+ );
+ await batcher.addFile(
+ downloadedData.videoData,
+ downloadedData.videoFileName,
+ );
+ } else {
+ await batcher.addFile(
+ downloadedData.data,
+ downloadedData.fileName,
+ );
+ }
+ });
+ onSuccess();
+ } catch (e) {
+ // Individual file failed - mark it for retry but continue with others
+ // Only log non-network errors to avoid log spam when offline
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
+ if (!networkState.isOffline) {
+ log.error(`Failed to download file ${file.id}, skipping`, e);
+ }
+ onError(file, e);
+
+ // Only stop all processing if we went offline (not for individual failures)
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
+ if (networkState.isOffline) {
+ updateSaveGroup((g) => ({
+ ...g,
+ failureReason: "network_offline",
+ }));
+ return false;
+ }
+ // Mark as file error for individual failures
+ updateSaveGroup((g) => ({
+ ...g,
+ failureReason: g.failureReason ?? "file_error",
+ }));
+ // Continue processing remaining files even if this one failed
+ }
+
+ return true;
+ };
+
+ // Worker that continuously processes files until done
+ const worker = async (): Promise<void> => {
+ while (await processFile()) {
+ // Continue processing
+ }
+ };
+
+ try {
+ // Start concurrent workers
+ const workers = Array.from(
+ { length: Math.min(concurrency, files.length) },
+ () => worker(),
+ );
+ await Promise.all(workers);
+
+ // If we went offline, mark remaining files as failed
+ if (networkState.isOffline) {
+ updateSaveGroup((g) => ({
+ ...g,
+ failureReason: "network_offline",
+ }));
+ while (fileIndex < files.length) {
+ const file = files[fileIndex++];
+ if (file) {
+ onError(file, new Error("Network offline"));
+ }
+ }
+ }
+
+ // Flush whatever we have (even partial) unless cancelled
+ if (!canceller.signal.aborted) {
+ await batcher.flush();
+ }
+
+ return batcher.getNextBatchIndex();
+ } finally {
+ // Clean up event listeners
+ window.removeEventListener("offline", handleOffline);
+ window.removeEventListener("online", handleOnline);
+ }
};
/**
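The promise-chaining lock used inside `saveAsZip` is a reusable technique; here is a standalone sketch of the same pattern, detached from the ZipBatcher specifics:

```ts
// Returns a function that runs the given async task only after all
// previously submitted tasks have finished, serializing access to a shared
// resource (here: the in-memory ZIP) while other work stays concurrent.
const createLock = () => {
    let tail: Promise<void> = Promise.resolve();
    return async <T>(task: () => Promise<T>): Promise<T> => {
        let release!: () => void;
        const previous = tail;
        tail = new Promise<void>((resolve) => (release = resolve));
        await previous;
        try {
            return await task();
        } finally {
            release();
        }
    };
};

// Usage sketch: downloads run in parallel, ZIP additions are serialized.
// const withZipLock = createLock();
// await withZipLock(() => batcher.addFile(data, name));
```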
diff --git a/web/packages/media/live-photo.ts b/web/packages/media/live-photo.ts
index 111d8a9e159..4830e23bd50 100644
--- a/web/packages/media/live-photo.ts
+++ b/web/packages/media/live-photo.ts
@@ -103,9 +103,7 @@ export const decodeLivePhoto = async (
);
if (!videoFileName || !videoData)
- throw new Error(
- `Decoded live photo ${fileName} does not have an image`,
- );
+ throw new Error(`Decoded live photo ${fileName} does not have a video`);
return { imageFileName, imageData, videoFileName, videoData };
};
diff --git a/web/yarn.lock b/web/yarn.lock
index 40ab80e6678..3c6b475fc35 100644
--- a/web/yarn.lock
+++ b/web/yarn.lock
@@ -645,6 +645,16 @@
resolved "https://registry.yarnpkg.com/@fontsource-variable/inter/-/inter-5.2.8.tgz#29b11476f5149f6a443b4df6516e26002d87941a"
integrity sha512-kOfP2D+ykbcX/P3IFnokOhVRNoTozo5/JxhAIVYLpea/UBmCQ/YWPBfWIDuBImXX/15KH+eKh4xpEUyS2sQQGQ==
+"@hugeicons/core-free-icons@^1.1.0":
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/@hugeicons/core-free-icons/-/core-free-icons-1.2.1.tgz#c741eaa8bbf1453e9282a64c19a8bbcc8cef5f19"
+ integrity sha512-ho0QdGMkgL+kt+QsZocCsKvJou1rfyVQWARrxIhNLi+9tCKayUUtD9jlHgioaRphmskSl84TxrDm9Ae0G4Uu1g==
+
+"@hugeicons/react@^1.1.4":
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/@hugeicons/react/-/react-1.1.4.tgz#1662f11ebb42c7e16fa7e16e04615db666813a06"
+ integrity sha512-gsc3eZyd2fGqRUThW9+lfjxxsOkz6KNVmRXRgJjP32GL0OnnLJnl3hytKt47CBbiQj2xE2kCw+rnP3UQCThcKw==
+
"@humanfs/core@^0.19.1":
version "0.19.1"
resolved "https://registry.yarnpkg.com/@humanfs/core/-/core-0.19.1.tgz#17c55ca7d426733fe3c561906b8173c336b40a77"