// File: frontend/src/lib/offline/DownloadManager.ts
// (listing metadata: 2026-04-07 16:42:56 +04:00 · 743 lines · 21 KiB · TypeScript)

/**
* DownloadManager
*
* Manages asset downloads with queue, retry, and progress tracking.
* Adapted from hoboken's videoProcessingQueue pattern for frontend use.
*/
import { PRELOAD_CONFIG } from '../../config/preload.config';
import { OFFLINE_CONFIG } from '../../config/offline.config';
import { downloadEventBus } from './DownloadEventBus';
import { StorageManager } from './StorageManager';
import { OfflineDbManager } from '../offlineDb/OfflineDbManager';
import { extractStoragePath } from '../assetUrl';
import { logger } from '../logger';
import type {
PreloadJobStatus,
AssetVariantType,
AssetType,
DownloadQueueItem,
} from '../../types/offline';
/**
 * Internal representation of a single queued or in-flight download.
 * Mirrors the persisted `DownloadQueueItem` plus in-memory-only fields
 * (the abort controller and the `addJob()` promise callbacks).
 */
interface DownloadJob {
  id: string; // Unique job id (`dl-<timestamp>-<random>`)
  assetId: string;
  projectId: string;
  url: string; // Source URL (may be a presigned, expiring URL)
  filename: string;
  variantType: AssetVariantType;
  assetType: AssetType;
  priority: number; // Higher value = downloaded sooner
  status: PreloadJobStatus;
  progress: number; // 0-100, relative to totalBytes when known
  bytesLoaded: number;
  totalBytes: number; // 0 until content-length is known
  retryCount: number;
  addedAt: number; // Date.now() timestamp when the job was queued
  storageKey: string; // Canonical storage key for consistent caching
  createBlobUrl?: boolean; // Create decoded blob URL after download
  persist?: boolean; // Persist to IndexedDB for resume (default: true)
  maxBytes?: number; // Partial download limit (undefined = full download)
  isPartial?: boolean; // Whether this was a partial download (for tracking)
  abortController?: AbortController; // Present only while downloading
  resolve?: () => void; // Settles the promise returned by addJob()
  reject?: (error: Error) => void; // Rejects addJob() after retries exhausted
}
/**
 * Singleton download manager: priority queue, bounded concurrency, retry
 * with linear backoff, progress events via `downloadEventBus`, persistence
 * to IndexedDB (for resume after reload), and a blob-URL cache for instant
 * asset display.
 */
class DownloadManagerClass {
  private queue: DownloadJob[] = [];
  /** In-flight downloads keyed by URL (one entry per active fetch). */
  private activeDownloads: Map<string, DownloadJob> = new Map();
  private isPaused = false;
  private isProcessing = false;
  /** Blob URL cache for instant lookup (storageKey → blobUrl). */
  private readyBlobUrls: Map<string, string> = new Map();
  /**
   * Partial downloads completed in this session (not persisted).
   * Prevents re-downloading the same partial content on repeated page visits.
   */
  private partialDownloadsReady: Set<string> = new Set();
  private config = {
    maxConcurrent: PRELOAD_CONFIG.maxConcurrentDownloads,
    chunkSize: PRELOAD_CONFIG.videoChunkSize,
    maxRetries: PRELOAD_CONFIG.maxRetries,
    retryDelayMs: PRELOAD_CONFIG.retryDelayMs,
    largeFileThreshold: PRELOAD_CONFIG.largeFileThreshold,
  };

  /**
   * Add a download job to the queue.
   *
   * Resolves when the download completes. Resolves immediately when the
   * asset is already cached, already queued/in-flight, or already fetched
   * partially this session. Rejects only after all retries are exhausted.
   */
  async addJob(params: {
    assetId: string;
    projectId: string;
    url: string;
    filename: string;
    variantType: AssetVariantType;
    assetType: AssetType;
    priority?: number;
    storageKey?: string; // Optional, will extract if not provided
    createBlobUrl?: boolean; // Create blob URL after download
    persist?: boolean; // Persist to IndexedDB for resume (default: true)
    maxBytes?: number; // Download limit in bytes (for partial preload)
  }): Promise<void> {
    const storageKey = params.storageKey || extractStoragePath(params.url);
    const isPartialDownload = params.maxBytes !== undefined;
    // For partial downloads, check session cache (not persisted to storage)
    if (isPartialDownload && this.partialDownloadsReady.has(storageKey)) {
      logger.info('[DownloadManager] Partial download already ready (session)', {
        storageKey: storageKey.slice(-50),
      });
      return;
    }
    // Check if already downloaded using canonical key (full downloads only)
    if (!isPartialDownload) {
      const hasAsset = await StorageManager.hasAsset(storageKey);
      if (hasAsset) {
        // Already cached - create blob URL if requested
        if (params.createBlobUrl && !this.readyBlobUrls.has(storageKey)) {
          await this.createBlobUrlFromCache(storageKey);
        }
        return;
      }
    }
    // Check if already in queue (use storageKey for deduplication)
    if (
      this.queue.some((j) => j.storageKey === storageKey) ||
      Array.from(this.activeDownloads.values()).some(
        (j) => j.storageKey === storageKey,
      )
    ) {
      return;
    }
    return new Promise((resolve, reject) => {
      // For partial downloads, don't persist and don't create blob URL
      // (video will play from presigned URL, browser handles buffering).
      // Fix: reuse the outer `isPartialDownload` instead of recomputing it.
      const shouldPersist = isPartialDownload
        ? false
        : (params.persist ?? true);
      const shouldCreateBlobUrl = isPartialDownload
        ? false
        : (params.createBlobUrl ?? false);
      const job: DownloadJob = {
        id: `dl-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`,
        assetId: params.assetId,
        projectId: params.projectId,
        url: params.url,
        filename: params.filename,
        variantType: params.variantType,
        assetType: params.assetType,
        priority:
          params.priority ??
          this.calculatePriority(params.assetType, params.variantType),
        status: 'queued',
        progress: 0,
        bytesLoaded: 0,
        totalBytes: 0,
        retryCount: 0,
        addedAt: Date.now(),
        storageKey,
        createBlobUrl: shouldCreateBlobUrl,
        persist: shouldPersist,
        maxBytes: params.maxBytes,
        isPartial: isPartialDownload,
        resolve,
        reject,
      };
      // Persist to IndexedDB for resume capability (default true).
      // Fire-and-forget on purpose: queueing must not wait on IndexedDB.
      if (job.persist !== false) {
        void this.persistQueueItem(job);
      }
      // Insert in priority order (higher priority first)
      const insertIndex = this.queue.findIndex(
        (q) => q.priority < job.priority,
      );
      if (insertIndex === -1) {
        this.queue.push(job);
      } else {
        this.queue.splice(insertIndex, 0, job);
      }
      downloadEventBus.emitQueueUpdate();
      void this.processQueue();
    });
  }

  /**
   * Calculate priority based on asset and variant type.
   * Unknown types fall back to 0 so they still download, just last.
   */
  private calculatePriority(
    assetType: AssetType,
    variantType: AssetVariantType,
  ): number {
    const typePriority = PRELOAD_CONFIG.priority.assetType[assetType] || 0;
    const variantPriority = PRELOAD_CONFIG.priority.variant[variantType] || 0;
    return typePriority + variantPriority;
  }

  /**
   * Persist queue item to IndexedDB so the queue survives page reloads
   * (see restoreQueue()).
   */
  private async persistQueueItem(job: DownloadJob): Promise<void> {
    const queueItem: DownloadQueueItem = {
      id: job.id,
      projectId: job.projectId,
      assetId: job.assetId,
      url: job.url,
      filename: job.filename,
      status: job.status,
      priority: job.priority,
      retryCount: job.retryCount,
      bytesLoaded: job.bytesLoaded,
      totalBytes: job.totalBytes,
      addedAt: job.addedAt,
    };
    await OfflineDbManager.addToQueue(queueItem);
  }

  /**
   * Process the download queue: start jobs until the concurrency limit is
   * reached. Re-entered after every job finishes (see downloadAsset finally).
   */
  private async processQueue(): Promise<void> {
    if (this.isProcessing || this.isPaused) return;
    if (this.queue.length === 0 && this.activeDownloads.size === 0) return;
    this.isProcessing = true;
    while (
      !this.isPaused &&
      this.activeDownloads.size < this.config.maxConcurrent &&
      this.queue.length > 0
    ) {
      const job = this.queue.shift();
      if (!job) break;
      this.activeDownloads.set(job.url, job);
      // Intentionally not awaited: downloads run concurrently.
      void this.downloadAsset(job);
    }
    this.isProcessing = false;
  }

  /**
   * Download a single asset with progress tracking.
   *
   * Streams the response body so progress events can be emitted per chunk.
   * Partial downloads (job.maxBytes set) use an HTTP Range request, stop
   * reading once the limit is reached, and are never written to the offline
   * cache — they only warm the browser/Service Worker for playback.
   */
  private async downloadAsset(job: DownloadJob): Promise<void> {
    job.status = 'downloading';
    job.abortController = new AbortController();
    // Fix: only touch the persistent queue for persisted jobs (partial
    // downloads set persist=false and were never written to IndexedDB).
    if (job.persist !== false) {
      await OfflineDbManager.updateQueueStatus(job.id, 'downloading');
    }
    downloadEventBus.emitPreloadStart({
      jobId: job.id,
      assetId: job.assetId,
      url: job.url,
    });
    try {
      // Build request headers - use Range header for partial downloads
      const headers: HeadersInit = {};
      if (job.maxBytes) {
        headers['Range'] = `bytes=0-${job.maxBytes - 1}`;
        logger.info('[DownloadManager] Partial download requested', {
          url: job.url.slice(-50),
          maxBytes: job.maxBytes,
        });
      }
      const response = await fetch(job.url, {
        signal: job.abortController.signal,
        headers,
      });
      // Accept both 200 OK and 206 Partial Content
      if (!response.ok && response.status !== 206) {
        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
      }
      // NOTE: for a 206 response this is the length of the returned range,
      // so progress is measured relative to the requested slice.
      const contentLength = response.headers.get('content-length');
      job.totalBytes = contentLength ? parseInt(contentLength, 10) : 0;
      // For partial downloads, track if we reached the limit.
      // Fix: coerce to a real boolean (was `boolean | number | undefined`).
      const isPartialResponse =
        response.status === 206 || job.maxBytes !== undefined;
      let blob: Blob;
      if (response.body) {
        // Stream with progress tracking
        const reader = response.body.getReader();
        const chunks: BlobPart[] = [];
        let reachedLimit = false;
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          chunks.push(value);
          job.bytesLoaded += value.length;
          // Check if we've reached the maxBytes limit
          if (job.maxBytes && job.bytesLoaded >= job.maxBytes) {
            reachedLimit = true;
            logger.info('[DownloadManager] Reached partial download limit', {
              bytesLoaded: job.bytesLoaded,
              maxBytes: job.maxBytes,
            });
            // Cancel the remaining download gracefully
            try {
              await reader.cancel();
            } catch {
              // Ignore cancel errors - stream may already be closed
            }
            break;
          }
          job.progress =
            job.totalBytes > 0
              ? Math.round((job.bytesLoaded / job.totalBytes) * 100)
              : 0;
          downloadEventBus.emitPreloadProgress({
            jobId: job.id,
            progress: job.progress,
            bytesLoaded: job.bytesLoaded,
            totalBytes: job.totalBytes,
          });
          // Only update queue progress if persisting
          if (job.persist !== false) {
            await OfflineDbManager.updateQueueProgress(
              job.id,
              job.bytesLoaded,
              job.totalBytes,
            );
          }
        }
        blob = new Blob(chunks, {
          type:
            response.headers.get('content-type') || 'application/octet-stream',
        });
        // For partial downloads, mark as complete even if we didn't get everything
        if (reachedLimit || isPartialResponse) {
          job.progress = 100; // Consider partial download as "complete"
        }
      } else {
        // No streaming, get blob directly
        blob = await response.blob();
        job.totalBytes = blob.size;
        job.bytesLoaded = blob.size;
        job.progress = 100;
      }
      // For partial downloads, don't store to cache (not useful for offline)
      // Full downloads are stored for offline access
      if (!job.isPartial) {
        // Store the asset using canonical storage key
        await StorageManager.storeAsset(job.storageKey, blob, {
          id: job.assetId,
          projectId: job.projectId,
          filename: job.filename,
          variantType: job.variantType,
          assetType: job.assetType,
        });
        // Create blob URL if requested
        if (job.createBlobUrl) {
          await this.createBlobUrlFromCache(job.storageKey);
        }
      } else {
        // Mark partial download as ready in session cache
        this.partialDownloadsReady.add(job.storageKey);
        // Register with Service Worker for full-file caching during playback.
        // When the browser fetches the full media, SW will cache it using the
        // storage key instead of the expiring presigned URL.
        this.registerUrlForCaching(job.url, job.storageKey);
        logger.info('[DownloadManager] Partial download complete', {
          storageKey: job.storageKey.slice(-50),
          bytesLoaded: job.bytesLoaded,
        });
      }
      // Mark as completed
      job.status = 'completed';
      if (job.persist !== false) {
        await OfflineDbManager.removeFromQueue(job.id);
      }
      downloadEventBus.emitPreloadComplete({
        jobId: job.id,
        assetId: job.assetId,
      });
      job.resolve?.();
    } catch (error) {
      if (job.abortController?.signal.aborted) {
        // Download was cancelled (pauseAll / cancelJob / clearQueue).
        // Fix: the finally block removes this job from activeDownloads, which
        // previously stranded paused jobs forever — resumeAll() only scans
        // activeDownloads. Re-queue it while the manager is paused so
        // resumeAll() can restart it; explicit cancellations (isPaused is
        // false) are intentionally dropped.
        job.status = 'paused';
        if (this.isPaused) {
          this.queue.unshift(job);
        }
        return;
      }
      job.retryCount++;
      const errorMessage =
        error instanceof Error ? error.message : 'Unknown error';
      if (job.retryCount < this.config.maxRetries) {
        // Retry with linear backoff (delay grows with the attempt count)
        job.status = 'queued';
        if (job.persist !== false) {
          await OfflineDbManager.updateQueueStatus(job.id, 'queued');
        }
        setTimeout(() => {
          this.queue.unshift(job);
          void this.processQueue();
        }, this.config.retryDelayMs * job.retryCount);
      } else {
        // Max retries exceeded
        job.status = 'error';
        if (job.persist !== false) {
          await OfflineDbManager.updateQueueStatus(
            job.id,
            'error',
            errorMessage,
          );
        }
        downloadEventBus.emitPreloadError({
          jobId: job.id,
          assetId: job.assetId,
          error: errorMessage,
        });
        job.reject?.(error instanceof Error ? error : new Error(errorMessage));
      }
    } finally {
      this.activeDownloads.delete(job.url);
      downloadEventBus.emitQueueUpdate();
      void this.processQueue();
    }
  }

  /**
   * Pause all downloads: aborts in-flight fetches; downloadAsset()'s abort
   * handler re-queues them so resumeAll() can restart from the beginning.
   */
  pauseAll(): void {
    this.isPaused = true;
    this.activeDownloads.forEach((job) => {
      job.abortController?.abort();
      job.status = 'paused';
    });
    downloadEventBus.emitQueueUpdate();
  }

  /**
   * Resume all downloads previously paused via pauseAll().
   */
  resumeAll(): void {
    this.isPaused = false;
    // Move paused jobs whose abort rejection has not fired yet back to queue
    this.activeDownloads.forEach((job) => {
      if (job.status === 'paused') {
        job.status = 'queued';
        this.queue.unshift(job);
      }
    });
    this.activeDownloads.clear();
    // Fix: jobs whose abort rejection already fired were re-queued by
    // downloadAsset() with status 'paused'; flip them back to 'queued'.
    this.queue.forEach((job) => {
      if (job.status === 'paused') {
        job.status = 'queued';
      }
    });
    downloadEventBus.emitQueueUpdate();
    void this.processQueue();
  }

  /**
   * Cancel a specific download (active or queued).
   * Fix: settles the pending addJob() promise so awaiting callers don't hang.
   */
  cancelJob(jobId: string): void {
    // Check active downloads
    const entries = Array.from(this.activeDownloads.entries());
    for (const [url, job] of entries) {
      if (job.id === jobId) {
        job.abortController?.abort();
        this.activeDownloads.delete(url);
        job.resolve?.(); // Resolve (not reject) to avoid unhandled rejections
        void OfflineDbManager.removeFromQueue(jobId);
        downloadEventBus.emitQueueUpdate();
        return;
      }
    }
    // Check queue
    const index = this.queue.findIndex((j) => j.id === jobId);
    if (index !== -1) {
      const [removed] = this.queue.splice(index, 1);
      removed?.resolve?.(); // Settle the pending addJob() promise
      void OfflineDbManager.removeFromQueue(jobId);
      downloadEventBus.emitQueueUpdate();
    }
  }

  /**
   * Cancel all downloads for a project.
   * Fix: settles pending addJob() promises so awaiting callers don't hang.
   */
  cancelProjectDownloads(projectId: string): void {
    // Cancel active downloads
    const entries = Array.from(this.activeDownloads.entries());
    for (const [url, job] of entries) {
      if (job.projectId === projectId) {
        job.abortController?.abort();
        this.activeDownloads.delete(url);
        job.resolve?.();
      }
    }
    // Remove from queue
    this.queue = this.queue.filter((j) => {
      if (j.projectId !== projectId) return true;
      j.resolve?.();
      return false;
    });
    // Clear from IndexedDB
    void OfflineDbManager.clearProjectQueue(projectId);
    downloadEventBus.emitQueueUpdate();
  }

  /**
   * Clear entire queue and abort everything in flight.
   * Fix: settles pending addJob() promises so awaiting callers don't hang.
   */
  clearQueue(): void {
    // Abort all active downloads
    this.activeDownloads.forEach((job) => {
      job.abortController?.abort();
      job.resolve?.();
    });
    this.activeDownloads.clear();
    // Clear queue
    this.queue.forEach((job) => job.resolve?.());
    this.queue = [];
    // Clear IndexedDB
    void OfflineDbManager.clearQueue();
    downloadEventBus.emitQueueUpdate();
  }

  /**
   * Get current queue status (snapshot for UI badges etc.).
   */
  getStatus(): {
    queueLength: number;
    activeCount: number;
    isPaused: boolean;
  } {
    return {
      queueLength: this.queue.length,
      activeCount: this.activeDownloads.size,
      isPaused: this.isPaused,
    };
  }

  /**
   * Restore queue from IndexedDB (for resume after page reload).
   * Restored jobs default to variantType 'original' / assetType 'other'
   * because the persisted record does not carry those fields.
   */
  async restoreQueue(): Promise<void> {
    const pendingItems = await OfflineDbManager.getPendingQueue();
    for (const item of pendingItems) {
      // Get canonical storage key
      const storageKey = extractStoragePath(item.url);
      // Skip if already downloaded
      const hasAsset = await StorageManager.hasAsset(storageKey);
      if (hasAsset) {
        await OfflineDbManager.removeFromQueue(item.id);
        continue;
      }
      // Re-add to queue (no resolve/reject — nobody awaits restored jobs)
      const job: DownloadJob = {
        id: item.id,
        assetId: item.assetId,
        projectId: item.projectId,
        url: item.url,
        filename: item.filename,
        variantType: 'original',
        assetType: 'other',
        priority: item.priority,
        status: 'queued',
        progress: 0,
        bytesLoaded: item.bytesLoaded,
        totalBytes: item.totalBytes,
        retryCount: item.retryCount,
        addedAt: item.addedAt,
        storageKey,
        createBlobUrl: true, // Create blob URL for resumed downloads
        persist: true,
      };
      this.queue.push(job);
    }
    // Sort by priority
    this.queue.sort((a, b) => b.priority - a.priority);
    if (this.queue.length > 0) {
      downloadEventBus.emitQueueUpdate();
      void this.processQueue();
    }
  }

  /**
   * Get a ready blob URL for instant display (O(1) lookup), or null when
   * no decoded blob URL has been created for this URL's storage key.
   */
  getReadyBlobUrl(url: string): string | null {
    const storageKey = extractStoragePath(url);
    return this.readyBlobUrls.get(storageKey) || null;
  }

  /**
   * Cache an externally fetched blob and register blob URL for instant lookup.
   * Use this when fetching via XHR (e.g., transition playback) to enable caching.
   *
   * @returns the blob URL created for the stored blob
   */
  async cacheBlob(
    storageKey: string,
    blob: Blob,
    metadata: {
      assetType: AssetType;
      projectId?: string;
    },
  ): Promise<string> {
    // Store in Cache API / IndexedDB via existing StorageManager
    await StorageManager.storeAsset(storageKey, blob, {
      id: `cached-${storageKey}`,
      projectId: metadata.projectId || '',
      filename: storageKey.split('/').pop() || 'asset',
      variantType: 'original',
      assetType: metadata.assetType,
    });
    // Create blob URL and register for instant O(1) lookup
    const blobUrl = URL.createObjectURL(blob);
    this.readyBlobUrls.set(storageKey, blobUrl);
    // Emit event for consumers (existing pattern)
    downloadEventBus.emitBlobUrlReady({
      storageKey,
      blobUrl,
    });
    logger.info('[DownloadManager] Cached external blob', {
      storageKey: storageKey.slice(-50),
      size: blob.size,
    });
    return blobUrl;
  }

  /**
   * Create blob URL from cached asset and store in readyBlobUrls map.
   * Images are pre-decoded so they can render without a white flash.
   * Errors are logged, never thrown (best-effort).
   */
  private async createBlobUrlFromCache(storageKey: string): Promise<void> {
    try {
      const blob = await StorageManager.getAsset(storageKey);
      if (!blob) {
        logger.info('[DownloadManager] No blob found for', {
          storageKey: storageKey.slice(-50),
        });
        return;
      }
      const blobUrl = URL.createObjectURL(blob);
      // Decode images to prevent white flash
      if (this.isImageUrl(storageKey)) {
        await this.decodeImage(blobUrl);
      }
      this.readyBlobUrls.set(storageKey, blobUrl);
      // Emit event for consumers
      downloadEventBus.emitBlobUrlReady({
        storageKey,
        blobUrl,
      });
      logger.info('[DownloadManager] Blob URL ready', {
        storageKey: storageKey.slice(-50),
        blobUrl: blobUrl.slice(0, 30),
      });
    } catch (error) {
      logger.error('[DownloadManager] Failed to create blob URL', {
        storageKey: storageKey.slice(-50),
        error: error instanceof Error ? error.message : 'unknown',
      });
    }
  }

  /**
   * Register a presigned URL → storage key mapping with the Service Worker.
   * This enables the SW to cache the full response when the browser fetches
   * the media during playback, using the canonical storage key instead of the
   * expiring presigned URL. No-op when no SW controls the page.
   */
  private registerUrlForCaching(presignedUrl: string, storageKey: string): void {
    if (navigator.serviceWorker?.controller) {
      navigator.serviceWorker.controller.postMessage({
        type: 'REGISTER_CACHE_URL',
        payload: { presignedUrl, storageKey },
      });
      logger.info('[DownloadManager] Registered URL for SW caching', {
        storageKey: storageKey.slice(-40),
      });
    }
  }

  /**
   * Clear blob URLs and partial downloads cache (call on unmount to prevent
   * memory leaks from un-revoked object URLs).
   */
  clearBlobUrls(): void {
    this.readyBlobUrls.forEach((blobUrl) => URL.revokeObjectURL(blobUrl));
    this.readyBlobUrls.clear();
    this.partialDownloadsReady.clear();
    // Clear SW URL mappings (optional, SW has its own cleanup interval)
    if (navigator.serviceWorker?.controller) {
      navigator.serviceWorker.controller.postMessage({
        type: 'CLEAR_URL_MAPPINGS',
      });
    }
  }

  /**
   * Check if URL is an image based on extension.
   * NOTE(review): uses substring matching, so an extension anywhere in the
   * URL (including the query string) matches — kept for compatibility.
   */
  private isImageUrl(url: string): boolean {
    const imageExtensions = [
      '.jpg',
      '.jpeg',
      '.png',
      '.gif',
      '.webp',
      '.avif',
      '.svg',
    ];
    const lowerUrl = url.toLowerCase();
    return imageExtensions.some((ext) => lowerUrl.includes(ext));
  }

  /**
   * Decode image for instant display (prevents white flash).
   * Always resolves — decode failures fall back to normal rendering.
   */
  private decodeImage(blobUrl: string): Promise<void> {
    return new Promise((resolve) => {
      const img = new Image();
      img.src = blobUrl;
      if (typeof img.decode === 'function') {
        img
          .decode()
          .then(() => resolve())
          .catch(() => resolve());
      } else {
        img.onload = () => resolve();
        img.onerror = () => resolve();
      }
    });
  }
}
// Singleton instance — the whole app shares one queue and blob-URL cache.
export const downloadManager = new DownloadManagerClass();