/**
 * Utilities for processing large batches of data in chunks with retry logic
 * and fault tolerance for file imports and other bulk operations
 */
export interface ChunkProcessorOptions {
  chunkSize: number;
  maxRetries: number;
  baseDelayMs: number;
  maxDelayMs: number;
  exponentialBackoff: boolean;
}

export interface ChunkProcessResult<T> {
  success: boolean;
  processedCount: number;
  failedCount: number;
  results: T[];
  errors: ChunkError[];
}

export interface ChunkError {
  chunkIndex: number;
  itemIndex: number;
  item: unknown;
  error: Error;
  retryCount: number;
}

export interface ChunkProgress {
  totalChunks: number;
  completedChunks: number;
  currentChunk: number;
  totalItems: number;
  processedItems: number;
  failedItems: number;
}

const DEFAULT_OPTIONS: ChunkProcessorOptions = {
  chunkSize: 5,
  maxRetries: 3,
  baseDelayMs: 1000,
  maxDelayMs: 30000,
  exponentialBackoff: true,
};

/**
 * Split an array into chunks of specified size
 */
export function chunkArray<T>(array: T[], chunkSize: number): T[][] {
  const chunks: T[][] = [];
  for (let i = 0; i < array.length; i += chunkSize) {
    chunks.push(array.slice(i, i + chunkSize));
  }
  return chunks;
}

/**
 * Calculate delay for retry with exponential backoff
 */
export function calculateRetryDelay(
  retryCount: number,
  baseDelayMs: number,
  maxDelayMs: number,
  exponentialBackoff: boolean
): number {
  if (!exponentialBackoff) {
    return Math.min(baseDelayMs, maxDelayMs);
  }
  const delay = baseDelayMs * Math.pow(2, retryCount);
  return Math.min(delay, maxDelayMs);
}

/**
 * Check if an error is retryable (network, auth, temporary server issues)
 */
export function isRetryableError(error: Error): boolean {
  const message = error.message.toLowerCase();

  // Network errors
  if (message.includes('network') || message.includes('fetch')) {
    return true;
  }

  // HTTP errors that might be temporary
  if (message.includes('500') || message.includes('502') ||
      message.includes('503') || message.includes('504')) {
    return true;
  }

  // Auth token expiration (might be refreshable)
  if (message.includes('401') && message.includes('token')) {
    return true;
  }

  // Rate limiting
  if (message.includes('429') || message.includes('rate limit')) {
    return true;
  }

  // Connection timeouts (message is lowercased above, so match 'etimedout')
  if (message.includes('timeout') || message.includes('etimedout')) {
    return true;
  }

  return false;
}

/**
 * Sleep for specified milliseconds
 */
export function sleep(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms));
}

/**
 * Process items in chunks with retry logic and progress reporting
 */
export async function processInChunks<TInput, TOutput>(
  items: TInput[],
  processor: (chunk: TInput[], chunkIndex: number) => Promise<TOutput[]>,
  options: Partial<ChunkProcessorOptions> = {},
  onProgress?: (progress: ChunkProgress) => void,
  onChunkComplete?: (chunkIndex: number, results: TOutput[], errors: Error[]) => void
): Promise<ChunkProcessResult<TOutput>> {
  const opts = { ...DEFAULT_OPTIONS, ...options };
  const chunks = chunkArray(items, opts.chunkSize);
  const allResults: TOutput[] = [];
  const allErrors: ChunkError[] = [];
  let processedItems = 0;
  let failedItems = 0;

  for (let chunkIndex = 0; chunkIndex < chunks.length; chunkIndex++) {
    const chunk = chunks[chunkIndex];
    let retryCount = 0;
    let chunkSuccess = false;
    let chunkResults: TOutput[] = [];
    const chunkErrors: Error[] = [];

    // Update progress
    if (onProgress) {
      onProgress({
        totalChunks: chunks.length,
        completedChunks: chunkIndex,
        currentChunk: chunkIndex + 1,
        totalItems: items.length,
        processedItems,
        failedItems,
      });
    }

    // Retry loop for current chunk
    while (retryCount <= opts.maxRetries && !chunkSuccess) {
      try {
        console.log(`Processing chunk ${chunkIndex + 1}/${chunks.length}, attempt ${retryCount + 1}`);
        chunkResults = await processor(chunk, chunkIndex);
        chunkSuccess = true;
        processedItems += chunk.length;
        console.log(`Chunk ${chunkIndex + 1} completed successfully with ${chunkResults.length} results`);
      } catch (error) {
        console.error(`Chunk ${chunkIndex + 1} failed on attempt ${retryCount + 1}:`, error);
        const err = error instanceof Error ? error : new Error(String(error));
        chunkErrors.push(err);

        // Check if error is retryable
        if (retryCount < opts.maxRetries && isRetryableError(err)) {
          const delay = calculateRetryDelay(retryCount, opts.baseDelayMs, opts.maxDelayMs, opts.exponentialBackoff);
          console.log(`Retrying chunk ${chunkIndex + 1} in ${delay}ms...`);
          await sleep(delay);
          retryCount++;
        } else {
          // Mark all items in chunk as failed
          chunk.forEach((item, itemIndex) => {
            allErrors.push({
              chunkIndex,
              itemIndex,
              item,
              error: err,
              retryCount,
            });
          });
          failedItems += chunk.length;
          break;
        }
      }
    }

    if (chunkSuccess) {
      allResults.push(...chunkResults);
    }

    // Notify about chunk completion
    if (onChunkComplete) {
      onChunkComplete(chunkIndex, chunkResults, chunkErrors);
    }
  }

  // Final progress update
  if (onProgress) {
    onProgress({
      totalChunks: chunks.length,
      completedChunks: chunks.length,
      currentChunk: chunks.length,
      totalItems: items.length,
      processedItems,
      failedItems,
    });
  }

  return {
    success: allErrors.length === 0,
    processedCount: processedItems,
    failedCount: failedItems,
    results: allResults,
    errors: allErrors,
  };
}

/**
 * Specialized version for file import chunks
 */
export interface FileImportChunkProcessor {
  (
    files: unknown[],
    chunkIndex: number,
    getAuthToken: () => Promise<string>
  ): Promise<{ fileIds: string[] }>;
}

export async function processFileImportChunks(
  files: unknown[],
  processor: FileImportChunkProcessor,
  getAuthToken: () => Promise<string>,
  options: Partial<ChunkProcessorOptions> = {},
  onProgress?: (progress: ChunkProgress) => void
): Promise<ChunkProcessResult<{ fileIds: string[] }>> {
  return processInChunks(
    files,
    async (chunk, chunkIndex) => {
      const result = await processor(chunk, chunkIndex, getAuthToken);
      return [result]; // Return array to match expected type
    },
    options,
    onProgress,
    (chunkIndex, results, errors) => {
      console.log(`File import chunk ${chunkIndex + 1} completed:`, {
        successfulResults: results.length,
        errors: errors.length,
        fileIds: results.flatMap(r => r.fileIds),
      });
    }
  );
}
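/**
 * Usage sketch (illustrative, not part of the module above): driving
 * processInChunks for a bulk upload. The '/api/records/batch' endpoint and
 * payload shape are hypothetical placeholders.
 */
async function importRecords(records: { id: string }[]): Promise<void> {
  const result = await processInChunks<{ id: string }, string>(
    records,
    async (chunk) => {
      const res = await fetch('/api/records/batch', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ records: chunk }),
      });
      if (!res.ok) {
        // Include the status code so isRetryableError() can classify 5xx responses.
        throw new Error(`HTTP ${res.status}`);
      }
      const data = (await res.json()) as { ids: string[] };
      return data.ids;
    },
    { chunkSize: 10, maxRetries: 2 },
    (p) => console.log(`chunk ${p.currentChunk}/${p.totalChunks}`),
  );
  console.log(`done: ${result.processedCount} ok, ${result.failedCount} failed`);
}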
/**
 * Authentication utilities for handling token refresh and validation
 * Helps prevent 401 errors during long-running operations
 */
export interface TokenValidationResult {
  isValid: boolean;
  isExpiringSoon: boolean; // Expires within the next 5 minutes
  expiresAt?: Date;
  error?: string;
}

/**
 * Check if a JWT token is valid and not expiring soon
 */
export function validateToken(token: string): TokenValidationResult {
  try {
    // Decode JWT without verification (just to check expiration)
    const parts = token.split('.');
    if (parts.length !== 3) {
      return { isValid: false, isExpiringSoon: false, error: 'Invalid token format' };
    }

    // JWT segments are base64url-encoded; convert to base64 before atob()
    const payload = JSON.parse(atob(parts[1].replace(/-/g, '+').replace(/_/g, '/')));
    const exp = payload.exp;

    if (!exp) {
      return { isValid: false, isExpiringSoon: false, error: 'No expiration in token' };
    }

    const expiresAt = new Date(exp * 1000);
    const now = new Date();
    const fiveMinutesFromNow = new Date(now.getTime() + 5 * 60 * 1000);

    const isValid = expiresAt > now;
    const isExpiringSoon = expiresAt <= fiveMinutesFromNow;

    return {
      isValid,
      isExpiringSoon,
      expiresAt,
    };
  } catch (error) {
    return {
      isValid: false,
      isExpiringSoon: false,
      error: error instanceof Error ? error.message : 'Token validation error',
    };
  }
}

/**
 * Check if an error is authentication-related
 */
export function isAuthError(error: Error): boolean {
  const message = error.message.toLowerCase();
  return message.includes('401') ||
    message.includes('unauthorized') ||
    message.includes('invalid token') ||
    message.includes('token expired');
}

/**
 * Extract error details from authentication failures
 */
export function getAuthErrorDetails(error: Error): {
  isTokenExpired: boolean;
  isTokenInvalid: boolean;
  shouldRetry: boolean;
  message: string;
} {
  const message = error.message.toLowerCase();
  const isTokenExpired = message.includes('expired') || message.includes('exp');
  const isTokenInvalid = message.includes('invalid') || message.includes('malformed');
  const shouldRetry = isTokenExpired; // Only retry on expiration, not on invalid tokens

  return {
    isTokenExpired,
    isTokenInvalid,
    shouldRetry,
    message: error.message,
  };
}
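/**
 * Sketch of how these helpers might be combined before each chunk: refresh
 * proactively when the token is invalid or close to expiry, rather than
 * reacting to a 401 mid-batch. `refreshAccessToken` is a hypothetical
 * placeholder for the app's real refresh call.
 */
async function getFreshToken(
  currentToken: string,
  refreshAccessToken: () => Promise<string>,
): Promise<string> {
  const check = validateToken(currentToken);
  if (!check.isValid || check.isExpiringSoon) {
    return refreshAccessToken();
  }
  return currentToken;
}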
// Use chunked processing for better fault tolerance
const chunkSize = body.chunkSize || 5; // Default to 5 files per chunk
const createdFiles: string[] = [];
const failedFiles: Array<{ fileName: string; error: string }> = [];

const processChunk = async (chunk: typeof body.files, chunkIndex: number) => {
  const chunkFiles: string[] = [];
  const now = new Date();

  console.log(`Processing chunk ${chunkIndex + 1} with ${chunk.length} files`);

  // Process files in chunk sequentially for database consistency
  for (const fileData of chunk) {
    try {
      const fileId = nanoid(21);

      // Validate required fields
      if (
        !fileData.fileName ||
        !fileData.xxh64Hash ||
        !fileData.locationId ||
        !fileData.timestampLocal ||
        fileData.duration === undefined ||
        fileData.sampleRate === undefined
      ) {
        throw new Error(
          "Missing required file fields: fileName, xxh64Hash, locationId, timestampLocal, duration, sampleRate",
        );
      }

      // Insert into file table
      await db.insert(file).values({
        id: fileId,
        fileName: fileData.fileName,
        path: fileData.path || null,
        xxh64Hash: fileData.xxh64Hash,
        locationId: fileData.locationId,
        timestampLocal: new Date(fileData.timestampLocal),
        clusterId: fileData.clusterId || null,
        duration: fileData.duration.toString(),
        sampleRate: fileData.sampleRate,
        description: fileData.description || null,
        upload: fileData.upload || false,
        maybeSolarNight: fileData.maybeSolarNight ?? null,
        maybeCivilNight: fileData.maybeCivilNight ?? null,
        moonPhase: fileData.moonPhase?.toString() || null,
        createdBy: userId,
        createdAt: now,
        lastModified: now,
        modifiedBy: userId,
        active: true,
      });

      // Insert into file_dataset table
      await db.insert(fileDataset).values({
        fileId: fileId,
        datasetId: body.datasetId,
        createdAt: now,
        createdBy: userId,
        lastModified: now,
        modifiedBy: userId,
      });

      // Insert moth metadata if provided
      if (fileData.mothMetadata) {
        await db.insert(mothMetadata).values({
          fileId: fileId,
          timestamp: new Date(fileData.mothMetadata.timestamp),
          recorderId: fileData.mothMetadata.recorderId || null,
          gain: fileData.mothMetadata.gain || null,
          batteryV: fileData.mothMetadata.batteryV?.toString() || null,
          tempC: fileData.mothMetadata.tempC?.toString() || null,
          createdAt: now,
          createdBy: userId,
          lastModified: now,
          modifiedBy: userId,
          active: true,
        });
      }

      chunkFiles.push(fileId);
      console.log(`Successfully processed file: ${fileData.fileName}`);
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      console.error(`Error processing file ${fileData.fileName}:`, errorMessage);
      failedFiles.push({
        fileName: fileData.fileName,
        error: errorMessage,
      });

      // For validation errors, don't retry the entire chunk
      if (errorMessage.includes("Missing required file fields")) {
        continue;
      }

      // For database errors, fail the chunk to trigger retry
      throw error;
    }
  }

  return { fileIds: chunkFiles };
};

// Process files in chunks with retry logic
const result = await processInChunks<typeof body.files[0], { fileIds: string[] }>(
  body.files,
  async (chunk: typeof body.files, chunkIndex: number) => {
    const chunkResult = await processChunk(chunk, chunkIndex);
    return [chunkResult]; // Return array to match expected type
  },
  {
    chunkSize,
    maxRetries: 3,
    baseDelayMs: 1000,
    maxDelayMs: 10000,
    exponentialBackoff: true,
  },
  (progress: ChunkProgress) => {
    console.log(`Import progress: ${progress.processedItems}/${progress.totalItems} files processed`);
  },
);

// Collect all successful file IDs
result.results.forEach((chunkResult: { fileIds: string[] }) => {
  if (chunkResult && typeof chunkResult === 'object' && 'fileIds' in chunkResult) {
    createdFiles.push(...chunkResult.fileIds);
  }
});

// Add failed files from chunk processing errors
result.errors.forEach(chunkError => {
  const fileName = (chunkError.item as { fileName?: string })?.fileName || 'unknown';
  failedFiles.push({
    fileName,
    error: chunkError.error.message,
  });
});

const response: FileImportResponse = {
  message: result.success
    ? "Files imported successfully"
    : `Import completed with ${failedFiles.length} failures`,
  data: {
    importedCount: createdFiles.length,
    failedCount: failedFiles.length,
    fileIds: createdFiles,
    ...(failedFiles.length > 0 && { errors: failedFiles }),
  },
};

return c.json(response, result.success ? 200 : 207); // 207 Multi-Status for partial success
} catch (error) {
  console.error("Error importing files:", error);

  // Check for auth errors
  if (error && typeof error === "object" && "message" in error) {
    const errorMessage = error.message as string;
    if (errorMessage.includes("401") || errorMessage.includes("Unauthorized")) {
      const errorResponse = handleAuthError("file import");
      return c.json(errorResponse, 401);
    }
  }

  const errorResponse = standardErrorResponse(
    error,
    "Failed to import files",
  );
  return c.json(errorResponse, 500);
}
});

/**
 * POST /api/file-import/chunk
 * Import a single chunk of files - used by frontend for chunked processing
 * This endpoint handles authentication refresh and provides better error granularity
 */
fileImport.post("/chunk", authenticate, async (c) => {
  try {
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    const db = createDatabase(c.env);
    const body = (await c.req.json()) as ChunkedFileImportRequest;

    // Check user permission
    const hasPermission = await checkUserPermission(
      db,
      userId,
      body.datasetId,
      "EDIT",
    );
    if (!hasPermission) {
      return c.json(
        {
          error: "Forbidden",
          message: "You do not have permission to upload files to this dataset",
        },
        403,
      );
    }

    // Validate request
    if (!body.files || !Array.isArray(body.files) || body.files.length === 0) {
      return c.json(
        {
          error: "Bad Request",
          message: "Files array is required and must not be empty",
        },
        400,
      );
    }

    console.log(`Processing chunk ${body.chunkIndex + 1}/${body.totalChunks} with ${body.files.length} files`);
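/**
 * Possible hardening (a sketch, not the handler's current behavior): wrap the
 * inserts for one file in a single transaction so the file, file_dataset, and
 * moth_metadata rows commit or roll back together. Assumes the Drizzle driver
 * behind createDatabase() supports db.transaction(); some drivers (e.g.
 * Cloudflare D1) may require batch() instead. Column lists are abbreviated to
 * the same values used in processChunk above.
 */
const insertFileAtomically = async (fileId: string, fileData: (typeof body.files)[0]) => {
  const now = new Date();
  await db.transaction(async (tx) => {
    await tx.insert(file).values({ id: fileId, fileName: fileData.fileName /* ...same columns as above */ });
    await tx.insert(fileDataset).values({
      fileId, datasetId: body.datasetId,
      createdAt: now, createdBy: userId, lastModified: now, modifiedBy: userId,
    });
    if (fileData.mothMetadata) {
      await tx.insert(mothMetadata).values({
        fileId, timestamp: new Date(fileData.mothMetadata.timestamp), /* ...same columns as above */
        createdAt: now, createdBy: userId, lastModified: now, modifiedBy: userId, active: true,
      });
    }
  });
};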
return c.json(
  {
    error: "Bad Request",
    message:
      "Missing required file fields: fileName, xxh64Hash, locationId, timestampLocal, duration, sampleRate",
  },
  400,
);
throw new Error(
  "Missing required file fields: fileName, xxh64Hash, locationId, timestampLocal, duration, sampleRate",
);
console.error(`Error processing file ${fileData.fileName}:`, error);
// Ideally we would continue with the next file rather than failing the entire batch,
// and in a real transaction we would roll back here.
throw error; // For now, fail fast
const errorMessage = error instanceof Error ? error.message : String(error);
console.error(`Error processing file ${fileData.fileName}:`, errorMessage);
failedFiles.push({
  fileName: fileData.fileName,
  error: errorMessage,
});
return c.json({
  message: "Files imported successfully",
const response = {
  message: failedFiles.length === 0
    ? `Chunk ${body.chunkIndex + 1}/${body.totalChunks} processed successfully`
    : `Chunk ${body.chunkIndex + 1}/${body.totalChunks} completed with ${failedFiles.length} failures`,
const errorResponse = handleAuthError("file import");
return c.json(errorResponse, 401);
const errorMessage = error.message as string;
if (errorMessage.includes("401") || errorMessage.includes("Unauthorized")) {
  const errorResponse = handleAuthError("file import chunk");
  return c.json(errorResponse, 401);
}
type ImportState = 'folder_selection' | 'scanning' | 'ready' | 'importing' | 'completed' | 'error' | 'cancelled';
interface ChunkProgress {
  totalChunks: number;
  completedChunks: number;
  currentChunk: number;
  totalFiles: number;
  processedFiles: number;
  failedFiles: number;
}

interface ImportResult {
  totalFiles: number;
  successfulFiles: number;
  failedFiles: number;
  errors: Array<{ fileName: string; error: string }>;
}
// Chunked import state
const [chunkProgress, setChunkProgress] = useState<ChunkProgress>({
  totalChunks: 0,
  completedChunks: 0,
  currentChunk: 0,
  totalFiles: 0,
  processedFiles: 0,
  failedFiles: 0,
});
const [importResult, setImportResult] = useState<ImportResult | null>(null);
const [lastError, setLastError] = useState<string | null>(null);
// All files processed successfully, now update database in one transaction
console.log(`Sending ${processedFileData.length} files to database...`);
// Process files in chunks for better fault tolerance
console.log(`Processing ${processedFileData.length} files in chunks...`);
console.log('Import payload:', {
  fileCount: importPayload.files.length,
  datasetId: importPayload.datasetId,
  sampleFile: importPayload.files[0], // Log first file as sample
});
setChunkProgress({
  totalChunks: chunks.length,
  completedChunks: 0,
  currentChunk: 0,
  totalFiles: processedFileData.length,
  processedFiles: 0,
  failedFiles: 0,
});
const response = await fetch('/api/file-import', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Authorization': `Bearer ${accessToken}`,
  },
  body: JSON.stringify(importPayload),
});
let totalSuccessful = 0;
let totalFailed = 0;
const allErrors: Array<{ fileName: string; error: string }> = [];
if (!response.ok) {
  const errorData = await response.json();
  throw new Error(`HTTP ${response.status}: ${errorData.message || errorData.error || 'Unknown error'}`);
}
// Process each chunk with retry logic
for (let chunkIndex = 0; chunkIndex < chunks.length; chunkIndex++) {
  const chunk = chunks[chunkIndex];
  let retryCount = 0;
  const maxRetries = 3;
  let chunkSuccessful = false;

  setChunkProgress(prev => ({
    ...prev,
    currentChunk: chunkIndex + 1,
    completedChunks: chunkIndex,
  }));

  while (retryCount <= maxRetries && !chunkSuccessful) {
    try {
      console.log(`Processing chunk ${chunkIndex + 1}/${chunks.length}, attempt ${retryCount + 1}`);

      // Get fresh token for each chunk to prevent expiration
      const accessToken = await getAccessToken();

      const chunkPayload = {
        files: chunk,
        datasetId: datasetId,
        chunkIndex: chunkIndex,
        totalChunks: chunks.length,
      };

      const response = await fetch('/api/file-import/chunk', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${accessToken}`,
        },
        body: JSON.stringify(chunkPayload),
      });

      if (!response.ok) {
        const errorData = await response.json();
        throw new Error(`HTTP ${response.status}: ${errorData.message || errorData.error || 'Unknown error'}`);
      }

      const result = await response.json();
      console.log(`Chunk ${chunkIndex + 1} completed:`, result.data);

      totalSuccessful += result.data.importedCount;
      totalFailed += result.data.failedCount || 0;
      if (result.data.errors) {
        allErrors.push(...result.data.errors);
      }

      chunkSuccessful = true;
    } catch (error) {
      console.error(`Chunk ${chunkIndex + 1} failed on attempt ${retryCount + 1}:`, error);

      // Check if it's a retryable error
      const errorMessage = error instanceof Error ? error.message : String(error);
      const isRetryable =
        errorMessage.includes('500') ||
        errorMessage.includes('502') ||
        errorMessage.includes('503') ||
        errorMessage.includes('504') ||
        errorMessage.includes('network') ||
        errorMessage.includes('timeout');

      if (retryCount < maxRetries && isRetryable) {
        retryCount++;
        const delay = Math.min(1000 * Math.pow(2, retryCount), 10000);
        console.log(`Retrying chunk ${chunkIndex + 1} in ${delay}ms...`);
        await new Promise(resolve => setTimeout(resolve, delay));
      } else {
        // Mark all files in chunk as failed
        chunk.forEach((fileData: { fileName: string }) => {
          allErrors.push({
            fileName: fileData.fileName,
            error: errorMessage,
          });
        });
        totalFailed += chunk.length;
        break;
      }
    }
  }

  // Update progress
  setChunkProgress(prev => ({
    ...prev,
    completedChunks: chunkIndex + 1,
    processedFiles: totalSuccessful,
    failedFiles: totalFailed,
  }));
}
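// For reference, the retry delay schedule the loop above produces with maxRetries = 3:
//   attempt 1 fails -> retryCount 1 -> min(1000 * 2^1, 10000) = 2000 ms
//   attempt 2 fails -> retryCount 2 -> min(1000 * 2^2, 10000) = 4000 ms
//   attempt 3 fails -> retryCount 3 -> min(1000 * 2^3, 10000) = 8000 ms
// so a chunk is attempted at most 4 times, with ~14 s of total backoff, before
// its files are marked failed.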
const result = await response.json();
console.log('Database import successful:', result);
// Set final results
const finalResult: ImportResult = {
  totalFiles: processedFileData.length,
  successfulFiles: totalSuccessful,
  failedFiles: totalFailed,
  errors: allErrors,
};
// Show success for a moment, then close
setTimeout(() => {
  onImportComplete();
  handleClose();
}, 2000);
if (totalFailed === 0) {
  setImportState('completed');
  // Show success for a moment, then close
  setTimeout(() => {
    onImportComplete();
    handleClose();
  }, 2000);
} else {
  setImportState('error');
  setLastError(`Import completed with ${totalFailed} failures. See details below.`);
}
} catch (dbError) {
  console.error('Database import failed:', dbError);
  setImportState('cancelled'); // Use cancelled state to show error
  // TODO: Better error handling - maybe show error state in UI
  throw dbError;
} catch (error) {
  console.error('Database import failed:', error);
  setImportState('error');
  setLastError(error instanceof Error ? error.message : 'Unknown error occurred');
<p className="text-sm text-gray-600 mb-4">Importing audio files to cluster...</p>
{processProgress.total > 0 && (
<p className="text-sm text-gray-600 mb-4">
  {chunkProgress.totalChunks > 0
    ? `Processing chunk ${chunkProgress.currentChunk}/${chunkProgress.totalChunks}...`
    : 'Importing audio files to cluster...'}
</p>
{chunkProgress.totalFiles > 0 && (
  <div className="space-y-2">
    <div className="w-full bg-gray-200 rounded-full h-2">
      <div
        className="bg-green-600 h-2 rounded-full transition-all duration-300"
        style={{ width: `${(chunkProgress.processedFiles / chunkProgress.totalFiles) * 100}%` }}
      ></div>
    </div>
    <div className="flex justify-between text-xs text-gray-500">
      <span>{chunkProgress.processedFiles} / {chunkProgress.totalFiles} files processed</span>
      {chunkProgress.failedFiles > 0 && (
        <span className="text-red-500">{chunkProgress.failedFiles} failed</span>
      )}
    </div>
  </div>
)}
{processProgress.total > 0 && chunkProgress.totalFiles === 0 && (
Successfully imported {audioFiles.length} audio files
{importResult
  ? `Successfully imported ${importResult.successfulFiles} of ${importResult.totalFiles} audio files`
  : `Successfully imported ${audioFiles.length} audio files`}
</p>
</div>
) : importState === 'error' ? (
  // Error state
  <div className="text-center">
    <div className="mb-4">
      <svg className="w-12 h-12 text-red-500 mx-auto" fill="none" stroke="currentColor" viewBox="0 0 24 24">
        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
      </svg>
    </div>
    <h3 className="text-lg font-medium text-gray-900 mb-2">Import Issues</h3>
    <p className="text-sm text-gray-600 mb-4">
      {lastError || 'Some files could not be imported'}
{importResult && (
  <div className="bg-gray-50 p-3 rounded-md text-sm text-left space-y-1">
    <div className="flex justify-between">
      <span>Total files:</span>
      <span>{importResult.totalFiles}</span>
    </div>
    <div className="flex justify-between text-green-600">
      <span>Successful:</span>
      <span>{importResult.successfulFiles}</span>
    </div>
    <div className="flex justify-between text-red-600">
      <span>Failed:</span>
      <span>{importResult.failedFiles}</span>
    </div>
    {importResult.errors.length > 0 && (
      <div className="mt-2 pt-2 border-t border-gray-200">
        <p className="text-xs text-gray-500 mb-1">Recent errors:</p>
        <div className="max-h-20 overflow-y-auto space-y-1">
          {importResult.errors.slice(0, 3).map((error, index) => (
            <div key={index} className="text-xs text-red-600">
              <span className="font-medium">{error.fileName}:</span> {error.error}
            </div>
          ))}
          {importResult.errors.length > 3 && (
            <p className="text-xs text-gray-500">...and {importResult.errors.length - 3} more</p>
          )}
        </div>
      </div>
    )}
  </div>
)}
<Button
  onClick={handleCancel}
  variant="secondary"
>
  Cancel
</Button>
{isSupported && selectedFolder && importState === 'folder_selection' && (
{importState === 'error' ? (
  <>
    <Button
      onClick={handleClose}
      variant="secondary"
    >
      Close
    </Button>
    {importResult && importResult.successfulFiles > 0 && (
      <Button
        onClick={() => {
          onImportComplete();
          handleClose();
        }}
        variant="default"
      >
        Continue with {importResult.successfulFiles} files
      </Button>
    )}
  </>
) : importState === 'completed' ? (
) : (
  <>
    <Button
      onClick={handleCancel}
      variant="secondary"
      disabled={importState === 'importing'}
    >
      {importState === 'importing' ? 'Processing...' : 'Cancel'}
    </Button>
    {isSupported && selectedFolder && importState === 'folder_selection' && (
      <Button
        onClick={handleStartImport}
        variant="default"
      >
        Next
      </Button>
    )}
  </>
  }
}, [state.wavesurfer]);

// Zoom controls
const zoomIn = useCallback(() => {
  if (state.zoomPlugin && typeof state.zoomPlugin === "object" && "zoom" in state.zoomPlugin) {
    (state.zoomPlugin as { zoom: (factor: number) => void }).zoom(1.5);
  }
}, [state.zoomPlugin]);

const zoomOut = useCallback(() => {
  if (state.zoomPlugin && typeof state.zoomPlugin === "object" && "zoom" in state.zoomPlugin) {
    (state.zoomPlugin as { zoom: (factor: number) => void }).zoom(0.75);
  }
}, [state.zoomPlugin]);

const resetZoom = useCallback(() => {
  if (state.wavesurfer) {
    state.wavesurfer.zoom(0);
[ ] Check render of opus spectrogram
[ ] Check safari supports .opus, handle error if not
[X] Check render of opus spectrogram
[X] Check safari supports .opus, handle error if not. iOS is fine; older machines are not OK.
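// A possible detection sketch for the Safari/.opus item above. canPlayType()
// returns '', 'maybe', or 'probably', so anything non-empty is treated as playable;
// .opus files are typically Ogg containers, with WebM as a secondary check.
const probe = document.createElement('audio');
const opusSupported =
  probe.canPlayType('audio/ogg; codecs="opus"') !== '' ||
  probe.canPlayType('audio/webm; codecs="opus"') !== '';
if (!opusSupported) {
  // Surface an error instead of attempting to render the spectrogram.
  console.warn('.opus playback not supported in this browser');
}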