OSGJQXKCMTGBJROHYLRRHCO4BYH2DZMQWTPI3ZCDWKOGO73DW7JQC };});}/*** Escapes a CSV field if it contains special characters*/export function escapeCSVField(field: string | number | null | undefined): string {if (field === null || field === undefined) {return '';}const value = String(field);// If the field contains comma, newline, or quotes, wrap it in quotes and escape internal quotesif (value.includes(',') || value.includes('\n') || value.includes('\r') || value.includes('"')) {return `"${value.replace(/"/g, '""')}"`;}return value;}/*** Transforms CSV export database rows and generates dynamic CSV content*/export function generateCSVContent(rows: unknown[]): string {if (rows.length === 0) {return 'hour_bucket,cluster,location,date_UTC,files_in_bucket\n';}// Transform rows to typed formatconst csvRows = rows.map(row => {const r = row as Record<string, unknown>;return {hour_of_day: Number(r.hour_of_day),date_utc: String(r.date_utc),cluster_name: String(r.cluster_name || ''),location_name: String(r.location_name || ''),species_name: String(r.species_name || ''),call_type_name: r.call_type_name ? String(r.call_type_name) : null,call_count: Number(r.call_count || 0),files_in_bucket: Number(r.files_in_bucket || 0)
// Determine dynamic columns based on unique call typesconst callTypes = [...new Set(csvRows.map(row => row.call_type_name).filter(Boolean))].sort();const speciesName = csvRows[0]?.species_name || 'Unknown Species';// Generate headersconst fixedHeaders = ['hour_bucket', 'cluster', 'location', 'date_UTC'];const dynamicHeaders = callTypes.length > 0? callTypes.map(callType => `${speciesName} - ${callType}`): [speciesName];const allHeaders = [...fixedHeaders, ...dynamicHeaders, 'files_in_bucket'];// Group data by hour bucket (date + hour combination)const groupedData = new Map<string, {hour_bucket: string;cluster: string;location: string;date_UTC: string;files_in_bucket: number;call_counts: Map<string, number>;}>();csvRows.forEach(row => {const hourBucket = `${row.date_utc}T${String(row.hour_of_day).padStart(2, '0')}:00:00.0`;const key = `${hourBucket}_${row.cluster_name}_${row.location_name}`;if (!groupedData.has(key)) {groupedData.set(key, {hour_bucket: hourBucket,cluster: row.cluster_name,location: row.location_name,date_UTC: row.date_utc,files_in_bucket: row.files_in_bucket,call_counts: new Map()});}const group = groupedData.get(key)!;if (row.call_type_name) {group.call_counts.set(row.call_type_name, (group.call_counts.get(row.call_type_name) || 0) + row.call_count);} else {// If no call type, add to total count (for cases where no call type breakdown exists)group.call_counts.set('_total', (group.call_counts.get('_total') || 0) + row.call_count);}});// Generate CSV contentconst csvLines = [allHeaders.map(escapeCSVField).join(',')];// Sort by date and hourconst sortedGroups = Array.from(groupedData.values()).sort((a, b) =>a.hour_bucket.localeCompare(b.hour_bucket));sortedGroups.forEach(group => {const fixedColumns = [group.hour_bucket,group.cluster,group.location,group.date_UTC];// Generate dynamic columnsconst dynamicColumns = callTypes.length > 0? callTypes.map(callType => callType ? 
group.call_counts.get(callType) || 0 : 0): [group.call_counts.get('_total') || Array.from(group.call_counts.values()).reduce((sum, count) => sum + count, 0)];const allColumns = [...fixedColumns,...dynamicColumns,group.files_in_bucket];csvLines.push(allColumns.map(escapeCSVField).join(','));});return csvLines.join('\n') + '\n';
}/*** Gets CSV export data for cluster analysis with call type breakdown*/export async function getClusterCSVExport(db: Database, params: CallRateQueryParams) {if (!params.clusterId) {throw new Error("clusterId is required for cluster CSV export");}const timeConditions = getTimeFilterConditions(params.timeFilter);const callTypeFilter = buildCallTypeFilter(params.callTypeId);// Time filtering conditions for fileslet fileTimeFilter = sql``;if (timeConditions && params.timeFilter !== "all") {fileTimeFilter = sql`AND f.${sql.identifier(timeConditions.fileTimeColumn)} = ${timeConditions.fileTimeValue}`;}return db.execute(sql`WITH hourly_data AS (SELECTEXTRACT(HOUR FROM f.timestamp_local) as hour_of_day,DATE(f.timestamp_local) as date_utc,c.name as cluster_name,loc.name as location_name,s.label as species_name,ct.label as call_type_name,COUNT(l.id) as call_count,COUNT(DISTINCT f.id) as files_in_bucketFROM selection selJOIN label l ON sel.id = l.selection_idJOIN file f ON sel.file_id = f.idJOIN cluster c ON f.cluster_id = c.idJOIN location loc ON c.location_id = loc.idJOIN species s ON l.species_id = s.idLEFT JOIN label_subtype ls ON l.id = ls.label_id AND ls.active = trueLEFT JOIN call_type ct ON ls.calltype_id = ct.idWHERE f.cluster_id = ${params.clusterId}AND l.species_id = ${params.speciesId}AND l.filter_id = ${params.filterId}AND l.active = trueAND sel.active = trueAND f.active = true${callTypeFilter}${fileTimeFilter}GROUP BYEXTRACT(HOUR FROM f.timestamp_local),DATE(f.timestamp_local),c.name,loc.name,s.label,ct.label),all_hourly_files AS (SELECTEXTRACT(HOUR FROM f.timestamp_local) as hour_of_day,DATE(f.timestamp_local) as date_utc,c.name as cluster_name,loc.name as location_name,COUNT(f.id) as total_files_in_bucketFROM file fJOIN cluster c ON f.cluster_id = c.idJOIN location loc ON c.location_id = loc.idWHERE f.cluster_id = ${params.clusterId}AND f.active = true${fileTimeFilter}GROUP BYEXTRACT(HOUR FROM 
f.timestamp_local),DATE(f.timestamp_local),c.name,loc.name)SELECThd.hour_of_day,hd.date_utc,hd.cluster_name,hd.location_name,hd.species_name,hd.call_type_name,hd.call_count,COALESCE(ahf.total_files_in_bucket, 0) as files_in_bucketFROM hourly_data hdLEFT JOIN all_hourly_files ahf ON hd.hour_of_day = ahf.hour_of_dayAND hd.date_utc = ahf.date_utcAND hd.cluster_name = ahf.cluster_nameAND hd.location_name = ahf.location_nameORDER BY hd.date_utc, hd.hour_of_day, hd.call_type_name`);}/*** Gets CSV export data for dataset analysis with call type breakdown*/export async function getDatasetCSVExport(db: Database, params: CallRateQueryParams) {if (!params.datasetId) {throw new Error("datasetId is required for dataset CSV export");}const timeConditions = getTimeFilterConditions(params.timeFilter);const callTypeFilter = buildCallTypeFilter(params.callTypeId);// Time filtering conditions for fileslet fileTimeFilter = sql``;if (timeConditions && params.timeFilter !== "all") {fileTimeFilter = sql`AND f.${sql.identifier(timeConditions.fileTimeColumn)} = ${timeConditions.fileTimeValue}`;}return db.execute(sql`WITH hourly_data AS (SELECTEXTRACT(HOUR FROM f.timestamp_local) as hour_of_day,DATE(f.timestamp_local) as date_utc,c.name as cluster_name,loc.name as location_name,s.label as species_name,ct.label as call_type_name,COUNT(l.id) as call_count,COUNT(DISTINCT f.id) as files_in_bucketFROM selection selJOIN label l ON sel.id = l.selection_idJOIN file f ON sel.file_id = f.idJOIN file_dataset fd ON f.id = fd.file_idJOIN cluster c ON f.cluster_id = c.idJOIN location loc ON c.location_id = loc.idJOIN species s ON l.species_id = s.idLEFT JOIN label_subtype ls ON l.id = ls.label_id AND ls.active = trueLEFT JOIN call_type ct ON ls.calltype_id = ct.idWHERE fd.dataset_id = ${params.datasetId}AND l.species_id = ${params.speciesId}AND l.filter_id = ${params.filterId}AND l.active = trueAND sel.active = trueAND f.active = true${callTypeFilter}${fileTimeFilter}GROUP BYEXTRACT(HOUR FROM 
f.timestamp_local),DATE(f.timestamp_local),c.name,loc.name,s.label,ct.label),all_hourly_files AS (SELECTEXTRACT(HOUR FROM f.timestamp_local) as hour_of_day,DATE(f.timestamp_local) as date_utc,c.name as cluster_name,loc.name as location_name,COUNT(f.id) as total_files_in_bucketFROM file fJOIN file_dataset fd ON f.id = fd.file_idJOIN cluster c ON f.cluster_id = c.idJOIN location loc ON c.location_id = loc.idWHERE fd.dataset_id = ${params.datasetId}AND f.active = true${fileTimeFilter}GROUP BYEXTRACT(HOUR FROM f.timestamp_local),DATE(f.timestamp_local),c.name,loc.name)SELECThd.hour_of_day,hd.date_utc,hd.cluster_name,hd.location_name,hd.species_name,hd.call_type_name,hd.call_count,COALESCE(ahf.total_files_in_bucket, 0) as files_in_bucketFROM hourly_data hdLEFT JOIN all_hourly_files ahf ON hd.hour_of_day = ahf.hour_of_dayAND hd.date_utc = ahf.date_utcAND hd.cluster_name = ahf.cluster_nameAND hd.location_name = ahf.location_nameORDER BY hd.date_utc, hd.hour_of_day, hd.call_type_name`);
// NOTE(review): tail end of the previous route's catch handler — its start is outside this chunk.
error instanceof Error ? error.message : String(error)),500);}});

/**
 * Export CSV data for cluster statistics
 *
 * @route GET /api/statistics/csv-export
 * @authentication Required
 * @query {string} clusterId - The cluster ID to export (for cluster mode)
 * @query {string} datasetId - The dataset ID to export (for dataset mode)
 * @query {string} speciesId - The species ID to filter by
 * @query {string} filterId - The filter ID to apply
 * @query {string} [timeFilter=civil_night] - Time filter
 * @query {string} [callTypeId] - Optional call type filter
 * @returns {String} CSV content with appropriate headers
 */
statistics.get("/csv-export", authenticate, async (c) => {
  try {
    // Caller identity comes from the JWT payload attached by the authenticate middleware.
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;

    // Parse query parameters
    const params: CallRateQueryParams = {
      clusterId: c.req.query("clusterId"),
      datasetId: c.req.query("datasetId"),
      speciesId: c.req.query("speciesId") || "",
      filterId: c.req.query("filterId") || "",
      timeFilter: (c.req.query("timeFilter") || "civil_night") as TimeFilter,
      callTypeId: c.req.query("callTypeId")
    };

    const db = createDatabase(c.env);
    // Dataset used for the permission check, resolved from either mode below.
    let datasetId: string;

    // Determine mode and validate parameters
    if (params.clusterId) {
      // Cluster mode
      const validationError = validateClusterCallRateParams(params);
      if (validationError) {
        return c.json(createErrorResponse(validationError), 400);
      }
      // Get dataset for permission check
      const clusterResult = await getClusterDataset(db, params.clusterId);
      if (clusterResult.length === 0) {
        return c.json(createErrorResponse("Cluster not found or inactive"), 404);
      }
      datasetId = clusterResult[0].dataset_id;
    } else if (params.datasetId) {
      // Dataset mode
      const validationError = validateDatasetCallRateParams(params);
      if (validationError) {
        return c.json(createErrorResponse(validationError), 400);
      }
      datasetId = params.datasetId;
    } else {
      return c.json(createErrorResponse("Either clusterId or datasetId must be provided"), 400);
    }

    // Check permissions — READ access on the resolved dataset is required.
    const hasPermission = await checkUserPermission(db, userId, datasetId, 'READ');
    if (!hasPermission) {
      return c.json(createErrorResponse("Access denied: No READ permission for this dataset"), 403);
    }

    // Get CSV data based on mode
    const csvResult = params.clusterId
      ? await getClusterCSVExport(db, params)
      : await getDatasetCSVExport(db, params);

    // Generate CSV content
    const csvContent = generateCSVContent(csvResult.rows);

    // Set CSV headers so browsers download rather than render the response.
    c.header('Content-Type', 'text/csv; charset=utf-8');
    c.header('Content-Disposition', 'attachment; filename="statistics-export.csv"');

    return c.text(csvContent);
  } catch (error) {
    console.error("Error exporting CSV:", error);
    return c.json(createErrorResponse("Failed to export CSV",
    // NOTE(review): chunk is truncated mid-call here; the remainder of this
    // catch block (the error-detail argument and closers) lies beyond the
    // visible range.
}

/**
 * One raw row produced by the cluster/dataset CSV export queries
 * (getClusterCSVExport / getDatasetCSVExport).
 */
export interface CSVExportRow {
  hour_of_day: number;            // hour bucket extracted from the file's local timestamp
  // NOTE(review): populated from DATE(f.timestamp_local) despite the _utc
  // suffix — confirm the intended timezone semantics.
  date_utc: string;
  cluster_name: string;
  location_name: string;
  species_name: string;
  call_type_name: string | null;  // null when the label has no active call-type subtype
  call_count: number;             // labels counted in this bucket
  files_in_bucket: number;        // total active files in the bucket (0 when none matched)
}

/**
 * Query parameters accepted by the CSV export endpoint. Exactly one of
 * `clusterId` / `datasetId` selects the export mode.
 */
export interface CSVExportParams {
  clusterId?: string;
  datasetId?: string;
  speciesId: string;
  filterId: string;
  timeFilter: TimeFilter;
  callTypeId?: string;
  // NOTE(review): closing brace of this interface lies beyond the visible chunk.
// Handle CSV exportconst handleCSVExport = async () => {if (!isAuthenticated || !callRateData) {return;}try {const accessToken = await getAccessToken();const params = new URLSearchParams();// Add parameters based on modeif (isDatasetMode) {params.append('datasetId', datasetId);} else {params.append('clusterId', clusterId!);}params.append('speciesId', selectedSpeciesId);params.append('filterId', selectedFilterId);params.append('timeFilter', timeFilter);// Add callTypeId if a specific call type is selectedif (selectedCallTypeId) {params.append("callTypeId", selectedCallTypeId);}const response = await fetch(`/api/statistics/csv-export?${params}`, {headers: {Authorization: `Bearer ${accessToken}`,},});if (!response.ok) {throw new Error(`Failed to export CSV: ${response.statusText}`);}// Get the CSV contentconst csvContent = await response.text();// Create downloadconst blob = new Blob([csvContent], { type: 'text/csv;charset=utf-8;' });const link = document.createElement('a');const url = URL.createObjectURL(blob);// Generate filename with current selectionsconst selectedSpecies = availableSpecies.find(s => s.id === selectedSpeciesId);const speciesLabel = selectedSpecies?.label || 'statistics';const timestamp = new Date().toISOString().slice(0, 19).replace(/:/g, '-');const filename = `${speciesLabel.replace(/\s+/g, '-')}_${timestamp}.csv`;link.setAttribute('href', url);link.setAttribute('download', filename);link.style.visibility = 'hidden';document.body.appendChild(link);link.click();document.body.removeChild(link);URL.revokeObjectURL(url);} catch (error) {console.error('Error exporting CSV:', error);// You could add a toast notification here}};
</div>{/* CSV Export button */}<div className="flex items-center"><buttononClick={handleCSVExport}disabled={loading || !callRateData}className="rounded-md border border-gray-300 bg-white px-3 py-1 text-sm font-medium text-gray-700 shadow-sm hover:bg-gray-50 focus:border-primary focus:outline-none focus:ring-1 focus:ring-primary disabled:bg-gray-100 disabled:text-gray-400 disabled:cursor-not-allowed">Export CSV</button>