// Species API routes (Hono router): list species for a dataset, create a species,
// and fetch files by dataset/species and by cluster.
import { nanoid } from 'nanoid';
import { eq, sql as sqlExpr } from "drizzle-orm";
import {
species,
speciesDataset,
callType,
// Tables referenced by the file-lookup routes below; they were used but never
// imported in the original file.
file,
fileMetadata,
mothMetadata,
cluster,
location,
selection,
label
} from "../../../db/schema";
import { authenticate, checkUserPermission } from "../middleware/auth";
import { createDatabase } from "../utils/database";
import type { Env, JWTPayload } from "../types";
species.get("/", authenticate, async (c) => {
return c.json({ error: "Not implemented yet" }, 501);
/**
* Protected API route to fetch species and call types for a dataset
*
* @route GET /api/species
* @authentication Required
* @param {string} datasetId - Required query parameter specifying the dataset to fetch species from
* @returns {Object} Response containing:
* - data: Array of species objects, each with an array of associated call types
* @error 400 - If datasetId is missing
* @description Returns species associated with the specified dataset along with their call types
* Each species object includes:
* - id, label, ebirdCode, description
* - callTypes: Array of call type objects with id and label
*
* Uses an efficient JOIN approach to fetch data in a single query to avoid N+1 query problems
* Results are transformed to provide a nested structure with call types inside species objects
*/
speciesRouter.get("/", authenticate, async (c) => {
try {
// Get query parameter for datasetId
const datasetId = c.req.query("datasetId");
// Validate parameters
if (!datasetId) {
return c.json({
error: "Missing required query parameter: datasetId"
}, 400);
}
// Connect to the database
const db = createDatabase(c.env);
// Use JOIN approach to get species and call types in a single query
// This eliminates the N+1 query problem and reduces round trips to the database
const queryResults = await db
.select({
id: species.id,
label: species.label,
ebirdCode: species.ebirdCode,
description: species.description,
callTypeId: callType.id,
callTypeLabel: callType.label
})
.from(species)
.innerJoin(
speciesDataset,
eq(species.id, speciesDataset.speciesId)
)
.leftJoin(
callType,
sqlExpr`${callType.speciesId} = ${species.id} AND ${callType.active} = true`
)
.where(eq(speciesDataset.datasetId, datasetId))
.orderBy(species.label);
// If no results are found, return an empty array
if (queryResults.length === 0) {
return c.json({
data: []
});
}
// Transform the flat results into nested objects
// Using a Map for better performance with large datasets
const speciesMap = new Map();
queryResults.forEach(row => {
if (!speciesMap.has(row.id)) {
// Create a new species entry if not already in the map
speciesMap.set(row.id, {
id: row.id,
label: row.label,
ebirdCode: row.ebirdCode,
description: row.description,
callTypes: []
});
}
// Add the call type if it exists and isn't already in the array
if (row.callTypeId) {
const species = speciesMap.get(row.id);
const existingCallType = species.callTypes.find((ct: { id: string }) => ct.id === row.callTypeId);
if (!existingCallType) {
species.callTypes.push({
id: row.callTypeId,
label: row.callTypeLabel
});
}
}
});
// Convert map to array
const enrichedSpecies = Array.from(speciesMap.values());
// Return the enriched species data
return c.json({
data: enrichedSpecies
});
} catch (error) {
console.error("Error fetching species:", error);
return c.json(
{
error: "Failed to fetch species",
details: error instanceof Error ? error.message : String(error),
},
500
);
}
/**
* Protected API route to create a new species
*
* @route POST /api/species
* @authentication Required
* @param {Object} body - Species data including datasetId, label, description, ebirdCode, callTypes
* @returns {Object} Response containing:
* - data: Created species object with call types
* @description Creates a new species with optional eBird integration and call types.
* Requires EDIT permission on the dataset. Creates entries in species, species_dataset,
* and optionally call_type tables in a single transaction.
*/
speciesRouter.post("/", authenticate, async (c) => {
try {
// Get user ID from JWT
const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
const userId = jwtPayload.sub;
species.post("/", authenticate, async (c) => {
return c.json({ error: "Not implemented yet" }, 501);
// Database connection
const db = createDatabase(c.env);
// Parse and validate request body
const body = await c.req.json();
const { id, datasetId, label, description, ebirdCode, callTypes } = body;
// Field validation
if (!id || typeof id !== 'string') {
return c.json({
error: "Missing or invalid required field: id"
}, 400);
}
if (!datasetId || typeof datasetId !== 'string') {
return c.json({
error: "Missing or invalid required field: datasetId"
}, 400);
}
if (!label || typeof label !== 'string' || label.trim().length === 0) {
return c.json({
error: "Missing or invalid required field: label"
}, 400);
}
// Length validation
if (id.length !== 12) {
return c.json({
error: "Field 'id' must be exactly 12 characters (nanoid)"
}, 400);
}
if (label.length > 100) {
return c.json({
error: "Field 'label' must be 100 characters or less"
}, 400);
}
if (description && description.length > 255) {
return c.json({
error: "Field 'description' must be 255 characters or less"
}, 400);
}
if (ebirdCode && ebirdCode.length > 12) {
return c.json({
error: "Field 'ebirdCode' must be 12 characters or less"
}, 400);
}
// Validate call types if provided
if (callTypes && (!Array.isArray(callTypes) || callTypes.some((ct: any) => !ct.label || ct.label.length > 100))) {
return c.json({
error: "Field 'callTypes' must be an array of objects with 'label' field (max 100 chars)"
}, 400);
}
// Check if user has EDIT permission on the dataset
const hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');
if (!hasPermission) {
return c.json({
error: "You don't have permission to create species in this dataset"
}, 403);
}
// Start transaction for atomic creation
const result = await db.transaction(async (tx) => {
const now = new Date();
// Create species record
const newSpecies = {
id: id.trim(),
label: label.trim(),
ebirdCode: ebirdCode?.trim() || null,
taxonomyVersion: ebirdCode ? '2024' : null,
description: description?.trim() || null,
createdBy: userId,
createdAt: now,
lastModified: now,
modifiedBy: userId,
owner: userId,
active: true,
};
const [createdSpecies] = await tx.insert(species).values(newSpecies).returning({
id: species.id,
label: species.label,
ebirdCode: species.ebirdCode,
description: species.description,
});
// Link species to dataset
await tx.insert(speciesDataset).values({
speciesId: createdSpecies.id,
datasetId: datasetId.trim(),
createdBy: userId,
createdAt: now,
lastModified: now,
modifiedBy: userId,
});
// Create call types if provided
const createdCallTypes = [];
if (callTypes && Array.isArray(callTypes)) {
for (const callTypeData of callTypes) {
const callTypeId = nanoid(12);
const [createdCallType] = await tx.insert(callType).values({
id: callTypeId,
speciesId: createdSpecies.id,
label: callTypeData.label.trim(),
createdBy: userId,
createdAt: now,
lastModified: now,
modifiedBy: userId,
active: true,
}).returning({
id: callType.id,
label: callType.label,
});
createdCallTypes.push(createdCallType);
}
}
return {
...createdSpecies,
callTypes: createdCallTypes
};
});
console.log("Created species:", result.id, "for dataset:", datasetId, "by user:", userId);
return c.json({
data: result
}, 201);
} catch (error) {
console.error("Error creating species:", error);
// Handle unique constraint violations
if (error instanceof Error && error.message.includes('duplicate key')) {
return c.json({
error: "A species with this ID already exists"
}, 400);
}
return c.json(
{
error: "Failed to create species",
details: error instanceof Error ? error.message : String(error),
},
500
);
}
/**
* Protected API route to fetch files by dataset and species with pagination and filters
*
* @route GET /api/selection
* @authentication Required
* @param {string} datasetId - Required query parameter specifying the dataset to fetch files from
* @param {string} speciesId - Required query parameter specifying the species to filter files by
* @param {number} [page=1] - Optional page number for pagination (starts at 1)
* @param {number} [pageSize=100] - Optional page size (10-500, defaults to 100)
* @param {string} [solarNight] - Optional filter: 'true', 'false', or omit for all
* @param {string} [civilNight] - Optional filter: 'true', 'false', or omit for all
* @returns {Object} Response containing:
* - data: Array of file objects with metadata, mothMetadata, and species found
* - pagination: Object with pagination metadata (currentPage, pageSize, totalPages, totalItems, etc.)
* - filters: Object showing applied filters
* @error 400 - If datasetId or speciesId is missing, or page is invalid
* @description Returns active files for the specified dataset that have selections labeled with the specified species
* Each file includes:
* - Basic file information (id, fileName, path, duration, etc.)
* - metadata: Parsed JSON metadata from fileMetadata table
* - mothMetadata: Moth recorder metadata (gain, battery, temperature)
* - Species found in each file (filtered to the specified species)
*
* This route is called when clicking on a species in the frontend to show all files
* that contain recordings of that species.
*/
return c.json({ error: "Not implemented yet" }, 501);
try {
// Get query parameters
const datasetId = c.req.query("datasetId");
const speciesId = c.req.query("speciesId");
const page = parseInt(c.req.query("page") || "1", 10);
const pageSize = parseInt(c.req.query("pageSize") || "100", 10);
const solarNight = c.req.query("solarNight");
const civilNight = c.req.query("civilNight");
// Validate parameters
if (!datasetId) {
return c.json({
error: "Missing required query parameter: datasetId"
}, 400);
}
if (!speciesId) {
return c.json({
error: "Missing required query parameter: speciesId"
}, 400);
}
// Validate and limit page size to prevent excessive queries
const limitedPageSize = Math.min(Math.max(pageSize, 10), 500);
const offset = (page - 1) * limitedPageSize;
// Validate page number
if (page < 1) {
return c.json({
error: "Invalid page parameter: must be greater than 0"
}, 400);
}
// Connect to the database
const db = createDatabase(c.env);
// Build base filter condition - active files with selections of the specified species
// This forms the core of our WHERE clause for both queries
let baseCondition = sqlExpr`
${file.active} = true
AND ${location.datasetId} = ${datasetId}
AND ${label.speciesId} = ${speciesId}
AND ${label.active} = true
`;
// Add filters for day/night if specified
if (solarNight === 'true') {
baseCondition = sqlExpr`${baseCondition} AND ${file.maybeSolarNight} = true`;
} else if (solarNight === 'false') {
baseCondition = sqlExpr`${baseCondition} AND (${file.maybeSolarNight} = false OR ${file.maybeSolarNight} IS NULL)`;
}
if (civilNight === 'true') {
baseCondition = sqlExpr`${baseCondition} AND ${file.maybeCivilNight} = true`;
} else if (civilNight === 'false') {
baseCondition = sqlExpr`${baseCondition} AND (${file.maybeCivilNight} = false OR ${file.maybeCivilNight} IS NULL)`;
}
// Get total count for pagination using a more efficient COUNT(1)
const countResult = await db
.select({
count: sqlExpr<number>`COUNT(DISTINCT ${file.id})`
})
.from(file)
.innerJoin(cluster, eq(file.clusterId, cluster.id))
.innerJoin(location, eq(cluster.locationId, location.id))
.innerJoin(selection, eq(selection.fileId, file.id))
.innerJoin(label, eq(label.selectionId, selection.id))
.where(baseCondition);
const totalFiles = Number(countResult[0].count);
const totalPages = Math.ceil(totalFiles / limitedPageSize);
// Early return if there are no matching files
if (totalFiles === 0) {
return c.json({
data: [],
pagination: {
currentPage: page,
pageSize: limitedPageSize,
totalPages: 0,
totalItems: 0,
hasNextPage: false,
hasPreviousPage: false,
},
filters: {
datasetId,
speciesId,
solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
}
});
}
// Fetch files with pagination
const filesResult = await db
.select({
id: file.id,
fileName: file.fileName,
path: file.path,
timestampLocal: file.timestampLocal,
duration: file.duration,
sampleRate: file.sampleRate,
locationId: file.locationId,
clusterId: file.clusterId,
description: file.description,
maybeSolarNight: file.maybeSolarNight,
maybeCivilNight: file.maybeCivilNight,
moonPhase: file.moonPhase,
})
.from(file)
.innerJoin(cluster, eq(file.clusterId, cluster.id))
.innerJoin(location, eq(cluster.locationId, location.id))
.innerJoin(selection, eq(selection.fileId, file.id))
.innerJoin(label, eq(label.selectionId, selection.id))
.where(baseCondition)
.orderBy(file.timestampLocal)
.groupBy(file.id, file.fileName, file.path, file.timestampLocal, file.duration,
file.sampleRate, file.locationId, file.clusterId, file.description, file.maybeSolarNight,
file.maybeCivilNight, file.moonPhase)
.limit(limitedPageSize)
.offset(offset);
// Get all file IDs for metadata queries
const fileIds = filesResult.map(f => f.id);
// Safety check - shouldn't happen with our early return, but just in case
if (fileIds.length === 0) {
return c.json({
data: [],
pagination: {
currentPage: page,
pageSize: limitedPageSize,
totalPages: totalPages,
totalItems: totalFiles,
hasNextPage: page < totalPages,
hasPreviousPage: page > 1,
},
filters: {
datasetId,
speciesId,
solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
}
});
}
// Convert file IDs to a properly formatted SQL string
const fileIdsQuoted = fileIds.map(id => `'${id}'`).join(',');
// Execute metadata queries in parallel using Promise.all for better performance
const [metadataResults, mothMetadataResults, speciesResults] = await Promise.all([
// Fetch file metadata
db.select({
fileId: fileMetadata.fileId,
json: fileMetadata.json
})
.from(fileMetadata)
.where(sqlExpr`${fileMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
// Fetch moth metadata
db.select({
fileId: mothMetadata.fileId,
gain: mothMetadata.gain,
batteryV: mothMetadata.batteryV,
tempC: mothMetadata.tempC
})
.from(mothMetadata)
.where(sqlExpr`${mothMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
// Fetch species data - pre-filtered to just get the relevant species
db.select({
fileId: selection.fileId,
speciesId: species.id,
speciesLabel: species.label,
ebirdCode: species.ebirdCode,
description: species.description
})
.from(selection)
.innerJoin(label, eq(label.selectionId, selection.id))
.innerJoin(species, eq(species.id, label.speciesId))
.where(sqlExpr`
${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})
AND ${label.speciesId} = ${speciesId}
AND ${label.active} = true
`)
]);
// Process metadata results into maps for efficient lookups
const metadataMap = metadataResults.reduce((acc, item) => {
let processedJson = item.json;
try {
if (typeof item.json === 'string' && (item.json.startsWith('{') || item.json.startsWith('['))) {
processedJson = JSON.parse(item.json);
} else if (typeof item.json === 'string' && item.json.includes('\\"')) {
const unescaped = item.json.replace(/\\"/g, '"');
processedJson = JSON.parse(unescaped);
}
} catch (e) {
console.error("Error processing metadata JSON:", e);
processedJson = item.json;
}
acc[item.fileId] = processedJson;
return acc;
}, {} as Record<string, unknown>);
// Process moth metadata
const mothMetadataMap = mothMetadataResults.reduce((acc, item) => {
acc[item.fileId] = {
gain: item.gain,
batteryV: item.batteryV !== null ? Number(item.batteryV) : null,
tempC: item.tempC !== null ? Number(item.tempC) : null
};
return acc;
}, {} as Record<string, {
gain: string | null;
batteryV: number | null;
tempC: number | null;
}>);
// Process species data with Map for better performance
const speciesMap = new Map<string, Array<{
id: string;
label: string;
ebirdCode: string | null;
description: string | null;
}>>();
speciesResults.forEach(item => {
if (!speciesMap.has(item.fileId)) {
speciesMap.set(item.fileId, []);
}
// Get the current species array for this file
const fileSpecies = speciesMap.get(item.fileId)!;
// Check if we already have this species (deduplication)
const existingSpeciesIndex = fileSpecies.findIndex(s => s.id === item.speciesId);
if (existingSpeciesIndex === -1) {
// Add species if it doesn't already exist for this file
fileSpecies.push({
id: item.speciesId,
label: item.speciesLabel,
ebirdCode: item.ebirdCode,
description: item.description
});
}
});
// Combine file data with metadata in a single operation
const files = filesResult.map(fileData => ({
...fileData,
metadata: metadataMap[fileData.id] || null,
mothMetadata: mothMetadataMap[fileData.id] || null,
species: speciesMap.get(fileData.id) || []
}));
// Return paginated results with metadata and filters
return c.json({
data: files,
pagination: {
currentPage: page,
pageSize: limitedPageSize,
totalPages: totalPages,
totalItems: totalFiles,
hasNextPage: page < totalPages,
hasPreviousPage: page > 1,
},
filters: {
datasetId,
speciesId,
solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
}
});
} catch (error) {
console.error("Error fetching files by dataset and species:", error);
return c.json(
{
error: "Failed to fetch files by dataset and species",
details: error instanceof Error ? error.message : String(error),
},
500
);
}
/**
* Protected API route to fetch files by cluster and species with pagination and filters
*
* @route GET /api/files
* @authentication Required
* @param {string} clusterId - Required query parameter specifying the cluster to fetch files from
* @param {number} [page=1] - Optional page number for pagination (starts at 1)
* @param {number} [pageSize=100] - Optional page size (10-500, defaults to 100)
* @param {string} [solarNight] - Optional filter: 'true', 'false', or omit for all
* @param {string} [civilNight] - Optional filter: 'true', 'false', or omit for all
* @param {string} [speciesId] - Optional filter to only include files with selections labeled with this species
* @returns {Object} Response containing:
* - data: Array of file objects with metadata, mothMetadata, and species found
* - pagination: Object with pagination metadata (currentPage, pageSize, totalPages, totalItems, etc.)
* - filters: Object showing applied filters
* @error 400 - If clusterId is missing or page is invalid
* @description Returns active files for the specified cluster with optional filters
* Each file includes:
* - Basic file information (id, fileName, path, duration, etc.)
* - metadata: Parsed JSON metadata from fileMetadata table
* - mothMetadata: Moth recorder metadata (gain, battery, temperature)
* - Species found in each file
*
* When speciesId is provided, only returns files that have at least one
* selection labeled with the specified species.
*/
// Large complex route - implementation extracted from original index.ts
return c.json({ error: "Not implemented yet" }, 501);
try {
// Get query parameters
const clusterId = c.req.query("clusterId");
const page = parseInt(c.req.query("page") || "1", 10);
const pageSize = parseInt(c.req.query("pageSize") || "100", 10);
const solarNight = c.req.query("solarNight");
const civilNight = c.req.query("civilNight");
const speciesId = c.req.query("speciesId"); // Optional filter for species
// Validate parameters
if (!clusterId) {
console.log("Missing clusterId in request");
return c.json({
error: "Missing required query parameter: clusterId"
}, 400);
}
// Validate and limit page size to prevent excessive queries
const limitedPageSize = Math.min(Math.max(pageSize, 10), 500);
const offset = (page - 1) * limitedPageSize;
// Validate page number
if (page < 1) {
console.log("Invalid page number in request:", page);
return c.json({
error: "Invalid page parameter: must be greater than 0"
}, 400);
}
// Connect to the database
const db = createDatabase(c.env);
// Build filter conditions
let whereConditions = sqlExpr`${file.clusterId} = ${clusterId} AND ${file.active} = true`;
// Add filters for solarNight if specified
if (solarNight === 'true') {
whereConditions = sqlExpr`${whereConditions} AND ${file.maybeSolarNight} = true`;
} else if (solarNight === 'false') {
whereConditions = sqlExpr`${whereConditions} AND (${file.maybeSolarNight} = false OR ${file.maybeSolarNight} IS NULL)`;
}
// Add filters for civilNight if specified
if (civilNight === 'true') {
whereConditions = sqlExpr`${whereConditions} AND ${file.maybeCivilNight} = true`;
} else if (civilNight === 'false') {
whereConditions = sqlExpr`${whereConditions} AND (${file.maybeCivilNight} = false OR ${file.maybeCivilNight} IS NULL)`;
}
// First, get the total count of files for pagination metadata
let countResult;
if (speciesId) {
// Count only files that have at least one selection labeled with the specified species
countResult = await db
.select({
count: sqlExpr<number>`COUNT(DISTINCT ${file.id})`
})
.from(file)
.innerJoin(selection, eq(selection.fileId, file.id))
.innerJoin(label, eq(label.selectionId, selection.id))
.where(sqlExpr`${whereConditions} AND ${label.speciesId} = ${speciesId} AND ${label.active} = true`);
} else {
// Standard count without species filter
countResult = await db
.select({
count: sqlExpr<number>`COUNT(1)`
})
.from(file)
.where(whereConditions);
}
const totalFiles = Number(countResult[0].count);
const totalPages = Math.ceil(totalFiles / limitedPageSize);
// Query files for the specified cluster with pagination
let filesResult;
if (speciesId) {
// Get only files that have at least one selection labeled with the specified species
filesResult = await db
.select({
id: file.id,
fileName: file.fileName,
path: file.path,
timestampLocal: file.timestampLocal,
duration: file.duration,
sampleRate: file.sampleRate,
locationId: file.locationId,
description: file.description,
maybeSolarNight: file.maybeSolarNight,
maybeCivilNight: file.maybeCivilNight,
moonPhase: file.moonPhase,
})
.from(file)
.innerJoin(selection, eq(selection.fileId, file.id))
.innerJoin(label, eq(label.selectionId, selection.id))
.where(sqlExpr`${whereConditions} AND ${label.speciesId} = ${speciesId} AND ${label.active} = true`)
.orderBy(file.timestampLocal)
.groupBy(file.id, file.fileName, file.path, file.timestampLocal, file.duration,
file.sampleRate, file.locationId, file.description, file.maybeSolarNight,
file.maybeCivilNight, file.moonPhase)
.limit(limitedPageSize)
.offset(offset);
} else {
// Standard query without species filter
filesResult = await db
.select({
id: file.id,
fileName: file.fileName,
path: file.path,
timestampLocal: file.timestampLocal,
duration: file.duration,
sampleRate: file.sampleRate,
locationId: file.locationId,
description: file.description,
maybeSolarNight: file.maybeSolarNight,
maybeCivilNight: file.maybeCivilNight,
moonPhase: file.moonPhase,
})
.from(file)
.where(whereConditions)
.orderBy(file.timestampLocal)
.limit(limitedPageSize)
.offset(offset);
}
// Early return if no files found
if (filesResult.length === 0) {
return c.json({
data: [],
pagination: {
currentPage: page,
pageSize: limitedPageSize,
totalPages: totalPages,
totalItems: totalFiles,
hasNextPage: page < totalPages,
hasPreviousPage: page > 1,
},
filters: {
solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
speciesId: speciesId || null,
}
});
}
// Get all file IDs for metadata queries
const fileIds = filesResult.map(f => f.id);
// Convert array to proper SQL format for IN clause
const fileIdsQuoted = fileIds.map(id => `'${id}'`).join(',');
// Execute metadata queries in parallel using Promise.all
const [metadataResults, mothMetadataResults, speciesResults] = await Promise.all([
// Fetch file metadata
db.select({
fileId: fileMetadata.fileId,
json: fileMetadata.json
})
.from(fileMetadata)
.where(sqlExpr`${fileMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
// Fetch moth metadata
db.select({
fileId: mothMetadata.fileId,
gain: mothMetadata.gain,
batteryV: mothMetadata.batteryV,
tempC: mothMetadata.tempC
})
.from(mothMetadata)
.where(sqlExpr`${mothMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
// Fetch species data via selections and labels
db.select({
fileId: selection.fileId,
speciesId: species.id,
speciesLabel: species.label,
ebirdCode: species.ebirdCode,
description: species.description
})
.from(selection)
.innerJoin(label, eq(label.selectionId, selection.id))
.innerJoin(species, eq(species.id, label.speciesId))
.where(
speciesId
? sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)}) AND ${label.active} = true AND ${label.speciesId} = ${speciesId}`
: sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)}) AND ${label.active} = true`
)
]);
// Process metadata results
const metadataMap = metadataResults.reduce((acc, item) => {
let processedJson = item.json;
try {
if (typeof item.json === 'string' && (item.json.startsWith('{') || item.json.startsWith('['))) {
processedJson = JSON.parse(item.json);
} else if (typeof item.json === 'string' && item.json.includes('\\"')) {
const unescaped = item.json.replace(/\\"/g, '"');
processedJson = JSON.parse(unescaped);
}
} catch (e) {
console.error("Error processing metadata JSON:", e);
processedJson = item.json;
}
acc[item.fileId] = processedJson;
return acc;
}, {} as Record<string, unknown>);
// Process moth metadata
const mothMetadataMap = mothMetadataResults.reduce((acc, item) => {
acc[item.fileId] = {
gain: item.gain,
batteryV: item.batteryV !== null ? Number(item.batteryV) : null,
tempC: item.tempC !== null ? Number(item.tempC) : null
};
return acc;
}, {} as Record<string, {
gain: string | null;
batteryV: number | null;
tempC: number | null;
}>);
// Process species data
const speciesMap = speciesResults.reduce((acc, item) => {
if (!acc[item.fileId]) {
acc[item.fileId] = [];
}
// Deduplicate species entries
const existingSpecies = acc[item.fileId].find(s => s.id === item.speciesId);
if (!existingSpecies) {
acc[item.fileId].push({
id: item.speciesId,
label: item.speciesLabel,
ebirdCode: item.ebirdCode,
description: item.description
});
}
return acc;
}, {} as Record<string, Array<{
id: string;
label: string;
ebirdCode: string | null;
description: string | null;
}>>);
// Combine file data with metadata
const files = filesResult.map(fileData => ({
...fileData,
metadata: metadataMap[fileData.id] || null,
mothMetadata: mothMetadataMap[fileData.id] || null,
species: speciesMap[fileData.id] || []
}));
// Return paginated results with metadata and filters
return c.json({
data: files,
pagination: {
currentPage: page,
pageSize: limitedPageSize,
totalPages: totalPages,
totalItems: totalFiles,
hasNextPage: page < totalPages,
hasPreviousPage: page > 1,
},
filters: {
solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
speciesId: speciesId || null,
}
});
} catch (error) {
console.error("Error fetching files:", error);
return c.json(
{
error: "Failed to fetch files",
details: error instanceof Error ? error.message : String(error),
},
500
);
}