OBXY6BHNROIV7W4O3MMQ6GTQXU2XUKF5A4DCJYDLUEUHV72E7BNAC
PPUYJO5DER6JV6V4MVNMQ4O7L3R3XYVCJCDFSHPZOAPCHGBG4AQQC
GIAIXNFCQIP5ZHBFXWFWDTLDUGV35MDC3ARQCVAN6FEWIX7Q32GQC
GDEZHFJTAHWBJGQ3UD6DAZNWORGIR5RV6VM3IURGL3R7OGXSNFMAC
YX7LU4WRAUDMWS3DEDXZDSF6DXBHLYDWVSMSRK6KIW3MO6GRXSVQC
2WKGHT2TVFMQT7VUL5OCYVNE365QOYNRXA34LOZ4DBFJ2ZVB4XQQC
M3JUJ2WWZGCVMBITKRM5FUJMHFYL2QRMXJUVRUE4AC2RF74AOL5AC
EUEH65HBT4XZXCNWECJXNDEQAWR2NLNSAXFPXLMQ27NOVMQBJT5QC
QTSC7SK437F5SFMYSP74I6WRWU2KSKYQJNPCPSSXWRMIFZZPQ6WQC
4M3EBLTLSS2BRCM42ZP7WVD4YMRRLGV2P2XF47IAV5XHHJD52HTQC
ROQGXQWL2V363K3W7TVVYKIAX4N4IWRERN5BJ7NYJRRVB6OMIJ4QC
7ESBJZLIH3TAERLH2HIZAAQHVNVHQWLWCOBLMJRFTIL33YITJNIAC
LYPSC7BOH6T45FCPRHSCXILAJSJ74D5WSQTUIKPWD5ECXOYGUY5AC
HVD2NGYM4J2PKQ72SFMXLBGQPKPCGDRB54IK3ISOQZDWKGM3WGQQC
ZYT3JRERMYXLMJHLPZYQHAINVMPQLBKGGN7A4C7OTVZDY42ZLTKQC
4RBE543WLHA7PIYT4W7YEJPF6XKZ2UGKPJBQ3CTLJ44AOMGHCEYQC
POIBWSL3JFHT2KN3STFSJX3INSYKEJTX6KSW3N7BVEKWX2GJ6T7QC
OSNBT6AANZB3TF7HAJ35N3Z2EGDU5VQ4LGQORKMA25ACMNV35CQQC
7FCKFQUZIWQR7GYO7KNQDTZBM72PAE7EQBHSWM2J3MXBGDUVWQZAC
J2RLNDEXTGAV4BB6ANIIR7XJLJBHSB4NFQWSBWHNAFB6DMLGS5RAC
KGUU3ZMRY65ZN5G6QC7P7ORPAXXTIMTDBJ37IC6AOKMGQJQ7FVMQC
RLH37YB4D7O42IFM2T7GJG4AVVAURWBZ7AOTHAWR7YJZRG3JOPLQC
PVQBFR72OCQGYF2G2KDWNKBHWJ24N6D653X6KARBGUSYBIHIXPRQC
4A64WM7PDB5LRDEIRPMKE3SFOULYEHTMMJ3PC4NR2N34ZKRY4XYQC
36IKBCJKMV6NPBP2TYPZFE7IY22K33VTKETTFYLB7AN2Y7TI6ZAQC
6WNXYJBM63UZH5WW6S73IAKVJBMMUMITJHEHA5OZQXVRBLWUXSWQC
HM75N4NTZ4BBSSDC7TUSYOQ4SIF3G6KPZA5QRYCVCVRSKQVTJAXAC
2SXAHYTJY4S3NBJVNGQZQOYACHU5MEPMV2HH3BGW4DTF42AQSBLAC
JHFIJJSLVMQNYIDE6CXWUK5UTB7BTUSY7NCI33WKCWIRZHCSMBHQC
OWHNUYOKSGQTG6ZSVQZ3DNMYCW3IOEMXGRRS5QRRDAPIG4MOODNAC
U4CVCPSGPGYWJ4PA72HHHCHKJSQW3GU2QFK4YFSMKQOEM2MMY3PQC
XTU6PGJEQEQ4WLCZDUQPDBB3H3JST4NO7QBPYJ3ZMKTZ4QH3U3OAC
UCDTBEK3CF6YT2H6V57HI6FAFW44BIYYAK3Z2QJ5LJE7QWX7OEYAC
import { drizzle } from "drizzle-orm/neon-http";
import { neon } from "@neondatabase/serverless";
import type { Env } from "../types";

/**
 * Build a Drizzle ORM instance backed by a Neon serverless HTTP connection.
 *
 * @param env - Worker environment bindings; DATABASE_URL holds the Neon connection string
 * @returns A Drizzle database handle ready for queries
 */
export function createDatabase(env: Env) {
  const client = neon(env.DATABASE_URL);
  return drizzle(client);
}
/**
 * Environment variables required for the Worker.
 *
 * Bindings that must be configured in the Cloudflare Workers environment:
 * - DATABASE_URL: Connection string for the Neon PostgreSQL database
 * - KINDE_ISSUER_URL: URL of the Kinde identity provider for JWT verification
 */
export type Env = {
  /** Connection string for the Neon PostgreSQL database */
  DATABASE_URL: string;
  /** URL of the Kinde identity provider (used to derive the JWKS endpoint for JWT verification) */
  KINDE_ISSUER_URL: string;
};

/**
 * Structure of the JWT token payload from the Kinde identity provider.
 *
 * Contains the standard JWT claims (sub, iat, exp, aud, iss) plus an index
 * signature for any additional custom claims Kinde includes. Used for
 * extracting user information after successful token verification.
 */
export interface JWTPayload {
  sub: string; // Subject (user ID)
  iat: number; // Issued at
  exp: number; // Expiration time
  aud: string[]; // Audience
  iss: string; // Issuer
  [key: string]: unknown; // Additional claims (shape depends on Kinde configuration)
}
import { Hono } from "hono";
import { authenticate } from "../middleware/auth";
import type { Env } from "../types";

/** Species routes — every endpoint is a placeholder until implemented. */
const species = new Hono<{ Bindings: Env }>();

/** Payload shared by the unimplemented species endpoints. */
const STUB = { error: "Not implemented yet" };

// All species routes require authentication and currently answer 501.
species.get("/", authenticate, async (c) => c.json(STUB, 501));
species.post("/", authenticate, async (c) => c.json(STUB, 501));
species.put("/:id", authenticate, async (c) => c.json(STUB, 501));

export default species;
import { Hono } from "hono";
import { authenticate } from "../middleware/auth";
import type { Env } from "../types";

/** Selections routes — placeholder until implemented. */
const selections = new Hono<{ Bindings: Env }>();

// Authenticated stub: answers 501 until the real handler lands.
selections.get("/", authenticate, async (c) =>
  c.json({ error: "Not implemented yet" }, 501)
);

export default selections;
import { Hono } from "hono";import { eq, sql as sqlExpr } from "drizzle-orm";import { location } from "../../../db/schema";import { authenticate, checkUserPermission } from "../middleware/auth";import { createDatabase } from "../utils/database";import type { Env, JWTPayload } from "../types";const locations = new Hono<{ Bindings: Env }>();/*** Protected API route to fetch locations for a specific dataset** @route GET /api/locations* @authentication Required* @param {string} datasetId - Required query parameter specifying the dataset to fetch locations from* @param {number} [page=1] - Optional page number for pagination (starts at 1)* @param {number} [pageSize=100] - Optional page size (1-100, defaults to 100)* @returns {Object} Response containing:* - data: Array of location objects with id, name, latitude, longitude, description* - pagination: Object with pagination metadata (currentPage, pageSize, totalPages, totalItems, etc.)* @error 400 - If datasetId is missing or page is invalid* @description Returns active locations for the specified dataset with pagination support*/locations.get("/", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT // Subject claim usually contains the user ID// Get the dataset ID from query parameterconst datasetId = c.req.query("datasetId");const page = parseInt(c.req.query("page") || "1", 10);const pageSize = parseInt(c.req.query("pageSize") || "100", 10);console.log("Locations API called with datasetId:", datasetId, "userId:", userId, "page:", page);if (!datasetId) {console.log("Missing datasetId in request");return c.json({error: "Missing required query parameter: datasetId"}, 400);}// Validate page numberif (page < 1) {return c.json({error: "Invalid page parameter: must be greater than 0"}, 400);}// Limit page size between 1 and 100const limitedPageSize = Math.min(Math.max(pageSize, 1), 
100);const offset = (page - 1) * limitedPageSize;// Connect to the databaseconst db = createDatabase(c.env);// First, get total count for paginationconsole.log("Counting locations for datasetId:", datasetId);const countResult = await db.select({count: sqlExpr<number>`COUNT(*)`}).from(location).where(sqlExpr`${location.datasetId} = ${datasetId} AND ${location.active} = true`);const totalLocations = Number(countResult[0].count);const totalPages = Math.ceil(totalLocations / limitedPageSize);// Query locations for the specified dataset with pagination (using 100 as default limit)console.log("Querying locations for datasetId:", datasetId, "page:", page);const results = await db.select({id: location.id,name: location.name,latitude: location.latitude,longitude: location.longitude,description: location.description,}).from(location).where(sqlExpr`${location.datasetId} = ${datasetId} AND ${location.active} = true`).orderBy(location.name).limit(limitedPageSize).offset(offset);console.log("Found", results.length, "locations for dataset", datasetId, "page:", page);return c.json({data: results,pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalLocations,hasNextPage: page < totalPages,hasPreviousPage: page > 1,}});} catch (error) {console.error("Error fetching locations:", error);return c.json({error: "Failed to fetch locations",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to create a new location** @route POST /api/locations* @authentication Required* @param {Object} body - Location data including datasetId, name, description, latitude, longitude* @returns {Object} Response containing:* - data: Created location object* @description Creates a new location with the authenticated user as owner.* Requires EDIT permission on the dataset to create locations within it.*/locations.post("/", authenticate, async (c) => {try {// Get user ID from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Database connectionconst db = createDatabase(c.env);// Parse and validate request bodyconst body = await c.req.json();const { id, datasetId, name, description, latitude, longitude } = body;// Field validationif (!id || typeof id !== 'string') {return c.json({error: "Missing or invalid required field: id"}, 400);}if (!datasetId || typeof datasetId !== 'string') {return c.json({error: "Missing or invalid required field: datasetId"}, 400);}if (!name || typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Missing or invalid required field: name"}, 400);}// Length validationif (id.length !== 12) {return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}// Validate latitude and longitude if providedif (latitude !== null && latitude !== undefined) {const lat = Number(latitude);if (isNaN(lat) || lat < -90 || lat > 90) {return c.json({error: "Field 'latitude' must be a valid number between -90 and 90"}, 400);}}if (longitude !== null && longitude !== undefined) {const lng = Number(longitude);if (isNaN(lng) || lng < -180 || lng > 180) {return c.json({error: "Field 
'longitude' must be a valid number between -180 and 180"}, 400);}}// Check if user has EDIT permission on the datasetconst hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');if (!hasPermission) {return c.json({error: "You don't have permission to create locations in this dataset"}, 403);}// Create location objectconst now = new Date();const newLocation = {id: id.trim(),datasetId: datasetId.trim(),name: name.trim(),description: description?.trim() || null,latitude: latitude !== null && latitude !== undefined ? String(Number(latitude)) : null,longitude: longitude !== null && longitude !== undefined ? String(Number(longitude)) : null,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,active: true,};// Insert the locationconst result = await db.insert(location).values(newLocation).returning({id: location.id,datasetId: location.datasetId,name: location.name,description: location.description,latitude: location.latitude,longitude: location.longitude,createdAt: location.createdAt,createdBy: location.createdBy,lastModified: location.lastModified,modifiedBy: location.modifiedBy,active: location.active,});console.log("Created location:", result[0].id, "for dataset:", datasetId, "by user:", userId);return c.json({data: result[0]}, 201);} catch (error) {console.error("Error creating location:", error);// Handle unique constraint violationsif (error instanceof Error && error.message.includes('duplicate key')) {return c.json({error: "A location with this ID already exists"}, 400);}return c.json({error: "Failed to create location",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to update an existing location** @route PUT /api/locations/:id* @authentication Required* @param {string} id - Location ID in URL path* @param {Object} body - Location data to update (name, description, latitude, longitude, active)* @returns {Object} Response containing:* - data: Updated location object* @description Updates an existing location. Requires EDIT permission on the dataset.*/locations.put("/:id", authenticate, async (c) => {try {// Get user from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Get location ID from URL parametersconst locationId = c.req.param("id");// Parse request bodyconst body = await c.req.json();const { name, description, latitude, longitude, active } = body;// Connect to databaseconst db = createDatabase(c.env);// Check if location exists and get its dataset IDconst existingLocation = await db.select({id: location.id,datasetId: location.datasetId,active: location.active}).from(location).where(eq(location.id, locationId)).limit(1);if (existingLocation.length === 0) {return c.json({error: "Location not found"}, 404);}const locationRecord = existingLocation[0];// Check if user has EDIT permission on the datasetconst hasEditPermission = await checkUserPermission(db, userId, locationRecord.datasetId, 'EDIT');if (!hasEditPermission) {return c.json({error: "You don't have permission to edit this location"}, 403);}// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};// Validate and add name if providedif (name !== undefined) {if (typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Field 'name' must be a non-empty string"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}updateData.name = name.trim();}// Validate and add description if providedif 
(description !== undefined) {if (description !== null && typeof description !== 'string') {return c.json({error: "Field 'description' must be a string or null"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}updateData.description = description?.trim() || null;}// Validate and add latitude if providedif (latitude !== undefined) {if (latitude !== null) {const lat = Number(latitude);if (isNaN(lat) || lat < -90 || lat > 90) {return c.json({error: "Field 'latitude' must be a valid number between -90 and 90"}, 400);}updateData.latitude = String(lat);} else {updateData.latitude = null;}}// Validate and add longitude if providedif (longitude !== undefined) {if (longitude !== null) {const lng = Number(longitude);if (isNaN(lng) || lng < -180 || lng > 180) {return c.json({error: "Field 'longitude' must be a valid number between -180 and 180"}, 400);}updateData.longitude = String(lng);} else {updateData.longitude = null;}}// Add active status if provided (for soft delete)if (active !== undefined) {if (typeof active !== 'boolean') {return c.json({error: "Field 'active' must be a boolean"}, 400);}updateData.active = active;}// Update the locationconst result = await db.update(location).set(updateData).where(eq(location.id, locationId)).returning({id: location.id,datasetId: location.datasetId,name: location.name,description: location.description,latitude: location.latitude,longitude: location.longitude,createdAt: location.createdAt,createdBy: location.createdBy,lastModified: location.lastModified,modifiedBy: location.modifiedBy,active: location.active,});if (result.length === 0) {return c.json({error: "Failed to update location"}, 500);}console.log("Updated location:", result[0].id, "by user:", userId);return c.json({data: result[0]});} catch (error) {console.error("Error updating location:", error);return c.json({error: "Failed to update location",details: error instanceof Error ? 
error.message : String(error),},500);}});export default locations;
import { Hono } from "hono";import { authenticate } from "../middleware/auth";import type { Env } from "../types";const files = new Hono<{ Bindings: Env }>();files.get("/", authenticate, async (c) => {// Large complex route - implementation extracted from original index.tsreturn c.json({ error: "Not implemented yet" }, 501);});export default files;
import { Hono } from "hono";
import { authenticate } from "../middleware/auth";
import type { Env } from "../types";

/** eBird routes — placeholder until implemented. */
const ebird = new Hono<{ Bindings: Env }>();

// Authenticated stub: answers 501 until the search handler is built.
ebird.get("/search", authenticate, async (c) =>
  c.json({ error: "Not implemented yet" }, 501)
);

export default ebird;
import { Hono } from "hono";
import { eq, sql as sqlExpr } from "drizzle-orm";
import {
dataset,
accessGrant,
userRole
} from "../../../db/schema";
import { authenticate, checkUserPermission } from "../middleware/auth";
import { createDatabase } from "../utils/database";
import type { Env, JWTPayload } from "../types";

const datasets = new Hono<{ Bindings: Env }>();

/**
 * Protected API route to fetch datasets with role-based access control
 *
 * @route GET /api/datasets
 * @authentication Required
 * @returns {Object} Response containing:
 * - data: Array of dataset objects with permissions for the authenticated user
 * - userId: The authenticated user's ID (for debugging)
 * @description Returns datasets the user has READ access to, along with their specific permissions
 * @example Response:
 * {
 *   "data": [
 *     {
 *       "id": "123",
 *       "name": "Bird Sounds 2024",
 *       "description": "Costa Rica recordings",
 *       "public": true,
 *       "permissions": ["READ", "EDIT", "DELETE"]
 *     }
 *   ],
 *   "userId": "user_123abc"
 * }
 */
datasets.get("/", authenticate, async (c) => {
try {
// Get the JWT payload (user info)
const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
const userId = jwtPayload.sub; // User ID from JWT
// Connect to the database
const db = createDatabase(c.env);
// First, get the user's role; users without a role row default to 'USER'
const userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);
const userRoleName = userRoleResult.length > 0 ? userRoleResult[0].role : 'USER';
// Query to get datasets with permissions
// This complex query gets datasets where the user has READ access either through:
// 1. Direct user-specific grants
// 2. Role-based grants for their role (grant rows with userId NULL)
// 3. Being the owner of the dataset
// NOTE: the SQL fragments below are runtime template literals — their exact
// text is the generated SQL, so they are kept on single lines verbatim.
const results = await db.select({
id: dataset.id,
name: dataset.name,
description: dataset.description,
public: dataset.public,
createdAt: dataset.createdAt,
owner: dataset.owner,
type: dataset.type,
permission: accessGrant.permission,
userId: accessGrant.userId
}).from(dataset).leftJoin(
accessGrant,
sqlExpr`${accessGrant.datasetId} = ${dataset.id} AND ${accessGrant.active} = true AND ((${accessGrant.userId} = ${userId}) OR(${accessGrant.userId} IS NULL AND ${accessGrant.role} = ${userRoleName}))`
).where(
sqlExpr`${dataset.active} = true AND (${dataset.owner} = ${userId} OR${accessGrant.permission} IS NOT NULL)`
).orderBy(dataset.name);
// Group the join rows (one per matching grant) by dataset and collect the
// distinct permissions for each dataset.
const datasetMap = new Map<string, {
id: string;
name: string;
description: string | null;
public: boolean;
createdAt: string;
owner: string;
type: string;
permissions: string[];
}>();
results.forEach(row => {
if (!datasetMap.has(row.id)) {
// If user is owner, they have all permissions
const isOwner = row.owner === userId;
const basePermissions = isOwner ? ['READ', 'UPLOAD', 'DOWNLOAD', 'EDIT', 'DELETE'] : [];
datasetMap.set(row.id, {
id: row.id,
name: row.name,
description: row.description,
public: row.public ?? false,
createdAt: row.createdAt?.toISOString() ?? new Date().toISOString(),
owner: row.owner,
type: row.type,
permissions: basePermissions
});
}
// Add permission from access grants (avoid duplicates)
if (row.permission && !datasetMap.get(row.id)!.permissions.includes(row.permission)) {
datasetMap.get(row.id)!.permissions.push(row.permission);
}
});
// Filter datasets to only include those with READ permission
const datasetsWithReadAccess = Array.from(datasetMap.values()).filter(dataset => dataset.permissions.includes('READ')).slice(0, 20); // Limit to 20 datasets
return c.json({
data: datasetsWithReadAccess,
userId: userId,
userRole: userRoleName
});
} catch (error) {
console.error("Error fetching datasets:", error);
return c.json({
error: "Failed to fetch datasets",
details: error instanceof Error ? error.message : String(error),
},
500);
}
});

/**
 * Protected API route to create a new dataset
 *
 * @route POST /api/datasets
 * @authentication Required
 * @body {Object} Dataset creation payload:
 * - id: string (nanoid(12) - user generated)
 * - name: string (required, max 255 chars)
 * - description?: string (optional, max 255 chars)
 * - public?: boolean (optional, defaults to false)
 * - type?: string (optional, defaults to 'organise')
 * @returns {Object} Response containing:
 * - data: The created dataset object
 * @error 400 - If required fields are missing or invalid
 * @error 403 - If the user's role is not ADMIN or CURATOR
 * @error 500 - If database operation fails
 * @description Creates a new dataset for the authenticated user
 * The user becomes the owner, creator, and modifier of the dataset
 */
datasets.post("/", authenticate, async (c) => {
try {
// Get the JWT payload (user info)
const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
const userId = jwtPayload.sub; // User ID from JWT
// Connect to the database first to check permissions
const db = createDatabase(c.env);
// Check if user has permission to create datasets (ADMIN or CURATOR roles)
const userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);
const userRoleName = userRoleResult.length > 0 ? userRoleResult[0].role : 'USER';
if (userRoleName !== 'ADMIN' && userRoleName !== 'CURATOR') {
return c.json({error: "You don't have permission to create datasets"}, 403);
}
// Parse request body
const body = await c.req.json();
const { id, name, description, public: isPublic, type } = body;
// Validate required fields
if (!id || typeof id !== 'string') {
return c.json({error: "Missing or invalid required field: id"}, 400);
}
if (!name || typeof name !== 'string' || name.trim().length === 0) {
return c.json({error: "Missing or invalid required field: name"}, 400);
}
// Validate field lengths
if (id.length !== 12) {
return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);
}
if (name.length > 255) {
return c.json({error: "Field 'name' must be 255 characters or less"}, 400);
}
if (description && description.length > 255) {
return c.json({error: "Field 'description' must be 255 characters or less"}, 400);
}
// Validate type if provided
const validTypes = ['organise', 'test', 'train'];
const datasetType = type || 'organise';
if (!validTypes.includes(datasetType)) {
return c.json({error: `Field 'type' must be one of: ${validTypes.join(', ')}`}, 400);
}
// Create the dataset; the caller is owner, creator and modifier
const now = new Date();
const newDataset = {
id: id.trim(),
name: name.trim(),
description: description?.trim() || null,
public: Boolean(isPublic),
type: datasetType,
createdBy: userId,
createdAt: now,
lastModified: now,
modifiedBy: userId,
owner: userId,
active: true,
};
// Insert the dataset
const result = await db.insert(dataset).values(newDataset).returning({
id: dataset.id,
name: dataset.name,
description: dataset.description,
public: dataset.public,
type: dataset.type,
createdAt: dataset.createdAt,
owner: dataset.owner,
});
console.log("Created dataset:", result[0].id, "for user:", userId);
return c.json({data: result[0]}, 201);
} catch (error) {
console.error("Error creating dataset:", error);
// Handle unique constraint violations (duplicate client-supplied id)
if (error instanceof Error && error.message.includes('duplicate key')) {
return c.json({error: "A dataset with this ID already exists"}, 400);
}
return c.json({
error: "Failed to create dataset",
details: error instanceof Error ? error.message : String(error),
},
500);
}
});

/**
 * Protected API route to update an existing dataset
 *
 * @route PUT /api/datasets/:id
 * @authentication Required
 * @param {string} id - Dataset ID in URL path
 * @body {Object} Dataset update payload:
 * - name?: string (optional, max 255 chars)
 * - description?: string (optional, max 255 chars)
 * - public?: boolean (optional)
 * - type?: string (optional)
 * - active?: boolean (optional, for soft delete)
 * @returns {Object} Response containing:
 * - data: The updated dataset object
 * @error 400 - If fields are invalid or dataset not found
 * @error 403 - If user lacks EDIT permission on the dataset
 * @error 500 - If database operation fails
 * @description Updates an existing dataset; only users with EDIT permission
 * (owner, or via access grants) can modify it
 */
datasets.put("/:id", authenticate, async (c) => {
try {
// Get the JWT payload (user info)
const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
const userId = jwtPayload.sub; // User ID from JWT
// Get dataset ID from URL parameters
const datasetId = c.req.param("id");
if (!datasetId) {
return c.json({error: "Missing dataset ID in URL"}, 400);
}
// Parse request body
const body = await c.req.json();
const { name, description, public: isPublic, type, active } = body;
// Connect to the database
const db = createDatabase(c.env);
// First, check if the dataset exists
const existingDataset = await db.select({
id: dataset.id,
owner: dataset.owner,
active: dataset.active
}).from(dataset).where(eq(dataset.id, datasetId)).limit(1);
if (existingDataset.length === 0) {
return c.json({error: "Dataset not found"}, 404);
}
// Check if user has permission to edit this dataset
const hasEditPermission = await checkUserPermission(db, userId, datasetId, 'EDIT');
if (!hasEditPermission) {
return c.json({error: "You don't have permission to modify this dataset"}, 403);
}
// Validate fields if provided
if (name !== undefined) {
if (typeof name !== 'string' || name.trim().length === 0) {
return c.json({error: "Invalid field: name must be a non-empty string"}, 400);
}
if (name.length > 255) {
return c.json({error: "Field 'name' must be 255 characters or less"}, 400);
}
}
if (description !== undefined && description !== null && description.length > 255) {
return c.json({error: "Field 'description' must be 255 characters or less"}, 400);
}
if (type !== undefined) {
const validTypes = ['organise', 'test', 'train'];
if (!validTypes.includes(type)) {
return c.json({error: `Field 'type' must be one of: ${validTypes.join(', ')}`}, 400);
}
}
// Build update object with only provided fields; audit columns always set
const updateData: Record<string, unknown> = {
lastModified: new Date(),
modifiedBy: userId,
};
if (name !== undefined) {
updateData.name = name.trim();
}
if (description !== undefined) {
updateData.description = description?.trim() || null;
}
if (isPublic !== undefined) {
updateData.public = Boolean(isPublic);
}
if (type !== undefined) {
updateData.type = type;
}
if (active !== undefined) {
updateData.active = Boolean(active);
}
// Update the dataset
const result = await db.update(dataset).set(updateData).where(eq(dataset.id, datasetId)).returning({
id: dataset.id,
name: dataset.name,
description: dataset.description,
public: dataset.public,
type: dataset.type,
createdAt: dataset.createdAt,
lastModified: dataset.lastModified,
owner: dataset.owner,
active: dataset.active,
});
if (result.length === 0) {
return c.json({error: "Failed to update dataset"}, 500);
}
console.log("Updated dataset:", result[0].id, "for user:", userId);
return c.json({data: result[0]});
} catch (error) {
console.error("Error updating dataset:", error);
return c.json({
error: "Failed to update dataset",
details: error instanceof Error ? error.message : String(error),
},
500);
}
});

export default datasets;
import { Hono } from "hono";import { eq } from "drizzle-orm";import {cluster,cyclicRecordingPattern} from "../../../db/schema";import { authenticate } from "../middleware/auth";import { createDatabase } from "../utils/database";import type { Env } from "../types";const clusters = new Hono<{ Bindings: Env }>();clusters.get("/", authenticate, async (c) => {try {// Authentication handled by middlewareconst locationId = c.req.query("locationId");if (!locationId) {return c.json({error: "Missing required query parameter: locationId"}, 400);}const db = createDatabase(c.env);const joinedResults = await db.select({id: cluster.id,datasetId: cluster.datasetId,locationId: cluster.locationId,name: cluster.name,description: cluster.description,createdBy: cluster.createdBy,createdAt: cluster.createdAt,lastModified: cluster.lastModified,modifiedBy: cluster.modifiedBy,active: cluster.active,timezoneId: cluster.timezoneId,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId,sampleRate: cluster.sampleRate,recordS: cyclicRecordingPattern.recordS,sleepS: cyclicRecordingPattern.sleepS}).from(cluster).leftJoin(cyclicRecordingPattern,eq(cluster.cyclicRecordingPatternId, cyclicRecordingPattern.id)).where(eq(cluster.locationId, locationId)).orderBy(cluster.name);const enrichedResults = joinedResults.map(row => ({id: row.id,datasetId: row.datasetId,locationId: row.locationId,name: row.name,description: row.description,createdBy: row.createdBy,createdAt: row.createdAt,lastModified: row.lastModified,modifiedBy: row.modifiedBy,active: row.active,timezoneId: row.timezoneId,cyclicRecordingPatternId: row.cyclicRecordingPatternId,sampleRate: row.sampleRate,recordingPattern: (row.recordS !== null && row.sleepS !== null) ? {recordS: row.recordS,sleepS: row.sleepS} : null}));return c.json({data: enrichedResults});} catch (error) {console.error("Error fetching clusters:", error);return c.json({error: "Failed to fetch clusters",details: error instanceof Error ? 
error.message : String(error),},500);}});// POST and PUT routes simplified for space - they follow similar patternsclusters.post("/", authenticate, async (c) => {// Implementation would go here - similar to locations patternreturn c.json({ error: "Not implemented yet" }, 501);});clusters.put("/:id", authenticate, async (c) => {// Implementation would go here - similar to locations patternreturn c.json({ error: "Not implemented yet" }, 501);});clusters.delete("/:id", authenticate, async (c) => {// Implementation would go here - similar to locations patternreturn c.json({ error: "Not implemented yet" }, 501);});export default clusters;
import { Hono } from "hono";
import { authenticate } from "../middleware/auth";
import type { Env } from "../types";

/** Call-type routes — placeholders until implemented. */
const callTypes = new Hono<{ Bindings: Env }>();

/** Payload shared by the unimplemented call-type endpoints. */
const STUB = { error: "Not implemented yet" };

// Both routes require authentication and currently answer 501.
callTypes.post("/", authenticate, async (c) => c.json(STUB, 501));
callTypes.put("/:id", authenticate, async (c) => c.json(STUB, 501));

export default callTypes;
import { Context } from "hono";import { eq, sql as sqlExpr } from "drizzle-orm";import * as jose from 'jose';import { dataset, accessGrant, userRole } from "../../../db/schema";import type { Env, JWTPayload } from "../types";/*** Helper function to check if a user has a specific permission for a dataset** @param db - Database connection* @param userId - User ID to check permissions for* @param datasetId - Dataset ID to check permissions for* @param permission - Permission to check (READ, EDIT, DELETE, etc.)* @returns Promise<boolean> - True if user has permission, false otherwise*/export async function checkUserPermission(// eslint-disable-next-line @typescript-eslint/no-explicit-anydb: any,userId: string,datasetId: string,permission: string): Promise<boolean> {try {// First check if user is the owner (owners have all permissions)const ownerCheck = await db.select({ owner: dataset.owner }).from(dataset).where(eq(dataset.id, datasetId)).limit(1);if (ownerCheck.length > 0 && ownerCheck[0].owner === userId) {return true;}// Get user's roleconst userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);const userRoleName = userRoleResult.length > 0 ? 
userRoleResult[0].role : 'USER';// Check access grantsconst grants = await db.select({ permission: accessGrant.permission }).from(accessGrant).where(sqlExpr`${accessGrant.datasetId} = ${datasetId} AND${accessGrant.permission} = ${permission} AND${accessGrant.active} = true AND((${accessGrant.userId} = ${userId}) OR(${accessGrant.userId} IS NULL AND ${accessGrant.role} = ${userRoleName}))`).limit(1);return grants.length > 0;} catch (error) {console.error('Error checking user permission:', error);return false;}}/*** Authentication middleware for protected routes** Verifies JWT tokens from the Kinde identity provider using JWKS.* On successful verification, adds the JWT payload to the context* for use in route handlers.** @param c - Hono context containing request, response, and environment bindings* @param next - Next middleware in the chain* @returns Response with 401 status on auth failure, or passes to next middleware*/export const authenticate = async (c: Context<{ Bindings: Env }>, next: () => Promise<void>): Promise<Response | void> => {// Get the authorization headerconst authHeader = c.req.header("Authorization");if (!authHeader || !authHeader.startsWith("Bearer ")) {return c.json({ error: "Unauthorized: Missing or invalid token" }, 401);}try {// Extract tokenconst token = authHeader.split(" ")[1];console.log("Received token:", token.substring(0, 10) + "...");// Get the JWKS endpoint from the Kinde issuerconst issuerUrl = c.env.KINDE_ISSUER_URL;const jwksUrl = `${issuerUrl}/.well-known/jwks.json`;console.log("JWKS URL:", jwksUrl);// Fetch the JWKSconsole.log("Fetching JWKS...");const jwksResponse = await fetch(jwksUrl);console.log("JWKS response status:", jwksResponse.status);if (!jwksResponse.ok) {throw new Error(`Failed to fetch JWKS: ${jwksResponse.status}`);}await jwksResponse.json(); // Parse JSON to validate responseconsole.log("JWKS keys received");// Create JWKS from the fetched keysconst keyStore = jose.createRemoteJWKSet(new 
URL(jwksUrl));console.log("Verifying token...");console.log("Using issuer:", issuerUrl);try {// Verify the token with issuer only (no audience validation)const { payload } = await jose.jwtVerify(token, keyStore, {issuer: issuerUrl,// No audience validation - Kinde tokens may have empty audience array});console.log("Token verified successfully!");// Store the payload in the context for use in the route handler// Use unknown-casting to work around Hono's type limitations(c as unknown as { jwtPayload: JWTPayload }).jwtPayload = payload as JWTPayload;} catch (verifyError) {console.error("Token verification failed:", verifyError);throw verifyError;}// If we get here, the token is validawait next();} catch (error) {console.error("JWT validation error:", error);return c.json({ error: "Unauthorized: Invalid token" }, 401);}};
/*** Worker API implementation for Skraak** This file implements the server-side API using Hono on Cloudflare Workers.* It handles:* - JWT-based authentication via Kinde identity provider* - API endpoints for retrieving acoustic datasets, locations, clusters* - File and selection management with filtering capabilities* - Species and call type information** The API connects to a Neon PostgreSQL database for data storage and retrieval.* All endpoints except the health check (/api) require authentication.*/import { Context, Hono } from "hono";import { drizzle } from "drizzle-orm/neon-http";import { eq, and, sql as sqlExpr } from "drizzle-orm";import { neon } from "@neondatabase/serverless";import {dataset,location,cluster,cyclicRecordingPattern,file,fileMetadata,mothMetadata,species,speciesDataset,callType,selection,label,accessGrant,userRole} from "../../db/schema";import * as jose from 'jose';import { nanoid } from 'nanoid';/*** Environment variables required for the Worker** @interface Env* @description Defines the environment variables that must be configured in the Cloudflare Workers environment* - DATABASE_URL: Connection string for the Neon PostgreSQL database* - KINDE_ISSUER_URL: URL of the Kinde identity provider for JWT verification*/export type Env = {/** Connection string for the Neon PostgreSQL database */DATABASE_URL: string;/** URL of the Kinde identity provider (for JWT verification) */KINDE_ISSUER_URL: string;};/*** Structure of the JWT token payload from Kinde identity provider** @interface JWTPayload* @description Defines the structure of the JWT token payload used for authentication* Contains standard JWT claims (sub, iat, exp, aud, iss) plus any additional custom claims* Used for extracting user information after successful token verification*/interface JWTPayload {sub: string; // Subject (user ID)iat: number; // Issued atexp: number; // Expiration timeaud: string[]; // Audienceiss: string; // Issuer[key: string]: unknown; // Additional 
claims}/*** Main Hono application instance with environment bindings** @type {Hono<{Bindings: Env}>}* @description The core Hono application instance that handles all API routes* Uses generic typing to bind the Env interface for environment variable access*/const app = new Hono<{ Bindings: Env }>();/*** Helper function to check if a user has a specific permission for a dataset** @param db - Database connection* @param userId - User ID to check permissions for* @param datasetId - Dataset ID to check permissions for* @param permission - Permission to check (READ, EDIT, DELETE, etc.)* @returns Promise<boolean> - True if user has permission, false otherwise*/async function checkUserPermission(// eslint-disable-next-line @typescript-eslint/no-explicit-anydb: any,userId: string,datasetId: string,permission: string): Promise<boolean> {try {// First check if user is the owner (owners have all permissions)const ownerCheck = await db.select({ owner: dataset.owner }).from(dataset).where(eq(dataset.id, datasetId)).limit(1);if (ownerCheck.length > 0 && ownerCheck[0].owner === userId) {return true;}// Get user's roleconst userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);const userRoleName = userRoleResult.length > 0 ? 
userRoleResult[0].role : 'USER';// Check access grantsconst grants = await db.select({ permission: accessGrant.permission }).from(accessGrant).where(sqlExpr`${accessGrant.datasetId} = ${datasetId} AND${accessGrant.permission} = ${permission} AND${accessGrant.active} = true AND((${accessGrant.userId} = ${userId}) OR(${accessGrant.userId} IS NULL AND ${accessGrant.role} = ${userRoleName}))`).limit(1);return grants.length > 0;} catch (error) {console.error('Error checking user permission:', error);return false;}}/*** Public API health check endpoint** @route GET /api* @authentication None - publicly accessible* @returns {Object} Simple status object with "ok" status* @description Simple endpoint to verify API is running and responding* Can be used for health checks and monitoring*/app.get("/api", (c) => c.json({ status: "ok" }));/*** Authentication middleware for protected routes** Verifies JWT tokens from the Kinde identity provider using JWKS.* On successful verification, adds the JWT payload to the context* for use in route handlers.** @param c - Hono context containing request, response, and environment bindings* @param next - Next middleware in the chain* @returns Response with 401 status on auth failure, or passes to next middleware*/const authenticate = async (c: Context<{ Bindings: Env }>, next: () => Promise<void>): Promise<Response | void> => {// Get the authorization headerconst authHeader = c.req.header("Authorization");if (!authHeader || !authHeader.startsWith("Bearer ")) {return c.json({ error: "Unauthorized: Missing or invalid token" }, 401);}try {// Extract tokenconst token = authHeader.split(" ")[1];console.log("Received token:", token.substring(0, 10) + "...");// Get the JWKS endpoint from the Kinde issuerconst issuerUrl = c.env.KINDE_ISSUER_URL;const jwksUrl = `${issuerUrl}/.well-known/jwks.json`;console.log("JWKS URL:", jwksUrl);// Fetch the JWKSconsole.log("Fetching JWKS...");const jwksResponse = await fetch(jwksUrl);console.log("JWKS response 
status:", jwksResponse.status);if (!jwksResponse.ok) {throw new Error(`Failed to fetch JWKS: ${jwksResponse.status}`);}await jwksResponse.json(); // Parse JSON to validate responseconsole.log("JWKS keys received");// Create JWKS from the fetched keysconst keyStore = jose.createRemoteJWKSet(new URL(jwksUrl));console.log("Verifying token...");console.log("Using issuer:", issuerUrl);try {// Verify the token with issuer only (no audience validation)const { payload } = await jose.jwtVerify(token, keyStore, {issuer: issuerUrl,// No audience validation - Kinde tokens may have empty audience array});console.log("Token verified successfully!");// Store the payload in the context for use in the route handler// Use unknown-casting to work around Hono's type limitations(c as unknown as { jwtPayload: JWTPayload }).jwtPayload = payload as JWTPayload;} catch (verifyError) {console.error("Token verification failed:", verifyError);throw verifyError;}// If we get here, the token is validawait next();} catch (error) {console.error("JWT validation error:", error);return c.json({ error: "Unauthorized: Invalid token" }, 401);}};// ============================================================================// API ROUTES - Organized by resource// ============================================================================// ============================================================================// DATASETS// ============================================================================/*** Protected API route to fetch datasets with role-based access control** @route GET /api/datasets* @authentication Required* @returns {Object} Response containing:* - data: Array of dataset objects with permissions for the authenticated user* - userId: The authenticated user's ID (for debugging)* @description Returns datasets the user has READ access to, along with their specific permissions* @example Response:* {* "data": [* {* "id": "123",* "name": "Bird Sounds 2024",* "description": "Costa Rica 
recordings",* "public": true,* "permissions": ["READ", "EDIT", "DELETE"]* }* ],* "userId": "user_123abc"* }*/app.get("/api/datasets", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// First, get the user's roleconst userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);const userRoleName = userRoleResult.length > 0 ? userRoleResult[0].role : 'USER';// Query to get datasets with permissions// This complex query gets datasets where the user has READ access either through:// 1. Direct user-specific grants// 2. Role-based grants for their role// 3. Being the owner of the datasetconst results = await db.select({id: dataset.id,name: dataset.name,description: dataset.description,public: dataset.public,createdAt: dataset.createdAt,owner: dataset.owner,type: dataset.type,permission: accessGrant.permission,userId: accessGrant.userId}).from(dataset).leftJoin(accessGrant,sqlExpr`${accessGrant.datasetId} = ${dataset.id} AND ${accessGrant.active} = true AND ((${accessGrant.userId} = ${userId}) OR(${accessGrant.userId} IS NULL AND ${accessGrant.role} = ${userRoleName}))`).where(sqlExpr`${dataset.active} = true AND (${dataset.owner} = ${userId} OR${accessGrant.permission} IS NOT NULL)`).orderBy(dataset.name);// Group results by dataset and collect permissionsconst datasetMap = new Map<string, {id: string;name: string;description: string | null;public: boolean;createdAt: string;owner: string;type: string;permissions: string[];}>();results.forEach(row => {if (!datasetMap.has(row.id)) {// If user is owner, they have all permissionsconst isOwner = row.owner === userId;const basePermissions = isOwner ? 
['READ', 'UPLOAD', 'DOWNLOAD', 'EDIT', 'DELETE'] : [];datasetMap.set(row.id, {id: row.id,name: row.name,description: row.description,public: row.public ?? false,createdAt: row.createdAt?.toISOString() ?? new Date().toISOString(),owner: row.owner,type: row.type,permissions: basePermissions});}// Add permission from access grants (avoid duplicates)if (row.permission && !datasetMap.get(row.id)!.permissions.includes(row.permission)) {datasetMap.get(row.id)!.permissions.push(row.permission);}});// Filter datasets to only include those with READ permissionconst datasetsWithReadAccess = Array.from(datasetMap.values()).filter(dataset => dataset.permissions.includes('READ')).slice(0, 20); // Limit to 20 datasetsreturn c.json({data: datasetsWithReadAccess,userId: userId,userRole: userRoleName});} catch (error) {console.error("Error fetching datasets:", error);return c.json({error: "Failed to fetch datasets",details: error instanceof Error ? error.message : String(error),},500);}});/*** Protected API route to create a new dataset** @route POST /api/datasets* @authentication Required* @body {Object} Dataset creation payload:* - id: string (nanoid(12) - user generated)* - name: string (required, max 255 chars)* - description?: string (optional, max 255 chars)* - public?: boolean (optional, defaults to false)* - type?: string (optional, defaults to 'organise')* @returns {Object} Response containing:* - data: The created dataset object* @error 400 - If required fields are missing or invalid* @error 500 - If database operation fails* @description Creates a new dataset for the authenticated user* The user becomes the owner, creator, and modifier of the dataset*/app.post("/api/datasets", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT// Connect to the database first to check permissionsconst sql = neon(c.env.DATABASE_URL);const db = 
drizzle(sql);// Check if user has permission to create datasets (ADMIN or CURATOR roles)const userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);const userRoleName = userRoleResult.length > 0 ? userRoleResult[0].role : 'USER';if (userRoleName !== 'ADMIN' && userRoleName !== 'CURATOR') {return c.json({error: "You don't have permission to create datasets"}, 403);}// Parse request bodyconst body = await c.req.json();const { id, name, description, public: isPublic, type } = body;// Validate required fieldsif (!id || typeof id !== 'string') {return c.json({error: "Missing or invalid required field: id"}, 400);}if (!name || typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Missing or invalid required field: name"}, 400);}// Validate field lengthsif (id.length !== 12) {return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);}if (name.length > 255) {return c.json({error: "Field 'name' must be 255 characters or less"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}// Validate type if providedconst validTypes = ['organise', 'test', 'train'];const datasetType = type || 'organise';if (!validTypes.includes(datasetType)) {return c.json({error: `Field 'type' must be one of: ${validTypes.join(', ')}`}, 400);}// Create the datasetconst now = new Date();const newDataset = {id: id.trim(),name: name.trim(),description: description?.trim() || null,public: Boolean(isPublic),type: datasetType,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,owner: userId,active: true,};// Insert the datasetconst result = await db.insert(dataset).values(newDataset).returning({id: dataset.id,name: dataset.name,description: dataset.description,public: dataset.public,type: dataset.type,createdAt: dataset.createdAt,owner: dataset.owner,});console.log("Created dataset:", result[0].id, 
"for user:", userId);return c.json({data: result[0]}, 201);} catch (error) {console.error("Error creating dataset:", error);// Handle unique constraint violationsif (error instanceof Error && error.message.includes('duplicate key')) {return c.json({error: "A dataset with this ID already exists"}, 400);}return c.json({error: "Failed to create dataset",details: error instanceof Error ? error.message : String(error),},500);}});/*** Protected API route to update an existing dataset** @route PUT /api/datasets/:id* @authentication Required* @param {string} id - Dataset ID in URL path* @body {Object} Dataset update payload:* - name?: string (optional, max 255 chars)* - description?: string (optional, max 255 chars)* - public?: boolean (optional)* - type?: string (optional)* - active?: boolean (optional, for soft delete)* @returns {Object} Response containing:* - data: The updated dataset object* @error 400 - If fields are invalid or dataset not found* @error 403 - If user doesn't own the dataset* @error 500 - If database operation fails* @description Updates an existing dataset owned by the authenticated user* Only the dataset owner can modify it*/app.put("/api/datasets/:id", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT// Get dataset ID from URL parametersconst datasetId = c.req.param("id");if (!datasetId) {return c.json({error: "Missing dataset ID in URL"}, 400);}// Parse request bodyconst body = await c.req.json();const { name, description, public: isPublic, type, active } = body;// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// First, check if the dataset exists and if the user owns itconst existingDataset = await db.select({id: dataset.id,owner: dataset.owner,active: dataset.active}).from(dataset).where(eq(dataset.id, datasetId)).limit(1);if (existingDataset.length === 0) {return 
c.json({error: "Dataset not found"}, 404);}// Check if user has permission to edit this datasetconst hasEditPermission = await checkUserPermission(db, userId, datasetId, 'EDIT');if (!hasEditPermission) {return c.json({error: "You don't have permission to modify this dataset"}, 403);}// Validate fields if providedif (name !== undefined) {if (typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Invalid field: name must be a non-empty string"}, 400);}if (name.length > 255) {return c.json({error: "Field 'name' must be 255 characters or less"}, 400);}}if (description !== undefined && description !== null && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}if (type !== undefined) {const validTypes = ['organise', 'test', 'train'];if (!validTypes.includes(type)) {return c.json({error: `Field 'type' must be one of: ${validTypes.join(', ')}`}, 400);}}// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};if (name !== undefined) {updateData.name = name.trim();}if (description !== undefined) {updateData.description = description?.trim() || null;}if (isPublic !== undefined) {updateData.public = Boolean(isPublic);}if (type !== undefined) {updateData.type = type;}if (active !== undefined) {updateData.active = Boolean(active);}// Update the datasetconst result = await db.update(dataset).set(updateData).where(eq(dataset.id, datasetId)).returning({id: dataset.id,name: dataset.name,description: dataset.description,public: dataset.public,type: dataset.type,createdAt: dataset.createdAt,lastModified: dataset.lastModified,owner: dataset.owner,active: dataset.active,});if (result.length === 0) {return c.json({error: "Failed to update dataset"}, 500);}console.log("Updated dataset:", result[0].id, "for user:", userId);return c.json({data: result[0]});} catch (error) {console.error("Error updating dataset:", error);return 
c.json({error: "Failed to update dataset",details: error instanceof Error ? error.message : String(error),},500);}});// ============================================================================// LOCATIONS// ============================================================================/*** Protected API route to fetch locations for a specific dataset** @route GET /api/locations* @authentication Required* @param {string} datasetId - Required query parameter specifying the dataset to fetch locations from* @param {number} [page=1] - Optional page number for pagination (starts at 1)* @param {number} [pageSize=100] - Optional page size (1-100, defaults to 100)* @returns {Object} Response containing:* - data: Array of location objects with id, name, latitude, longitude, description* - pagination: Object with pagination metadata (currentPage, pageSize, totalPages, totalItems, etc.)* @error 400 - If datasetId is missing or page is invalid* @description Returns active locations for the specified dataset with pagination support*/app.get("/api/locations", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT // Subject claim usually contains the user ID// Get the dataset ID from query parameterconst datasetId = c.req.query("datasetId");const page = parseInt(c.req.query("page") || "1", 10);const pageSize = parseInt(c.req.query("pageSize") || "100", 10);console.log("Locations API called with datasetId:", datasetId, "userId:", userId, "page:", page);if (!datasetId) {console.log("Missing datasetId in request");return c.json({error: "Missing required query parameter: datasetId"}, 400);}// Validate page numberif (page < 1) {return c.json({error: "Invalid page parameter: must be greater than 0"}, 400);}// Limit page size between 1 and 100const limitedPageSize = Math.min(Math.max(pageSize, 1), 100);const offset = (page - 1) * limitedPageSize;// 
Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// First, get total count for paginationconsole.log("Counting locations for datasetId:", datasetId);const countResult = await db.select({count: sqlExpr<number>`COUNT(*)`}).from(location).where(sqlExpr`${location.datasetId} = ${datasetId} AND ${location.active} = true`);const totalLocations = Number(countResult[0].count);const totalPages = Math.ceil(totalLocations / limitedPageSize);// Query locations for the specified dataset with pagination (using 100 as default limit)console.log("Querying locations for datasetId:", datasetId, "page:", page);const results = await db.select({id: location.id,name: location.name,latitude: location.latitude,longitude: location.longitude,description: location.description,}).from(location).where(sqlExpr`${location.datasetId} = ${datasetId} AND ${location.active} = true`).orderBy(location.name).limit(limitedPageSize).offset(offset);console.log("Found", results.length, "locations for dataset", datasetId, "page:", page);return c.json({data: results,pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalLocations,hasNextPage: page < totalPages,hasPreviousPage: page > 1,}});} catch (error) {console.error("Error fetching locations:", error);return c.json({error: "Failed to fetch locations",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to create a new location** @route POST /api/locations* @authentication Required* @param {Object} body - Location data including datasetId, name, description, latitude, longitude* @returns {Object} Response containing:* - data: Created location object* @description Creates a new location with the authenticated user as owner.* Requires EDIT permission on the dataset to create locations within it.*/app.post("/api/locations", authenticate, async (c) => {try {// Get user ID from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Database connectionconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Parse and validate request bodyconst body = await c.req.json();const { id, datasetId, name, description, latitude, longitude } = body;// Field validationif (!id || typeof id !== 'string') {return c.json({error: "Missing or invalid required field: id"}, 400);}if (!datasetId || typeof datasetId !== 'string') {return c.json({error: "Missing or invalid required field: datasetId"}, 400);}if (!name || typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Missing or invalid required field: name"}, 400);}// Length validationif (id.length !== 12) {return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}// Validate latitude and longitude if providedif (latitude !== null && latitude !== undefined) {const lat = Number(latitude);if (isNaN(lat) || lat < -90 || lat > 90) {return c.json({error: "Field 'latitude' must be a valid number between -90 and 90"}, 400);}}if (longitude !== null && longitude !== undefined) {const lng = Number(longitude);if (isNaN(lng) || lng < -180 || lng > 
180) {return c.json({error: "Field 'longitude' must be a valid number between -180 and 180"}, 400);}}// Check if user has EDIT permission on the datasetconst hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');if (!hasPermission) {return c.json({error: "You don't have permission to create locations in this dataset"}, 403);}// Create location objectconst now = new Date();const newLocation = {id: id.trim(),datasetId: datasetId.trim(),name: name.trim(),description: description?.trim() || null,latitude: latitude !== null && latitude !== undefined ? String(Number(latitude)) : null,longitude: longitude !== null && longitude !== undefined ? String(Number(longitude)) : null,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,active: true,};// Insert the locationconst result = await db.insert(location).values(newLocation).returning({id: location.id,datasetId: location.datasetId,name: location.name,description: location.description,latitude: location.latitude,longitude: location.longitude,createdAt: location.createdAt,createdBy: location.createdBy,lastModified: location.lastModified,modifiedBy: location.modifiedBy,active: location.active,});console.log("Created location:", result[0].id, "for dataset:", datasetId, "by user:", userId);return c.json({data: result[0]}, 201);} catch (error) {console.error("Error creating location:", error);// Handle unique constraint violationsif (error instanceof Error && error.message.includes('duplicate key')) {return c.json({error: "A location with this ID already exists"}, 400);}return c.json({error: "Failed to create location",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to update an existing location** @route PUT /api/locations/:id* @authentication Required* @param {string} id - Location ID in URL path* @param {Object} body - Location data to update (name, description, latitude, longitude, active)* @returns {Object} Response containing:* - data: Updated location object* @description Updates an existing location. Requires EDIT permission on the dataset.*/app.put("/api/locations/:id", authenticate, async (c) => {try {// Get user from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Get location ID from URL parametersconst locationId = c.req.param("id");// Parse request bodyconst body = await c.req.json();const { name, description, latitude, longitude, active } = body;// Connect to databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Check if location exists and get its dataset IDconst existingLocation = await db.select({id: location.id,datasetId: location.datasetId,active: location.active}).from(location).where(eq(location.id, locationId)).limit(1);if (existingLocation.length === 0) {return c.json({error: "Location not found"}, 404);}const locationRecord = existingLocation[0];// Check if user has EDIT permission on the datasetconst hasEditPermission = await checkUserPermission(db, userId, locationRecord.datasetId, 'EDIT');if (!hasEditPermission) {return c.json({error: "You don't have permission to edit this location"}, 403);}// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};// Validate and add name if providedif (name !== undefined) {if (typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Field 'name' must be a non-empty string"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}updateData.name = name.trim();}// 
Validate and add description if providedif (description !== undefined) {if (description !== null && typeof description !== 'string') {return c.json({error: "Field 'description' must be a string or null"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}updateData.description = description?.trim() || null;}// Validate and add latitude if providedif (latitude !== undefined) {if (latitude !== null) {const lat = Number(latitude);if (isNaN(lat) || lat < -90 || lat > 90) {return c.json({error: "Field 'latitude' must be a valid number between -90 and 90"}, 400);}updateData.latitude = String(lat);} else {updateData.latitude = null;}}// Validate and add longitude if providedif (longitude !== undefined) {if (longitude !== null) {const lng = Number(longitude);if (isNaN(lng) || lng < -180 || lng > 180) {return c.json({error: "Field 'longitude' must be a valid number between -180 and 180"}, 400);}updateData.longitude = String(lng);} else {updateData.longitude = null;}}// Add active status if provided (for soft delete)if (active !== undefined) {if (typeof active !== 'boolean') {return c.json({error: "Field 'active' must be a boolean"}, 400);}updateData.active = active;}// Update the locationconst result = await db.update(location).set(updateData).where(eq(location.id, locationId)).returning({id: location.id,datasetId: location.datasetId,name: location.name,description: location.description,latitude: location.latitude,longitude: location.longitude,createdAt: location.createdAt,createdBy: location.createdBy,lastModified: location.lastModified,modifiedBy: location.modifiedBy,active: location.active,});if (result.length === 0) {return c.json({error: "Failed to update location"}, 500);}console.log("Updated location:", result[0].id, "by user:", userId);return c.json({data: result[0]});} catch (error) {console.error("Error updating location:", error);return c.json({error: "Failed to update 
location",details: error instanceof Error ? error.message : String(error),},500);}});// ============================================================================// CLUSTERS// ============================================================================/*** Protected API route to fetch clusters for a specific location** @route GET /api/clusters* @authentication Required* @param {string} locationId - Required query parameter specifying the location to fetch clusters from* @returns {Object} Response containing:* - data: Array of cluster objects with recording pattern information* @error 400 - If locationId is missing* @description Returns clusters for the specified location along with their recording patterns* Performs a LEFT JOIN with the cyclicRecordingPattern table to include recording duration data* Results are returned with recording pattern information embedded in each cluster object*/app.get("/api/clusters", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT // Subject claim usually contains the user ID// Get the location ID from query parameterconst locationId = c.req.query("locationId");console.log("Cluster API called with locationId:", locationId, "userId:", userId);if (!locationId) {console.log("Missing locationId in request");return c.json({error: "Missing required query parameter: locationId"}, 400);}// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Query clusters with a LEFT JOIN on recording patterns to avoid N+1 queriesconsole.log("Querying clusters for locationId:", locationId);const joinedResults = await db.select({// Cluster fieldsid: cluster.id,datasetId: cluster.datasetId,locationId: cluster.locationId,name: cluster.name,description: cluster.description,createdBy: cluster.createdBy,createdAt: cluster.createdAt,lastModified: cluster.lastModified,modifiedBy: 
cluster.modifiedBy,active: cluster.active,timezoneId: cluster.timezoneId,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId,sampleRate: cluster.sampleRate,// Recording pattern fieldsrecordS: cyclicRecordingPattern.recordS,sleepS: cyclicRecordingPattern.sleepS}).from(cluster).leftJoin(cyclicRecordingPattern,eq(cluster.cyclicRecordingPatternId, cyclicRecordingPattern.id)).where(eq(cluster.locationId, locationId)).orderBy(cluster.name);console.log("Found", joinedResults.length, "clusters for location", locationId);// Transform results to match the expected structureconst enrichedResults = joinedResults.map(row => ({id: row.id,datasetId: row.datasetId,locationId: row.locationId,name: row.name,description: row.description,createdBy: row.createdBy,createdAt: row.createdAt,lastModified: row.lastModified,modifiedBy: row.modifiedBy,active: row.active,timezoneId: row.timezoneId,cyclicRecordingPatternId: row.cyclicRecordingPatternId,sampleRate: row.sampleRate,recordingPattern: (row.recordS !== null && row.sleepS !== null) ? {recordS: row.recordS,sleepS: row.sleepS} : null}));return c.json({data: enrichedResults});} catch (error) {console.error("Error fetching clusters:", error);return c.json({error: "Failed to fetch clusters",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to create a new cluster** @route POST /api/clusters* @authentication Required* @param {Object} body - Cluster data including datasetId, locationId, name, description, timezoneId, sampleRate, recordingPattern* - recordingPattern?: Object (optional) - { recordS: number, sleepS: number }* @returns {Object} Response containing:* - data: Created cluster object with optional recordingPattern* @description Creates a new cluster with the authenticated user as owner.* Requires EDIT permission on the dataset to create clusters within it.* If recordingPattern is provided, creates a new cyclic recording pattern record.*/app.post("/api/clusters", authenticate, async (c) => {try {// Get user ID from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Database connectionconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Parse and validate request bodyconst body = await c.req.json();const { id, datasetId, locationId, name, description, timezoneId, sampleRate, recordingPattern } = body;// Field validationif (!id || typeof id !== 'string') {return c.json({error: "Missing or invalid required field: id"}, 400);}if (!datasetId || typeof datasetId !== 'string') {return c.json({error: "Missing or invalid required field: datasetId"}, 400);}if (!locationId || typeof locationId !== 'string') {return c.json({error: "Missing or invalid required field: locationId"}, 400);}if (!name || typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Missing or invalid required field: name"}, 400);}if (!sampleRate || typeof sampleRate !== 'number' || sampleRate <= 0) {return c.json({error: "Missing or invalid required field: sampleRate"}, 400);}// Length validationif (id.length !== 12) {return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 
characters or less"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}if (timezoneId && timezoneId.length > 40) {return c.json({error: "Field 'timezoneId' must be 40 characters or less"}, 400);}// Validate recording pattern if providedif (recordingPattern) {if (typeof recordingPattern !== 'object' || recordingPattern === null) {return c.json({error: "Field 'recordingPattern' must be an object"}, 400);}const { recordS, sleepS } = recordingPattern;if (typeof recordS !== 'number' || recordS <= 0 || !Number.isInteger(recordS)) {return c.json({error: "Field 'recordingPattern.recordS' must be a positive integer"}, 400);}if (typeof sleepS !== 'number' || sleepS <= 0 || !Number.isInteger(sleepS)) {return c.json({error: "Field 'recordingPattern.sleepS' must be a positive integer"}, 400);}}// Check if user has EDIT permission on the datasetconst hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');if (!hasPermission) {return c.json({error: "You don't have permission to create clusters in this dataset"}, 403);}// Create cluster with optional recording pattern in transactionconst result = await db.transaction(async (tx) => {const now = new Date();let cyclicRecordingPatternId = null;// Create cyclic recording pattern if providedif (recordingPattern) {const patternId = nanoid(12);const [createdPattern] = await tx.insert(cyclicRecordingPattern).values({id: patternId,recordS: recordingPattern.recordS,sleepS: recordingPattern.sleepS,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,}).returning({id: cyclicRecordingPattern.id,});cyclicRecordingPatternId = createdPattern.id;}// Create cluster objectconst newCluster = {id: id.trim(),datasetId: datasetId.trim(),locationId: locationId.trim(),name: name.trim(),description: description?.trim() || null,timezoneId: timezoneId?.trim() || null,cyclicRecordingPatternId: cyclicRecordingPatternId,sampleRate: 
sampleRate,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,active: true,};// Insert the clusterconst [createdCluster] = await tx.insert(cluster).values(newCluster).returning({id: cluster.id,datasetId: cluster.datasetId,locationId: cluster.locationId,name: cluster.name,description: cluster.description,timezoneId: cluster.timezoneId,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId,sampleRate: cluster.sampleRate,createdAt: cluster.createdAt,createdBy: cluster.createdBy,lastModified: cluster.lastModified,modifiedBy: cluster.modifiedBy,active: cluster.active,});// If recording pattern was created, include it in the responseconst recordingPatternData = recordingPattern ? {recordS: recordingPattern.recordS,sleepS: recordingPattern.sleepS} : null;return {...createdCluster,recordingPattern: recordingPatternData};});console.log("Created cluster:", result.id, "for location:", locationId, "by user:", userId);return c.json({data: result}, 201);} catch (error) {console.error("Error creating cluster:", error);// Handle unique constraint violationsif (error instanceof Error && error.message.includes('duplicate key')) {return c.json({error: "A cluster with this ID already exists"}, 400);}return c.json({error: "Failed to create cluster",details: error instanceof Error ? error.message : String(error),},500);}});/*** Protected API route to update an existing cluster** @route PUT /api/clusters/:id* @authentication Required* @param {string} id - Cluster ID in URL path* @param {Object} body - Cluster data to update (name, description, timezoneId, sampleRate, active, recordingPattern)* - recordingPattern?: Object | null (optional) - { recordS: number, sleepS: number } or null to remove* @returns {Object} Response containing:* - data: Updated cluster object with recordingPattern if present* @description Updates an existing cluster. 
Requires EDIT permission on the dataset.* recordingPattern can be provided to add/update or set to null to remove recording pattern.*/app.put("/api/clusters/:id", authenticate, async (c) => {try {// Get user from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Get cluster ID from URL parametersconst clusterId = c.req.param("id");// Parse request bodyconst body = await c.req.json();const { name, description, timezoneId, sampleRate, active, recordingPattern } = body;// Connect to databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Check if cluster exists and get its dataset ID and current recording patternconst existingCluster = await db.select({id: cluster.id,datasetId: cluster.datasetId,active: cluster.active,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId}).from(cluster).where(eq(cluster.id, clusterId)).limit(1);if (existingCluster.length === 0) {return c.json({error: "Cluster not found"}, 404);}const clusterRecord = existingCluster[0];// Check if user has EDIT permission on the datasetconst hasEditPermission = await checkUserPermission(db, userId, clusterRecord.datasetId, 'EDIT');if (!hasEditPermission) {return c.json({error: "You don't have permission to edit this cluster"}, 403);}// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};// Validate and add name if providedif (name !== undefined) {if (typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Field 'name' must be a non-empty string"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}updateData.name = name.trim();}// Validate and add description if providedif (description !== undefined) {if (description !== null && typeof description !== 'string') {return c.json({error: "Field 'description' must be a string or null"}, 400);}if (description && 
description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}updateData.description = description?.trim() || null;}// Validate and add timezoneId if providedif (timezoneId !== undefined) {if (timezoneId !== null && typeof timezoneId !== 'string') {return c.json({error: "Field 'timezoneId' must be a string or null"}, 400);}if (timezoneId && timezoneId.length > 40) {return c.json({error: "Field 'timezoneId' must be 40 characters or less"}, 400);}updateData.timezoneId = timezoneId?.trim() || null;}// Validate and add sampleRate if providedif (sampleRate !== undefined) {if (typeof sampleRate !== 'number' || sampleRate <= 0) {return c.json({error: "Field 'sampleRate' must be a positive number"}, 400);}updateData.sampleRate = sampleRate;}// Validate recording pattern if providedif (recordingPattern !== undefined) {if (recordingPattern !== null && typeof recordingPattern !== 'object') {return c.json({error: "Field 'recordingPattern' must be an object or null"}, 400);}if (recordingPattern) {const { recordS, sleepS } = recordingPattern;if (typeof recordS !== 'number' || recordS <= 0 || !Number.isInteger(recordS)) {return c.json({error: "Field 'recordingPattern.recordS' must be a positive integer"}, 400);}if (typeof sleepS !== 'number' || sleepS <= 0 || !Number.isInteger(sleepS)) {return c.json({error: "Field 'recordingPattern.sleepS' must be a positive integer"}, 400);}}}// Add active status if provided (for soft delete)if (active !== undefined) {if (typeof active !== 'boolean') {return c.json({error: "Field 'active' must be a boolean"}, 400);}updateData.active = active;}// Update cluster with optional recording pattern changes in transactionconst result = await db.transaction(async (tx) => {const currentPatternId = clusterRecord.cyclicRecordingPatternId;// Handle recording pattern updatesif (recordingPattern !== undefined) {if (recordingPattern === null) {// Remove recording patternif (currentPatternId) {await 
tx.delete(cyclicRecordingPattern).where(eq(cyclicRecordingPattern.id, currentPatternId));updateData.cyclicRecordingPatternId = null;}} else {// Add or update recording patternif (currentPatternId) {// Update existing patternawait tx.update(cyclicRecordingPattern).set({recordS: recordingPattern.recordS,sleepS: recordingPattern.sleepS,lastModified: new Date(),modifiedBy: userId,}).where(eq(cyclicRecordingPattern.id, currentPatternId));} else {// Create new patternconst patternId = nanoid(12);await tx.insert(cyclicRecordingPattern).values({id: patternId,recordS: recordingPattern.recordS,sleepS: recordingPattern.sleepS,createdBy: userId,createdAt: new Date(),lastModified: new Date(),modifiedBy: userId,});updateData.cyclicRecordingPatternId = patternId;}}}// Update the clusterconst [updatedCluster] = await tx.update(cluster).set(updateData).where(eq(cluster.id, clusterId)).returning({id: cluster.id,datasetId: cluster.datasetId,locationId: cluster.locationId,name: cluster.name,description: cluster.description,timezoneId: cluster.timezoneId,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId,sampleRate: cluster.sampleRate,createdAt: cluster.createdAt,createdBy: cluster.createdBy,lastModified: cluster.lastModified,modifiedBy: cluster.modifiedBy,active: cluster.active,});// Include recording pattern data in response if it existslet recordingPatternData = null;if (updatedCluster.cyclicRecordingPatternId) {if (recordingPattern !== undefined && recordingPattern !== null) {// Use the provided pattern datarecordingPatternData = {recordS: recordingPattern.recordS,sleepS: recordingPattern.sleepS};} else {// Fetch existing pattern dataconst existingPattern = await tx.select({recordS: cyclicRecordingPattern.recordS,sleepS: cyclicRecordingPattern.sleepS}).from(cyclicRecordingPattern).where(eq(cyclicRecordingPattern.id, updatedCluster.cyclicRecordingPatternId)).limit(1);if (existingPattern.length > 0) {recordingPatternData = existingPattern[0];}}}return 
{...updatedCluster,recordingPattern: recordingPatternData};});if (!result) {return c.json({error: "Failed to update cluster"}, 500);}console.log("Updated cluster:", result.id, "by user:", userId);return c.json({data: result});} catch (error) {console.error("Error updating cluster:", error);return c.json({error: "Failed to update cluster",details: error instanceof Error ? error.message : String(error),},500);}});/*** Protected API route to delete a cluster** @route DELETE /api/clusters/:id* @authentication Required* @param {string} id - Cluster ID in URL path* @returns {Object} Response containing:* - message: Success message* @description Soft deletes a cluster (sets active=false) and hard deletes its associated cyclic recording pattern if present.* Requires EDIT permission on the dataset.*/app.delete("/api/clusters/:id", authenticate, async (c) => {try {// Get user from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Get cluster ID from URL parametersconst clusterId = c.req.param("id");// Connect to databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Check if cluster exists and get its dataset ID and recording patternconst existingCluster = await db.select({id: cluster.id,datasetId: cluster.datasetId,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId}).from(cluster).where(eq(cluster.id, clusterId)).limit(1);if (existingCluster.length === 0) {return c.json({error: "Cluster not found"}, 404);}const clusterRecord = existingCluster[0];// Check if user has EDIT permission on the datasetconst hasEditPermission = await checkUserPermission(db, userId, clusterRecord.datasetId, 'EDIT');if (!hasEditPermission) {return c.json({error: "You don't have permission to delete this cluster"}, 403);}// Soft delete cluster and hard delete associated recording pattern in transactionawait db.transaction(async (tx) => {// Soft delete the cluster (set active = false)await 
tx.update(cluster).set({active: false,lastModified: new Date(),modifiedBy: userId}).where(eq(cluster.id, clusterId));// Hard delete associated cyclic recording pattern if it exists (no active field in this table)if (clusterRecord.cyclicRecordingPatternId) {await tx.delete(cyclicRecordingPattern).where(eq(cyclicRecordingPattern.id, clusterRecord.cyclicRecordingPatternId));}});console.log("Soft deleted cluster:", clusterId, "and hard deleted associated recording pattern by user:", userId);return c.json({message: "Cluster deleted successfully"});} catch (error) {console.error("Error deleting cluster:", error);return c.json({error: "Failed to delete cluster",details: error instanceof Error ? error.message : String(error),},500);}});// ============================================================================// FILES// ============================================================================/*** Protected API route to fetch audio files for a specific cluster** @route GET /api/files* @authentication Required* @param {string} clusterId - Required query parameter specifying the cluster to fetch files from* @param {number} [page=1] - Optional page number for pagination (starts at 1)* @param {number} [pageSize=100] - Optional page size (10-500, defaults to 100)* @param {string} [solarNight] - Optional filter for files recorded during solar night ('true'/'false')* @param {string} [civilNight] - Optional filter for files recorded during civil night ('true'/'false')* @param {string} [speciesId] - Optional filter for files with selections labeled with specific species* @returns {Object} Response containing:* - data: Array of file objects with metadata, mothMetadata, and species information* - pagination: Object with pagination metadata* - filters: Object showing the filters that were applied* @error 400 - If clusterId is missing or page is invalid* @description Returns audio files for the specified cluster with comprehensive metadata:* - Basic file information (name, path, 
timestamp, duration, etc.)* - File metadata (JSON format)* - Recording device metadata (gain, battery voltage, temperature)* - Species found in each file** When speciesId is provided, only returns files that have at least one* selection labeled with the specified species.*/app.get("/api/files", authenticate, async (c) => {try {// Get query parametersconst clusterId = c.req.query("clusterId");const page = parseInt(c.req.query("page") || "1", 10);const pageSize = parseInt(c.req.query("pageSize") || "100", 10);const solarNight = c.req.query("solarNight");const civilNight = c.req.query("civilNight");const speciesId = c.req.query("speciesId"); // Optional filter for species// Validate parametersif (!clusterId) {console.log("Missing clusterId in request");return c.json({error: "Missing required query parameter: clusterId"}, 400);}// Validate and limit page size to prevent excessive queriesconst limitedPageSize = Math.min(Math.max(pageSize, 10), 500);const offset = (page - 1) * limitedPageSize;// Validate page numberif (page < 1) {console.log("Invalid page number in request:", page);return c.json({error: "Invalid page parameter: must be greater than 0"}, 400);}// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Build filter conditionslet whereConditions = sqlExpr`${file.clusterId} = ${clusterId} AND ${file.active} = true`;// Add filters for solarNight if specifiedif (solarNight === 'true') {whereConditions = sqlExpr`${whereConditions} AND ${file.maybeSolarNight} = true`;} else if (solarNight === 'false') {whereConditions = sqlExpr`${whereConditions} AND (${file.maybeSolarNight} = false OR ${file.maybeSolarNight} IS NULL)`;}// Add filters for civilNight if specifiedif (civilNight === 'true') {whereConditions = sqlExpr`${whereConditions} AND ${file.maybeCivilNight} = true`;} else if (civilNight === 'false') {whereConditions = sqlExpr`${whereConditions} AND (${file.maybeCivilNight} = false OR ${file.maybeCivilNight} IS NULL)`;}// First, 
get the total count of files for pagination metadatalet countResult;if (speciesId) {// Count only files that have at least one selection labeled with the specified speciescountResult = await db.select({count: sqlExpr<number>`COUNT(DISTINCT ${file.id})`}).from(file).innerJoin(selection, eq(selection.fileId, file.id)).innerJoin(label, eq(label.selectionId, selection.id)).where(sqlExpr`${whereConditions} AND ${label.speciesId} = ${speciesId} AND ${label.active} = true`);} else {// Standard count without species filtercountResult = await db.select({count: sqlExpr<number>`COUNT(1)`}).from(file).where(whereConditions);}const totalFiles = Number(countResult[0].count);const totalPages = Math.ceil(totalFiles / limitedPageSize);// Query files for the specified cluster with paginationlet filesResult;if (speciesId) {// Get only files that have at least one selection labeled with the specified speciesfilesResult = await db.select({id: file.id,fileName: file.fileName,path: file.path,timestampLocal: file.timestampLocal,duration: file.duration,sampleRate: file.sampleRate,locationId: file.locationId,description: file.description,maybeSolarNight: file.maybeSolarNight,maybeCivilNight: file.maybeCivilNight,moonPhase: file.moonPhase,}).from(file).innerJoin(selection, eq(selection.fileId, file.id)).innerJoin(label, eq(label.selectionId, selection.id)).where(sqlExpr`${whereConditions} AND ${label.speciesId} = ${speciesId} AND ${label.active} = true`).orderBy(file.timestampLocal).groupBy(file.id, file.fileName, file.path, file.timestampLocal, file.duration,file.sampleRate, file.locationId, file.description, file.maybeSolarNight,file.maybeCivilNight, file.moonPhase).limit(limitedPageSize).offset(offset);} else {// Standard query without species filterfilesResult = await db.select({id: file.id,fileName: file.fileName,path: file.path,timestampLocal: file.timestampLocal,duration: file.duration,sampleRate: file.sampleRate,locationId: file.locationId,description: 
file.description,maybeSolarNight: file.maybeSolarNight,maybeCivilNight: file.maybeCivilNight,moonPhase: file.moonPhase,}).from(file).where(whereConditions).orderBy(file.timestampLocal).limit(limitedPageSize).offset(offset);}// Early return if no files foundif (filesResult.length === 0) {return c.json({data: [],pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalFiles,hasNextPage: page < totalPages,hasPreviousPage: page > 1,},filters: {solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,speciesId: speciesId || null,}});}// Get all file IDs for metadata queriesconst fileIds = filesResult.map(f => f.id);// Safety check - if no files found, return empty resultsif (fileIds.length === 0) {return c.json({data: [],pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalFiles,hasNextPage: page < totalPages,hasPreviousPage: page > 1,},filters: {solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,civilNight: civilNight === 'true' ? true : civilNight === 'false' ? 
false : null,speciesId: speciesId || null,}});}// Convert array to proper SQL format for IN clauseconst fileIdsQuoted = fileIds.map(id => `'${id}'`).join(',');// Execute metadata queries in parallel using Promise.allconst [metadataResults, mothMetadataResults, speciesResults] = await Promise.all([// Fetch file metadatadb.select({fileId: fileMetadata.fileId,json: fileMetadata.json}).from(fileMetadata).where(sqlExpr`${fileMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),// Fetch moth metadatadb.select({fileId: mothMetadata.fileId,gain: mothMetadata.gain,batteryV: mothMetadata.batteryV,tempC: mothMetadata.tempC}).from(mothMetadata).where(sqlExpr`${mothMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),// Fetch species data via selections and labelsdb.select({fileId: selection.fileId,speciesId: species.id,speciesLabel: species.label,ebirdCode: species.ebirdCode,description: species.description}).from(selection).innerJoin(label, eq(label.selectionId, selection.id)).innerJoin(species, eq(species.id, label.speciesId)).where(speciesId? 
sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)}) AND ${label.active} = true AND ${label.speciesId} = ${speciesId}`: sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)}) AND ${label.active} = true`)]);// Process metadata resultsconst metadataMap = metadataResults.reduce((acc, item) => {let processedJson = item.json;try {if (typeof item.json === 'string' && (item.json.startsWith('{') || item.json.startsWith('['))) {processedJson = JSON.parse(item.json);} else if (typeof item.json === 'string' && item.json.includes('\\"')) {const unescaped = item.json.replace(/\\"/g, '"');processedJson = JSON.parse(unescaped);}} catch (e) {console.error("Error processing metadata JSON:", e);processedJson = item.json;}acc[item.fileId] = processedJson;return acc;}, {} as Record<string, unknown>);// Process moth metadataconst mothMetadataMap = mothMetadataResults.reduce((acc, item) => {acc[item.fileId] = {gain: item.gain,batteryV: item.batteryV !== null ? Number(item.batteryV) : null,tempC: item.tempC !== null ? 
Number(item.tempC) : null};return acc;}, {} as Record<string, {gain: string | null;batteryV: number | null;tempC: number | null;}>);// Process species dataconst speciesMap = speciesResults.reduce((acc, item) => {if (!acc[item.fileId]) {acc[item.fileId] = [];}// Deduplicate species entriesconst existingSpecies = acc[item.fileId].find(s => s.id === item.speciesId);if (!existingSpecies) {acc[item.fileId].push({id: item.speciesId,label: item.speciesLabel,ebirdCode: item.ebirdCode,description: item.description});}return acc;}, {} as Record<string, Array<{id: string;label: string;ebirdCode: string | null;description: string | null;}>>);// Combine file data with metadataconst files = filesResult.map(fileData => ({...fileData,metadata: metadataMap[fileData.id] || null,mothMetadata: mothMetadataMap[fileData.id] || null,species: speciesMap[fileData.id] || []}));// Return paginated results with metadata and filtersreturn c.json({data: files,pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalFiles,hasNextPage: page < totalPages,hasPreviousPage: page > 1,},filters: {solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,speciesId: speciesId || null,}});} catch (error) {console.error("Error fetching files:", error);return c.json({error: "Failed to fetch files",details: error instanceof Error ? 
error.message : String(error),},500);}});// ============================================================================// SPECIES// ============================================================================/*** Protected API route to fetch species and call types for a dataset** @route GET /api/species* @authentication Required* @param {string} datasetId - Required query parameter specifying the dataset to fetch species from* @returns {Object} Response containing:* - data: Array of species objects, each with an array of associated call types* @error 400 - If datasetId is missing* @description Returns species associated with the specified dataset along with their call types* Each species object includes:* - id, label, ebirdCode, description* - callTypes: Array of call type objects with id and label** Uses an efficient JOIN approach to fetch data in a single query to avoid N+1 query problems* Results are transformed to provide a nested structure with call types inside species objects*/app.get("/api/species", authenticate, async (c) => {try {// Get query parameter for datasetIdconst datasetId = c.req.query("datasetId");// Validate parametersif (!datasetId) {return c.json({error: "Missing required query parameter: datasetId"}, 400);}// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Use JOIN approach to get species and call types in a single query// This eliminates the N+1 query problem and reduces round trips to the databaseconst queryResults = await db.select({id: species.id,label: species.label,ebirdCode: species.ebirdCode,description: species.description,callTypeId: callType.id,callTypeLabel: callType.label}).from(species).innerJoin(speciesDataset,eq(species.id, speciesDataset.speciesId)).leftJoin(callType,sqlExpr`${callType.speciesId} = ${species.id} AND ${callType.active} = true`).where(eq(speciesDataset.datasetId, datasetId)).orderBy(species.label);// If no results are found, return an empty arrayif (queryResults.length 
=== 0) {return c.json({data: []});}// Transform the flat results into nested objects// Using a Map for better performance with large datasetsconst speciesMap = new Map();queryResults.forEach(row => {if (!speciesMap.has(row.id)) {// Create a new species entry if not already in the mapspeciesMap.set(row.id, {id: row.id,label: row.label,ebirdCode: row.ebirdCode,description: row.description,callTypes: []});}// Add the call type if it exists and isn't already in the arrayif (row.callTypeId) {const species = speciesMap.get(row.id);const existingCallType = species.callTypes.find((ct: { id: string }) => ct.id === row.callTypeId);if (!existingCallType) {species.callTypes.push({id: row.callTypeId,label: row.callTypeLabel});}}});// Convert map to arrayconst enrichedSpecies = Array.from(speciesMap.values());// Return the enriched species datareturn c.json({data: enrichedSpecies});} catch (error) {console.error("Error fetching species:", error);return c.json({error: "Failed to fetch species",details: error instanceof Error ? error.message : String(error),},500);}});/*** Protected API route to create a new species** @route POST /api/species* @authentication Required* @param {Object} body - Species data including datasetId, label, description, ebirdCode, callTypes* @returns {Object} Response containing:* - data: Created species object with call types* @description Creates a new species with optional eBird integration and call types.* Requires EDIT permission on the dataset. 
Creates entries in species, species_dataset,
 * and optionally call_type tables in a single transaction.
 */
app.post("/api/species", authenticate, async (c) => {
  try {
    // Get user ID from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Database connection
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Parse and validate request body (the client supplies the nanoid `id`)
    const body = await c.req.json();
    const { id, datasetId, label, description, ebirdCode, callTypes } = body;
    // Field validation
    if (!id || typeof id !== 'string') {
      return c.json({error: "Missing or invalid required field: id"}, 400);
    }
    if (!datasetId || typeof datasetId !== 'string') {
      return c.json({error: "Missing or invalid required field: datasetId"}, 400);
    }
    if (!label || typeof label !== 'string' || label.trim().length === 0) {
      return c.json({error: "Missing or invalid required field: label"}, 400);
    }
    // Length validation
    if (id.length !== 12) {
      return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);
    }
    if (label.length > 100) {
      return c.json({error: "Field 'label' must be 100 characters or less"}, 400);
    }
    if (description && description.length > 255) {
      return c.json({error: "Field 'description' must be 255 characters or less"}, 400);
    }
    if (ebirdCode && ebirdCode.length > 12) {
      return c.json({error: "Field 'ebirdCode' must be 12 characters or less"}, 400);
    }
    // Validate call types if provided
    if (callTypes && (!Array.isArray(callTypes) || callTypes.some(ct => !ct.label || ct.label.length > 100))) {
      return c.json({error: "Field 'callTypes' must be an array of objects with 'label' field (max 100 chars)"}, 400);
    }
    // Check if user has EDIT permission on the dataset
    const hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');
    if (!hasPermission) {
      return c.json({error: "You don't have permission to create species in this dataset"}, 403);
    }
    // Start transaction for atomic creation
    // NOTE(review): `db` is built on the neon-http driver; confirm that driver
    // supports db.transaction() — otherwise these writes are not atomic.
    const result = await db.transaction(async (tx) => {
      const now = new Date();
      // Create species record
      const newSpecies = {
        id: id.trim(),
        label: label.trim(),
        ebirdCode: ebirdCode?.trim() || null,
        // Taxonomy version is hard-coded to the 2024 eBird taxonomy whenever a code is supplied
        taxonomyVersion: ebirdCode ? '2024' : null,
        description: description?.trim() || null,
        createdBy: userId,
        createdAt: now,
        lastModified: now,
        modifiedBy: userId,
        owner: userId,
        active: true,
      };
      const [createdSpecies] = await tx.insert(species).values(newSpecies).returning({
        id: species.id,
        label: species.label,
        ebirdCode: species.ebirdCode,
        taxonomyVersion: species.taxonomyVersion,
        description: species.description,
        createdAt: species.createdAt,
        createdBy: species.createdBy,
        lastModified: species.lastModified,
        modifiedBy: species.modifiedBy,
        active: species.active,
      });
      // Create species-dataset association
      await tx.insert(speciesDataset).values({
        speciesId: id.trim(),
        datasetId: datasetId.trim(),
        createdAt: now,
        createdBy: userId,
        lastModified: now,
        modifiedBy: userId,
      });
      // Create call types if provided
      const createdCallTypes = [];
      if (callTypes && callTypes.length > 0) {
        for (const callTypeData of callTypes) {
          // Call-type IDs are generated server-side, unlike the client-supplied species ID
          const callTypeId = nanoid(12);
          const [createdCallType] = await tx.insert(callType).values({
            id: callTypeId,
            speciesId: id.trim(),
            label: callTypeData.label.trim(),
            createdBy: userId,
            createdAt: now,
            lastModified: now,
            modifiedBy: userId,
            active: true,
          }).returning({
            id: callType.id,
            label: callType.label,
          });
          createdCallTypes.push(createdCallType);
        }
      }
      return {...createdSpecies, callTypes: createdCallTypes};
    });
    console.log("Created species:", result.id, "for dataset:", datasetId, "by user:", userId);
    return c.json({data: result}, 201);
  } catch (error) {
    console.error("Error creating species:", error);
    // Handle unique constraint violations
    if (error instanceof Error && error.message.includes('duplicate key')) {
      return c.json({error: "A species with this ID already exists"}, 400);
    }
    return c.json(
      {
        error: "Failed to create species",
        details: error instanceof Error ?
error.message : String(error),
      },
      500
    );
  }
});

/**
 * Protected API route to update an existing species
 *
 * @route PUT /api/species/:id
 * @authentication Required
 * @param {string} id - Species ID in URL path
 * @param {Object} body - Species data to update (label, description, ebirdCode, active)
 * @returns {Object} Response containing:
 * - data: Updated species object with call types
 * @description Updates an existing species. Requires EDIT permission on at least one
 * dataset associated with the species. Input is validated BEFORE the transaction so
 * invalid fields produce a real 400 response. When soft deleting (active=false), also
 * soft deletes all related call types and removes species-dataset associations.
 */
app.put("/api/species/:id", authenticate, async (c) => {
  try {
    // Get user from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Get species ID from URL parameters
    const speciesId = c.req.param("id");
    // Parse request body
    const body = await c.req.json();
    const { label, description, ebirdCode, active } = body;
    // Connect to database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Check if species exists and get its associated datasets
    const existingSpecies = await db.select({
      id: species.id,
      active: species.active
    }).from(species).where(eq(species.id, speciesId)).limit(1);
    if (existingSpecies.length === 0) {
      return c.json({error: "Species not found"}, 404);
    }
    // Get datasets associated with this species to check permissions
    const associatedDatasets = await db.select({ datasetId: speciesDataset.datasetId })
      .from(speciesDataset)
      .where(eq(speciesDataset.speciesId, speciesId));
    if (associatedDatasets.length === 0) {
      return c.json({error: "Species not associated with any dataset"}, 400);
    }
    // Check if user has EDIT permission on at least one associated dataset
    let hasPermission = false;
    for (const assoc of associatedDatasets) {
      if (await checkUserPermission(db, userId, assoc.datasetId, 'EDIT')) {
        hasPermission = true;
        break;
      }
    }
    if (!hasPermission) {
      return c.json({error: "You don't have permission to edit this species"}, 403);
    }
    // BUG FIX: validation previously ran inside db.transaction and used
    // `return c.json(..., 400)`, which only resolved the transaction with a
    // Response object that was then re-wrapped as a 200 {data: ...} payload.
    // Validate and build the update object BEFORE opening the transaction.
    const updateData: Record<string, unknown> = {
      lastModified: new Date(),
      modifiedBy: userId,
    };
    // Validate and add label if provided
    if (label !== undefined) {
      if (typeof label !== 'string' || label.trim().length === 0) {
        return c.json({error: "Field 'label' must be a non-empty string"}, 400);
      }
      if (label.length > 100) {
        return c.json({error: "Field 'label' must be 100 characters or less"}, 400);
      }
      updateData.label = label.trim();
    }
    // Validate and add description if provided
    if (description !== undefined) {
      if (description !== null && typeof description !== 'string') {
        return c.json({error: "Field 'description' must be a string or null"}, 400);
      }
      if (description && description.length > 255) {
        return c.json({error: "Field 'description' must be 255 characters or less"}, 400);
      }
      updateData.description = description?.trim() || null;
    }
    // Validate and add eBird code if provided
    if (ebirdCode !== undefined) {
      if (ebirdCode !== null && typeof ebirdCode !== 'string') {
        return c.json({error: "Field 'ebirdCode' must be a string or null"}, 400);
      }
      if (ebirdCode && ebirdCode.length > 12) {
        return c.json({error: "Field 'ebirdCode' must be 12 characters or less"}, 400);
      }
      updateData.ebirdCode = ebirdCode?.trim() || null;
      updateData.taxonomyVersion = ebirdCode ? '2024' : null;
    }
    // Validate and add active status if provided (for soft delete)
    if (active !== undefined) {
      if (typeof active !== 'boolean') {
        return c.json({error: "Field 'active' must be a boolean"}, 400);
      }
      updateData.active = active;
    }
    // Start transaction for atomic update — writes only, no early returns
    const result = await db.transaction(async (tx) => {
      // If soft deleting species, also soft delete related records
      if (active === false) {
        // Soft delete all call types for this species
        await tx.update(callType)
          .set({active: false, lastModified: new Date(), modifiedBy: userId})
          .where(eq(callType.speciesId, speciesId));
        // Delete all species-dataset associations for this species (hard delete since no active field)
        await tx.delete(speciesDataset).where(eq(speciesDataset.speciesId, speciesId));
      }
      // Update the species
      const [updatedSpecies] = await tx.update(species).set(updateData).where(eq(species.id, speciesId)).returning({
        id: species.id,
        label: species.label,
        ebirdCode: species.ebirdCode,
        taxonomyVersion: species.taxonomyVersion,
        description: species.description,
        createdAt: species.createdAt,
        createdBy: species.createdBy,
        lastModified: species.lastModified,
        modifiedBy: species.modifiedBy,
        active: species.active,
      });
      // Get updated call types (only those still active)
      const callTypes = await tx.select({
        id: callType.id,
        label: callType.label,
      }).from(callType).where(and(eq(callType.speciesId, speciesId), eq(callType.active, true)));
      return {...updatedSpecies, callTypes};
    });
    console.log("Updated species:", speciesId, "by user:", userId);
    return c.json({data: result});
  } catch (error) {
    console.error("Error updating species:", error);
    return c.json(
      {
        error: "Failed to update species",
        details: error instanceof Error ?
error.message : String(error),
      },
      500
    );
  }
});

// ============================================================================
// CALL-TYPES
// ============================================================================

/**
 * Protected API route to create a new call type
 *
 * @route POST /api/call-types
 * @authentication Required
 * @param {Object} body - Call type data including id, speciesId, label
 * @returns {Object} Response containing:
 * - data: Created call type object
 * @description Creates a new call type for a species. Requires EDIT permission on any dataset containing the species.
 */
app.post("/api/call-types", authenticate, async (c) => {
  try {
    // Get user ID from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Database connection
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Parse and validate request body (unlike species creation, the call-type id is client-supplied here)
    const body = await c.req.json();
    const { id, speciesId, label } = body;
    // Field validation
    if (!id || typeof id !== 'string' || id.length !== 12) {
      return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);
    }
    if (!speciesId || typeof speciesId !== 'string') {
      return c.json({error: "Missing or invalid required field: speciesId"}, 400);
    }
    if (!label || typeof label !== 'string' || label.trim().length === 0) {
      return c.json({error: "Missing or invalid required field: label"}, 400);
    }
    if (label.length > 100) {
      return c.json({error: "Field 'label' must be 100 characters or less"}, 400);
    }
    // Check if species exists and get its associated datasets
    // (a species with no dataset association is treated as not found)
    const associatedDatasets = await db.select({ datasetId: speciesDataset.datasetId }).from(speciesDataset).where(eq(speciesDataset.speciesId, speciesId));
    if (associatedDatasets.length === 0) {
      return c.json({error: "Species not found or not associated with any dataset"}, 404);
    }
    // Check if user has EDIT permission on at least one associated dataset
    let hasPermission = false;
    for (const assoc of associatedDatasets) {
      if (await checkUserPermission(db, userId, assoc.datasetId, 'EDIT')) {
        hasPermission = true;
        break;
      }
    }
    if (!hasPermission) {
      return c.json({error: "You don't have permission to create call types for this species"}, 403);
    }
    // Create call type
    const now = new Date();
    const [createdCallType] = await db.insert(callType).values({
      id: id.trim(),
      speciesId: speciesId.trim(),
      label: label.trim(),
      createdBy: userId,
      createdAt: now,
      lastModified: now,
      modifiedBy: userId,
      active: true,
    }).returning({
      id: callType.id,
      speciesId: callType.speciesId,
      label: callType.label,
      createdAt: callType.createdAt,
      active: callType.active,
    });
    console.log("Created call type:", createdCallType.id, "for species:", speciesId, "by user:", userId);
    return c.json({data: createdCallType}, 201);
  } catch (error) {
    console.error("Error creating call type:", error);
    // Handle unique constraint violations on the client-supplied id
    if (error instanceof Error && error.message.includes('duplicate key')) {
      return c.json({error: "A call type with this ID already exists"}, 400);
    }
    return c.json(
      {
        error: "Failed to create call type",
        details: error instanceof Error ? error.message : String(error),
      },
      500
    );
  }
});

/**
 * Protected API route to update an existing call type
 *
 * @route PUT /api/call-types/:id
 * @authentication Required
 * @param {string} id - Call type ID in URL path
 * @param {Object} body - Call type data to update (label, active)
 * @returns {Object} Response containing:
 * - data: Updated call type object
 * @description Updates an existing call type.
Requires EDIT permission on any dataset containing the parent species.
 */
app.put("/api/call-types/:id", authenticate, async (c) => {
  try {
    // Get user from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Get call type ID from URL parameters
    const callTypeId = c.req.param("id");
    // Parse request body
    const body = await c.req.json();
    const { label, active } = body;
    // Connect to database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Check if call type exists and get its species
    const existingCallType = await db.select({
      id: callType.id,
      speciesId: callType.speciesId,
      active: callType.active
    }).from(callType).where(eq(callType.id, callTypeId)).limit(1);
    if (existingCallType.length === 0) {
      return c.json({error: "Call type not found"}, 404);
    }
    const speciesId = existingCallType[0].speciesId;
    // Get datasets associated with the species to check permissions
    const associatedDatasets = await db.select({ datasetId: speciesDataset.datasetId }).from(speciesDataset).where(eq(speciesDataset.speciesId, speciesId));
    // Check if user has EDIT permission on at least one associated dataset
    let hasPermission = false;
    for (const assoc of associatedDatasets) {
      if (await checkUserPermission(db, userId, assoc.datasetId, 'EDIT')) {
        hasPermission = true;
        break;
      }
    }
    if (!hasPermission) {
      return c.json({error: "You don't have permission to edit this call type"}, 403);
    }
    // Build update object with only provided fields
    const updateData: Record<string, unknown> = {
      lastModified: new Date(),
      modifiedBy: userId,
    };
    // Validate and add label if provided
    if (label !== undefined) {
      if (typeof label !== 'string' || label.trim().length === 0) {
        return c.json({error: "Field 'label' must be a non-empty string"}, 400);
      }
      if (label.length > 100) {
        return c.json({error: "Field 'label' must be 100 characters or less"}, 400);
      }
      updateData.label = label.trim();
    }
    // Add active status if provided (for soft delete)
    if (active !== undefined) {
      if (typeof active !== 'boolean') {
        return c.json({error: "Field 'active' must be a boolean"}, 400);
      }
      updateData.active = active;
    }
    // Update the call type
    const [updatedCallType] = await db.update(callType).set(updateData).where(eq(callType.id, callTypeId)).returning({
      id: callType.id,
      speciesId: callType.speciesId,
      label: callType.label,
      createdAt: callType.createdAt,
      lastModified: callType.lastModified,
      active: callType.active,
    });
    console.log("Updated call type:", callTypeId, "by user:", userId);
    return c.json({data: updatedCallType});
  } catch (error) {
    console.error("Error updating call type:", error);
    return c.json(
      {
        error: "Failed to update call type",
        details: error instanceof Error ? error.message : String(error),
      },
      500
    );
  }
});

// ============================================================================
// EBIRD SEARCH
// ============================================================================

/**
 * Protected API route to search eBird taxonomy
 *
 * @route GET /api/ebird/search
 * @authentication Required
 * @param {string} q - Search query (common name, scientific name, family, or species code)
 * @returns {Object} Response containing:
 * - data: Array of matching eBird taxonomy entries
 * @description Searches the eBird taxonomy v2024 materialized view for species matching the query.
 * Searches across primary_com_name, sci_name, family, and species_code fields.
 */
app.get("/api/ebird/search", authenticate, async (c) => {
  try {
    const query = c.req.query("q");
    if (!query || query.trim().length === 0) {
      return c.json({error: "Missing or empty query parameter 'q'"}, 400);
    }
    if (query.length < 2) {
      return c.json({error: "Query must be at least 2 characters long"}, 400);
    }
    // Database connection
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Search across multiple fields with case-insensitive partial matching
    // NOTE(review): '%' and '_' in the user query are not escaped, so they act
    // as LIKE wildcards — presumably acceptable for a search box; confirm.
    const searchTerm = `%${query.trim().toLowerCase()}%`;
    // Use raw SQL query for materialized view until Drizzle typing issue is resolved
    const results = await db.execute(sqlExpr`SELECT
id,
species_code as "speciesCode",
primary_com_name as "primaryComName",
sci_name as "sciName",
bird_order as "birdOrder",
family
FROM ebird_taxonomy_v2024
WHERE
LOWER(primary_com_name) LIKE ${searchTerm} OR
LOWER(sci_name) LIKE ${searchTerm} OR
LOWER(family) LIKE ${searchTerm} OR
LOWER(species_code) LIKE ${searchTerm}
ORDER BY primary_com_name
LIMIT 20`);
    return c.json({data: results.rows || results});
  } catch (error) {
    console.error("Error searching eBird taxonomy:", error);
    return c.json(
      {
        error: "Failed to search eBird taxonomy",
        details: error instanceof Error ? error.message : String(error),
      },
      500
    );
  }
});

// ============================================================================
// SELECTION
// ============================================================================

/**
 * Protected API route to fetch files with selections for a specific dataset and species
 *
 * @route GET /api/selection
 * @authentication Required
 * @param {string} datasetId - Required query parameter specifying the dataset
 * @param {string} speciesId - Required query parameter specifying the species to filter by
 * @param {number} [page=1] - Optional page number for pagination (starts at 1)
 * @param {number} [pageSize=100] - Optional page size (10-500, defaults to 100)
 * @param {string} [solarNight] - Optional filter for files recorded during solar night ('true'/'false')
 * @param {string} [civilNight] - Optional filter for files recorded during civil night ('true'/'false')
 * @returns {Object} Response containing:
 * - data: Array of file objects with metadata, mothMetadata, and species information
 * - pagination: Object with pagination metadata
 * - filters: Object showing the filters that were applied
 * @error 400 - If datasetId or speciesId is missing or page is invalid
 * @description Returns files that contain selections labeled with the specified species
 * Designed for cross-cluster searches within a dataset
 *
 * Each file object includes:
 * - Basic file information (name, path, timestamp, duration, etc.)
 * - File metadata (JSON format)
 * - Recording
device metadata (gain, battery voltage, temperature)
 * - Species information
 *
 * Uses efficient query optimization with:
 * - Parallel Promise.all for metadata queries
 * - Proper SQL JOINs to avoid N+1 query problems
 * - Data transformation for optimal client-side consumption
 */
app.get("/api/selection", authenticate, async (c) => {
  try {
    // Get query parameters
    const datasetId = c.req.query("datasetId");
    const speciesId = c.req.query("speciesId");
    const page = parseInt(c.req.query("page") || "1", 10);
    const pageSize = parseInt(c.req.query("pageSize") || "100", 10);
    const solarNight = c.req.query("solarNight");
    const civilNight = c.req.query("civilNight");
    // Validate parameters
    if (!datasetId) {
      return c.json({error: "Missing required query parameter: datasetId"}, 400);
    }
    if (!speciesId) {
      return c.json({error: "Missing required query parameter: speciesId"}, 400);
    }
    // Validate and limit page size to prevent excessive queries
    // (values below 10 are raised to 10; values above 500 are clamped to 500)
    const limitedPageSize = Math.min(Math.max(pageSize, 10), 500);
    const offset = (page - 1) * limitedPageSize;
    // Validate page number
    if (page < 1) {
      return c.json({error: "Invalid page parameter: must be greater than 0"}, 400);
    }
    // Connect to the database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Build base filter condition - active files with selections of the specified species
    // This forms the core of our WHERE clause for both queries
    let baseCondition = sqlExpr`${file.active} = true
AND ${location.datasetId} = ${datasetId}
AND ${label.speciesId} = ${speciesId}
AND ${label.active} = true`;
    // Add filters for day/night if specified
    if (solarNight === 'true') {
      baseCondition = sqlExpr`${baseCondition} AND ${file.maybeSolarNight} = true`;
    } else if (solarNight === 'false') {
      baseCondition = sqlExpr`${baseCondition} AND (${file.maybeSolarNight} = false OR ${file.maybeSolarNight} IS NULL)`;
    }
    if (civilNight === 'true') {
      baseCondition = sqlExpr`${baseCondition} AND ${file.maybeCivilNight} = true`;
    } else if (civilNight === 'false') {
      baseCondition = sqlExpr`${baseCondition} AND (${file.maybeCivilNight} = false OR ${file.maybeCivilNight} IS NULL)`;
    }
    // Get total count for pagination using a more efficient COUNT(1)
    const countResult = await db.select({
      count: sqlExpr<number>`COUNT(DISTINCT ${file.id})`
    }).from(file)
      .innerJoin(cluster, eq(file.clusterId, cluster.id))
      .innerJoin(location, eq(cluster.locationId, location.id))
      .innerJoin(selection, eq(selection.fileId, file.id))
      .innerJoin(label, eq(label.selectionId, selection.id))
      .where(baseCondition);
    const totalFiles = Number(countResult[0].count);
    const totalPages = Math.ceil(totalFiles / limitedPageSize);
    // Early return if there are no matching files
    if (totalFiles === 0) {
      return c.json({
        data: [],
        pagination: {
          currentPage: page,
          pageSize: limitedPageSize,
          totalPages: 0,
          totalItems: 0,
          hasNextPage: false,
          hasPreviousPage: false,
        },
        filters: {
          datasetId,
          speciesId,
          solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
          civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
        }
      });
    }
    // Fetch files with pagination
    const filesResult = await db.select({
      id: file.id,
      fileName: file.fileName,
      path: file.path,
      timestampLocal: file.timestampLocal,
      duration: file.duration,
      sampleRate: file.sampleRate,
      locationId: file.locationId,
      clusterId: file.clusterId,
      description: file.description,
      maybeSolarNight: file.maybeSolarNight,
      maybeCivilNight: file.maybeCivilNight,
      moonPhase: file.moonPhase,
    }).from(file)
      .innerJoin(cluster, eq(file.clusterId, cluster.id))
      .innerJoin(location, eq(cluster.locationId, location.id))
      .innerJoin(selection, eq(selection.fileId, file.id))
      .innerJoin(label, eq(label.selectionId, selection.id))
      .where(baseCondition)
      .orderBy(file.timestampLocal)
      .groupBy(
        file.id, file.fileName, file.path, file.timestampLocal, file.duration,
        file.sampleRate, file.locationId, file.clusterId, file.description, file.maybeSolarNight,
        file.maybeCivilNight, file.moonPhase
      )
      .limit(limitedPageSize)
      .offset(offset);
    // Get all file IDs for metadata queries
    const fileIds = filesResult.map(f => f.id);
    // Safety check - shouldn't happen with our early return, but just in case
    if (fileIds.length === 0) {
      return c.json({
        data: [],
        pagination: {
          currentPage: page,
          pageSize: limitedPageSize,
          totalPages: totalPages,
          totalItems: totalFiles,
          hasNextPage: page < totalPages,
          hasPreviousPage: page > 1,
        },
        filters: {
          datasetId,
          speciesId,
          solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
          civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
        }
      });
    }
    // Convert file IDs to a properly formatted SQL string
    // NOTE(review): ids are interpolated into raw SQL via sqlExpr.raw below;
    // they come from our own DB (nanoids), but a parameterized IN-list would be safer.
    const fileIdsQuoted = fileIds.map(id => `'${id}'`).join(',');
    // Execute metadata queries in parallel using Promise.all for better performance
    const [metadataResults, mothMetadataResults, speciesResults] = await Promise.all([
      // Fetch file metadata
      db.select({
        fileId: fileMetadata.fileId,
        json: fileMetadata.json
      }).from(fileMetadata).where(sqlExpr`${fileMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
      // Fetch moth metadata
      db.select({
        fileId: mothMetadata.fileId,
        gain: mothMetadata.gain,
        batteryV: mothMetadata.batteryV,
        tempC: mothMetadata.tempC
      }).from(mothMetadata).where(sqlExpr`${mothMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
      // Fetch species data - pre-filtered to just get the relevant species
      db.select({
        fileId: selection.fileId,
        speciesId: species.id,
        speciesLabel: species.label,
        ebirdCode: species.ebirdCode,
        description: species.description
      }).from(selection)
        .innerJoin(label, eq(label.selectionId, selection.id))
        .innerJoin(species, eq(species.id, label.speciesId))
        .where(sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})
AND ${label.speciesId} = ${speciesId}
AND ${label.active} = true`)
    ]);
    // Process metadata results into maps for efficient lookups
    const metadataMap = metadataResults.reduce((acc, item) => {
      let processedJson = item.json;
      try {
        if (typeof item.json === 'string' && (item.json.startsWith('{') || item.json.startsWith('['))) {
          processedJson = JSON.parse(item.json);
        } else if (typeof item.json === 'string' && item.json.includes('\\"')) {
          // Some rows store JSON with escaped quotes; unescape before parsing
          const unescaped = item.json.replace(/\\"/g, '"');
          processedJson = JSON.parse(unescaped);
        }
      } catch (e) {
        // Fall back to the raw value rather than failing the whole request
        console.error("Error processing metadata JSON:", e);
        processedJson = item.json;
      }
      acc[item.fileId] = processedJson;
      return acc;
    }, {} as Record<string, unknown>);
    // Process moth metadata
    const mothMetadataMap = mothMetadataResults.reduce((acc, item) => {
      acc[item.fileId] = {
        gain: item.gain,
        batteryV: item.batteryV !== null ? Number(item.batteryV) : null,
        tempC: item.tempC !== null ? Number(item.tempC) : null
      };
      return acc;
    }, {} as Record<string, {
      gain: string | null;
      batteryV: number | null;
      tempC: number | null;
    }>);
    // Process species data with Map for better performance
    const speciesMap = new Map<string, Array<{
      id: string;
      label: string;
      ebirdCode: string | null;
      description: string | null;
    }>>();
    speciesResults.forEach(item => {
      if (!speciesMap.has(item.fileId)) {
        speciesMap.set(item.fileId, []);
      }
      // Get the current species array for this file
      const fileSpecies = speciesMap.get(item.fileId)!;
      // Check if we already have this species (deduplication)
      const existingSpeciesIndex = fileSpecies.findIndex(s => s.id === item.speciesId);
      if (existingSpeciesIndex === -1) {
        // Add species if it doesn't already exist for this file
        fileSpecies.push({
          id: item.speciesId,
          label: item.speciesLabel,
          ebirdCode: item.ebirdCode,
          description: item.description
        });
      }
    });
    // Combine file data with metadata in a single operation
    const files = filesResult.map(fileData => ({
      ...fileData,
      metadata: metadataMap[fileData.id] || null,
      mothMetadata: mothMetadataMap[fileData.id] || null,
      species: speciesMap.get(fileData.id) || []
    }));
    // Return paginated results with metadata and filters
    return c.json({
      data: files,
      pagination: {
        currentPage: page,
        pageSize: limitedPageSize,
        totalPages: totalPages,
        totalItems: totalFiles,
        hasNextPage: page < totalPages,
        hasPreviousPage: page > 1,
      },
      filters: {
        datasetId,
        speciesId,
        solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
        civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
      }
    });
  } catch (error) {
    console.error("Error fetching files by dataset and species:", error);
    return c.json(
      {
        error: "Failed to fetch files by dataset and species",
        details: error instanceof Error ? error.message : String(error),
      },
      500
    );
  }
});

/**
 * Export the Hono app to be used by the Cloudflare Workers runtime
 *
 * @default app
 * @description The main Hono application instance exported as the default
 * This is the entry point that Cloudflare Workers will use when handling requests
 */
export default app;
import { Context, Hono } from "hono";import { drizzle } from "drizzle-orm/neon-http";import { eq, and, sql as sqlExpr } from "drizzle-orm";import { neon } from "@neondatabase/serverless";import {dataset,location,cluster,cyclicRecordingPattern,file,fileMetadata,mothMetadata,species,speciesDataset,callType,selection,label,accessGrant,userRole} from "../../db/schema";import * as jose from 'jose';import { nanoid } from 'nanoid';
import { Hono } from "hono";import type { Env } from "./types";
/**
 * Environment variables required for the Worker
 *
 * @description Defines the environment variables that must be configured in the Cloudflare Workers environment
 * - DATABASE_URL: Connection string for the Neon PostgreSQL database
 * - KINDE_ISSUER_URL: URL of the Kinde identity provider for JWT verification
 */
export type Env = {
  /** Connection string for the Neon PostgreSQL database */
  DATABASE_URL: string;
  /** URL of the Kinde identity provider (for JWT verification) */
  KINDE_ISSUER_URL: string;
};

/**
 * Structure of the JWT token payload from Kinde identity provider
 *
 * @interface JWTPayload
 * @description Defines the structure of the JWT token payload used for authentication
 * Contains standard JWT claims (sub, iat, exp, aud, iss) plus any additional custom claims
 * Used for extracting user information after successful token verification
 */
interface JWTPayload {
  sub: string; // Subject (user ID)
  iat: number; // Issued at
  exp: number; // Expiration time
  aud: string[]; // Audience
  iss: string; // Issuer
  [key: string]: unknown; // Additional claims
}
// Import route modulesimport datasets from "./routes/datasets";import locations from "./routes/locations";import clusters from "./routes/clusters";import files from "./routes/files";import species from "./routes/species";import callTypes from "./routes/callTypes";import ebird from "./routes/ebird";import selections from "./routes/selections";
/*** Helper function to check if a user has a specific permission for a dataset** @param db - Database connection* @param userId - User ID to check permissions for* @param datasetId - Dataset ID to check permissions for* @param permission - Permission to check (READ, EDIT, DELETE, etc.)* @returns Promise<boolean> - True if user has permission, false otherwise*/async function checkUserPermission(// eslint-disable-next-line @typescript-eslint/no-explicit-anydb: any,userId: string,datasetId: string,permission: string): Promise<boolean> {try {// First check if user is the owner (owners have all permissions)const ownerCheck = await db.select({ owner: dataset.owner }).from(dataset).where(eq(dataset.id, datasetId)).limit(1);if (ownerCheck.length > 0 && ownerCheck[0].owner === userId) {return true;}// Get user's roleconst userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);const userRoleName = userRoleResult.length > 0 ? userRoleResult[0].role : 'USER';// Check access grantsconst grants = await db.select({ permission: accessGrant.permission }).from(accessGrant).where(sqlExpr`${accessGrant.datasetId} = ${datasetId} AND${accessGrant.permission} = ${permission} AND${accessGrant.active} = true AND((${accessGrant.userId} = ${userId}) OR(${accessGrant.userId} IS NULL AND ${accessGrant.role} = ${userRoleName}))`).limit(1);return grants.length > 0;} catch (error) {console.error('Error checking user permission:', error);return false;}}
/**
 * Authentication middleware for protected routes
 *
 * Verifies JWT tokens from the Kinde identity provider using JWKS.
 * On successful verification, adds the JWT payload to the context
 * for use in route handlers.
 *
 * @param c - Hono context containing request, response, and environment bindings
 * @param next - Next middleware in the chain
 * @returns Response with 401 status on auth failure, or passes to next middleware
 */
const authenticate = async (c: Context<{ Bindings: Env }>, next: () => Promise<void>): Promise<Response | void> => {
  // Get the authorization header
  const authHeader = c.req.header("Authorization");
  if (!authHeader || !authHeader.startsWith("Bearer ")) {
    return c.json({ error: "Unauthorized: Missing or invalid token" }, 401);
  }
  try {
    // Extract token
    const token = authHeader.split(" ")[1];
    // NOTE(review): the token-prefix and JWKS progress logging below is noisy
    // for production — consider removing once auth is stable.
    console.log("Received token:", token.substring(0, 10) + "...");
    // Get the JWKS endpoint from the Kinde issuer
    const issuerUrl = c.env.KINDE_ISSUER_URL;
    const jwksUrl = `${issuerUrl}/.well-known/jwks.json`;
    console.log("JWKS URL:", jwksUrl);
    // Fetch the JWKS
    console.log("Fetching JWKS...");
    const jwksResponse = await fetch(jwksUrl);
    console.log("JWKS response status:", jwksResponse.status);
    if (!jwksResponse.ok) {
      throw new Error(`Failed to fetch JWKS: ${jwksResponse.status}`);
    }
    await jwksResponse.json(); // Parse JSON to validate response
    console.log("JWKS keys received");
    // Create JWKS from the fetched keys
    // (createRemoteJWKSet fetches keys itself, so the manual fetch above is diagnostics only)
    const keyStore = jose.createRemoteJWKSet(new URL(jwksUrl));
    console.log("Verifying token...");
    console.log("Using issuer:", issuerUrl);
    try {
      // Verify the token with issuer only (no audience validation)
      const { payload } = await jose.jwtVerify(token, keyStore, {
        issuer: issuerUrl,
        // No audience validation - Kinde tokens may have empty audience array
      });
      console.log("Token verified successfully!");
      // Store the payload in the context for use in the route handler
      // Use unknown-casting to work around Hono's type limitations
      (c as unknown as { jwtPayload: JWTPayload }).jwtPayload = payload as JWTPayload;
    } catch (verifyError) {
      console.error("Token verification failed:", verifyError);
      throw verifyError;
    }
    // If we get here, the token is valid
    await next();
  } catch (error) {
    console.error("JWT validation error:", error);
    return c.json({ error: "Unauthorized: Invalid token" }, 401);
  }
};

// ============================================================================
// API ROUTES - Organized by resource
// ============================================================================

// ============================================================================
// DATASETS
// ============================================================================

/**
 * Protected API route to fetch datasets with role-based access control
 *
 * @route GET /api/datasets
 * @authentication Required
 * @returns {Object} Response containing:
 * - data: Array of dataset objects with permissions for the authenticated user
 * - userId: The authenticated user's ID (for debugging)
 * @description Returns datasets the user has READ access to, along with their specific permissions
 * @example Response:
 * {
 *   "data": [
 *     {
 *       "id": "123",
 *       "name": "Bird Sounds 2024",
 *       "description": "Costa Rica recordings",
 *       "public": true,
 *       "permissions": ["READ", "EDIT", "DELETE"]
 *     }
 *   ],
 *   "userId": "user_123abc"
 * }
 */
app.get("/api/datasets", authenticate, async (c) => {
  try {
    // Get the JWT payload (user info)
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub; // User ID from JWT
    // Connect to the database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // First, get the user's role
    const userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId, userId)).limit(1);
    const userRoleName = userRoleResult.length > 0 ? userRoleResult[0].role : 'USER';
    // Query to get datasets with permissions
    // This complex query gets datasets where the user has READ access either through:
    // 1. Direct user-specific grants
    // 2. Role-based grants for their role
    // 3. Being the owner of the dataset
    const results = await db.select({
      id: dataset.id,
      name: dataset.name,
      description: dataset.description,
      public: dataset.public,
      createdAt: dataset.createdAt,
      owner: dataset.owner,
      type: dataset.type,
      permission: accessGrant.permission,
      userId: accessGrant.userId
    }).from(dataset).leftJoin(
      accessGrant,
      sqlExpr`${accessGrant.datasetId} = ${dataset.id} AND ${accessGrant.active} = true AND ((${accessGrant.userId} = ${userId}) OR
(${accessGrant.userId} IS NULL AND ${accessGrant.role} = ${userRoleName}))`
    ).where(sqlExpr`${dataset.active} = true AND (${dataset.owner} = ${userId} OR
${accessGrant.permission} IS NOT NULL)`).orderBy(dataset.name);
    // Group results by dataset and collect permissions
    const datasetMap = new Map<string, {
      id: string;
      name: string;
      description: string | null;
      public: boolean;
      createdAt: string;
      owner: string;
      type: string;
      permissions: string[];
    }>();
    results.forEach(row => {
      if (!datasetMap.has(row.id)) {
        // If user is owner, they have all permissions
        const isOwner = row.owner === userId;
        const basePermissions = isOwner ? ['READ', 'UPLOAD', 'DOWNLOAD', 'EDIT', 'DELETE'] : [];
        datasetMap.set(row.id, {
          id: row.id,
          name: row.name,
          description: row.description,
          public: row.public ?? false,
          createdAt: row.createdAt?.toISOString() ?? new Date().toISOString(),
          owner: row.owner,
          type: row.type,
          permissions: basePermissions
        });
      }
      // Add permission from access grants (avoid duplicates)
      if (row.permission && !datasetMap.get(row.id)!.permissions.includes(row.permission)) {
        datasetMap.get(row.id)!.permissions.push(row.permission);
      }
    });
    // Filter datasets to only include those with READ permission
    const datasetsWithReadAccess = Array.from(datasetMap.values()).filter(dataset => dataset.permissions.includes('READ')).slice(0, 20); // Limit to 20 datasets
    return c.json({
      data: datasetsWithReadAccess,
      userId: userId,
      userRole: userRoleName
    });
  } catch (error) {
    console.error("Error fetching datasets:", error);
    return c.json(
      {
        error: "Failed to fetch datasets",
        details: error instanceof Error ? error.message : String(error),
      },
      500
    );
  }
});

/**
 * Protected API route to create a new dataset
 *
 * @route POST /api/datasets
 * @authentication Required
 * @body {Object} Dataset creation payload:
 * - id: string (nanoid(12) - user generated)
 * - name: string (required, max 255 chars)
 * - description?: string (optional, max 255 chars)
 * - public?: boolean (optional, defaults to false)
 * - type?: string (optional, defaults to 'organise')
 * @returns {Object} Response containing:
 * - data: The created dataset object
 * @error 400 - If required fields are missing or invalid
 * @error 500 - If database operation fails
 * @description Creates a new dataset for the authenticated user
 * The user becomes the owner, creator, and modifier of the dataset
 */
app.post("/api/datasets", authenticate, async (c) => {
  try {
    // Get the JWT payload (user info)
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub; // User ID from JWT
    // Connect to the database first to check permissions
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Check if user has permission to create datasets (ADMIN or CURATOR roles)
    const userRoleResult = await db.select({ role: userRole.role }).from(userRole).where(eq(userRole.userId,
userId)).limit(1);const userRoleName = userRoleResult.length > 0 ? userRoleResult[0].role : 'USER';if (userRoleName !== 'ADMIN' && userRoleName !== 'CURATOR') {return c.json({error: "You don't have permission to create datasets"}, 403);}// Parse request bodyconst body = await c.req.json();const { id, name, description, public: isPublic, type } = body;// Validate required fieldsif (!id || typeof id !== 'string') {return c.json({error: "Missing or invalid required field: id"}, 400);}if (!name || typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Missing or invalid required field: name"}, 400);}// Validate field lengthsif (id.length !== 12) {return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);}if (name.length > 255) {return c.json({error: "Field 'name' must be 255 characters or less"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}// Validate type if providedconst validTypes = ['organise', 'test', 'train'];const datasetType = type || 'organise';if (!validTypes.includes(datasetType)) {return c.json({error: `Field 'type' must be one of: ${validTypes.join(', ')}`}, 400);}// Create the datasetconst now = new Date();const newDataset = {id: id.trim(),name: name.trim(),description: description?.trim() || null,public: Boolean(isPublic),type: datasetType,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,owner: userId,active: true,};// Insert the datasetconst result = await db.insert(dataset).values(newDataset).returning({id: dataset.id,name: dataset.name,description: dataset.description,public: dataset.public,type: dataset.type,createdAt: dataset.createdAt,owner: dataset.owner,});console.log("Created dataset:", result[0].id, "for user:", userId);return c.json({data: result[0]}, 201);} catch (error) {console.error("Error creating dataset:", error);// Handle unique constraint violationsif (error instanceof Error && 
error.message.includes('duplicate key')) {return c.json({error: "A dataset with this ID already exists"}, 400);}return c.json({error: "Failed to create dataset",details: error instanceof Error ? error.message : String(error),},500);}});/*** Protected API route to update an existing dataset** @route PUT /api/datasets/:id* @authentication Required* @param {string} id - Dataset ID in URL path* @body {Object} Dataset update payload:* - name?: string (optional, max 255 chars)* - description?: string (optional, max 255 chars)* - public?: boolean (optional)* - type?: string (optional)* - active?: boolean (optional, for soft delete)* @returns {Object} Response containing:* - data: The updated dataset object* @error 400 - If fields are invalid or dataset not found* @error 403 - If user doesn't own the dataset* @error 500 - If database operation fails* @description Updates an existing dataset owned by the authenticated user* Only the dataset owner can modify it*/app.put("/api/datasets/:id", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT// Get dataset ID from URL parametersconst datasetId = c.req.param("id");if (!datasetId) {return c.json({error: "Missing dataset ID in URL"}, 400);}// Parse request bodyconst body = await c.req.json();const { name, description, public: isPublic, type, active } = body;// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// First, check if the dataset exists and if the user owns itconst existingDataset = await db.select({id: dataset.id,owner: dataset.owner,active: dataset.active}).from(dataset).where(eq(dataset.id, datasetId)).limit(1);if (existingDataset.length === 0) {return c.json({error: "Dataset not found"}, 404);}// Check if user has permission to edit this datasetconst hasEditPermission = await checkUserPermission(db, userId, datasetId, 'EDIT');if 
(!hasEditPermission) {return c.json({error: "You don't have permission to modify this dataset"}, 403);}// Validate fields if providedif (name !== undefined) {if (typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Invalid field: name must be a non-empty string"}, 400);}if (name.length > 255) {return c.json({error: "Field 'name' must be 255 characters or less"}, 400);}}if (description !== undefined && description !== null && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}if (type !== undefined) {const validTypes = ['organise', 'test', 'train'];if (!validTypes.includes(type)) {return c.json({error: `Field 'type' must be one of: ${validTypes.join(', ')}`}, 400);}}// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};if (name !== undefined) {updateData.name = name.trim();}if (description !== undefined) {updateData.description = description?.trim() || null;}if (isPublic !== undefined) {updateData.public = Boolean(isPublic);}if (type !== undefined) {updateData.type = type;}if (active !== undefined) {updateData.active = Boolean(active);}// Update the datasetconst result = await db.update(dataset).set(updateData).where(eq(dataset.id, datasetId)).returning({id: dataset.id,name: dataset.name,description: dataset.description,public: dataset.public,type: dataset.type,createdAt: dataset.createdAt,lastModified: dataset.lastModified,owner: dataset.owner,active: dataset.active,});if (result.length === 0) {return c.json({error: "Failed to update dataset"}, 500);}console.log("Updated dataset:", result[0].id, "for user:", userId);return c.json({data: result[0]});} catch (error) {console.error("Error updating dataset:", error);return c.json({error: "Failed to update dataset",details: error instanceof Error ? 
error.message : String(error),},500);}});// ============================================================================// LOCATIONS// ============================================================================/*** Protected API route to fetch locations for a specific dataset** @route GET /api/locations* @authentication Required* @param {string} datasetId - Required query parameter specifying the dataset to fetch locations from* @param {number} [page=1] - Optional page number for pagination (starts at 1)* @param {number} [pageSize=100] - Optional page size (1-100, defaults to 100)* @returns {Object} Response containing:* - data: Array of location objects with id, name, latitude, longitude, description* - pagination: Object with pagination metadata (currentPage, pageSize, totalPages, totalItems, etc.)* @error 400 - If datasetId is missing or page is invalid* @description Returns active locations for the specified dataset with pagination support*/app.get("/api/locations", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT // Subject claim usually contains the user ID// Get the dataset ID from query parameterconst datasetId = c.req.query("datasetId");const page = parseInt(c.req.query("page") || "1", 10);const pageSize = parseInt(c.req.query("pageSize") || "100", 10);console.log("Locations API called with datasetId:", datasetId, "userId:", userId, "page:", page);if (!datasetId) {console.log("Missing datasetId in request");return c.json({error: "Missing required query parameter: datasetId"}, 400);}// Validate page numberif (page < 1) {return c.json({error: "Invalid page parameter: must be greater than 0"}, 400);}// Limit page size between 1 and 100const limitedPageSize = Math.min(Math.max(pageSize, 1), 100);const offset = (page - 1) * limitedPageSize;// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = 
drizzle(sql);// First, get total count for paginationconsole.log("Counting locations for datasetId:", datasetId);const countResult = await db.select({count: sqlExpr<number>`COUNT(*)`}).from(location).where(sqlExpr`${location.datasetId} = ${datasetId} AND ${location.active} = true`);const totalLocations = Number(countResult[0].count);const totalPages = Math.ceil(totalLocations / limitedPageSize);// Query locations for the specified dataset with pagination (using 100 as default limit)console.log("Querying locations for datasetId:", datasetId, "page:", page);const results = await db.select({id: location.id,name: location.name,latitude: location.latitude,longitude: location.longitude,description: location.description,}).from(location).where(sqlExpr`${location.datasetId} = ${datasetId} AND ${location.active} = true`).orderBy(location.name).limit(limitedPageSize).offset(offset);console.log("Found", results.length, "locations for dataset", datasetId, "page:", page);return c.json({data: results,pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalLocations,hasNextPage: page < totalPages,hasPreviousPage: page > 1,}});} catch (error) {console.error("Error fetching locations:", error);return c.json({error: "Failed to fetch locations",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to create a new location** @route POST /api/locations* @authentication Required* @param {Object} body - Location data including datasetId, name, description, latitude, longitude* @returns {Object} Response containing:* - data: Created location object* @description Creates a new location with the authenticated user as owner.* Requires EDIT permission on the dataset to create locations within it.*/app.post("/api/locations", authenticate, async (c) => {try {// Get user ID from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Database connectionconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Parse and validate request bodyconst body = await c.req.json();const { id, datasetId, name, description, latitude, longitude } = body;// Field validationif (!id || typeof id !== 'string') {return c.json({error: "Missing or invalid required field: id"}, 400);}if (!datasetId || typeof datasetId !== 'string') {return c.json({error: "Missing or invalid required field: datasetId"}, 400);}if (!name || typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Missing or invalid required field: name"}, 400);}// Length validationif (id.length !== 12) {return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}// Validate latitude and longitude if providedif (latitude !== null && latitude !== undefined) {const lat = Number(latitude);if (isNaN(lat) || lat < -90 || lat > 90) {return c.json({error: "Field 'latitude' must be a valid number between -90 and 90"}, 400);}}if (longitude !== null && longitude !== undefined) {const lng = Number(longitude);if (isNaN(lng) || lng < -180 || lng > 
180) {return c.json({error: "Field 'longitude' must be a valid number between -180 and 180"}, 400);}}// Check if user has EDIT permission on the datasetconst hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');if (!hasPermission) {return c.json({error: "You don't have permission to create locations in this dataset"}, 403);}// Create location objectconst now = new Date();const newLocation = {id: id.trim(),datasetId: datasetId.trim(),name: name.trim(),description: description?.trim() || null,latitude: latitude !== null && latitude !== undefined ? String(Number(latitude)) : null,longitude: longitude !== null && longitude !== undefined ? String(Number(longitude)) : null,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,active: true,};// Insert the locationconst result = await db.insert(location).values(newLocation).returning({id: location.id,datasetId: location.datasetId,name: location.name,description: location.description,latitude: location.latitude,longitude: location.longitude,createdAt: location.createdAt,createdBy: location.createdBy,lastModified: location.lastModified,modifiedBy: location.modifiedBy,active: location.active,});console.log("Created location:", result[0].id, "for dataset:", datasetId, "by user:", userId);return c.json({data: result[0]}, 201);} catch (error) {console.error("Error creating location:", error);// Handle unique constraint violationsif (error instanceof Error && error.message.includes('duplicate key')) {return c.json({error: "A location with this ID already exists"}, 400);}return c.json({error: "Failed to create location",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to update an existing location** @route PUT /api/locations/:id* @authentication Required* @param {string} id - Location ID in URL path* @param {Object} body - Location data to update (name, description, latitude, longitude, active)* @returns {Object} Response containing:* - data: Updated location object* @description Updates an existing location. Requires EDIT permission on the dataset.*/app.put("/api/locations/:id", authenticate, async (c) => {try {// Get user from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Get location ID from URL parametersconst locationId = c.req.param("id");// Parse request bodyconst body = await c.req.json();const { name, description, latitude, longitude, active } = body;// Connect to databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Check if location exists and get its dataset IDconst existingLocation = await db.select({id: location.id,datasetId: location.datasetId,active: location.active}).from(location).where(eq(location.id, locationId)).limit(1);if (existingLocation.length === 0) {return c.json({error: "Location not found"}, 404);}const locationRecord = existingLocation[0];// Check if user has EDIT permission on the datasetconst hasEditPermission = await checkUserPermission(db, userId, locationRecord.datasetId, 'EDIT');if (!hasEditPermission) {return c.json({error: "You don't have permission to edit this location"}, 403);}// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};// Validate and add name if providedif (name !== undefined) {if (typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Field 'name' must be a non-empty string"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}updateData.name = name.trim();}// 
Validate and add description if providedif (description !== undefined) {if (description !== null && typeof description !== 'string') {return c.json({error: "Field 'description' must be a string or null"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}updateData.description = description?.trim() || null;}// Validate and add latitude if providedif (latitude !== undefined) {if (latitude !== null) {const lat = Number(latitude);if (isNaN(lat) || lat < -90 || lat > 90) {return c.json({error: "Field 'latitude' must be a valid number between -90 and 90"}, 400);}updateData.latitude = String(lat);} else {updateData.latitude = null;}}// Validate and add longitude if providedif (longitude !== undefined) {if (longitude !== null) {const lng = Number(longitude);if (isNaN(lng) || lng < -180 || lng > 180) {return c.json({error: "Field 'longitude' must be a valid number between -180 and 180"}, 400);}updateData.longitude = String(lng);} else {updateData.longitude = null;}}// Add active status if provided (for soft delete)if (active !== undefined) {if (typeof active !== 'boolean') {return c.json({error: "Field 'active' must be a boolean"}, 400);}updateData.active = active;}// Update the locationconst result = await db.update(location).set(updateData).where(eq(location.id, locationId)).returning({id: location.id,datasetId: location.datasetId,name: location.name,description: location.description,latitude: location.latitude,longitude: location.longitude,createdAt: location.createdAt,createdBy: location.createdBy,lastModified: location.lastModified,modifiedBy: location.modifiedBy,active: location.active,});if (result.length === 0) {return c.json({error: "Failed to update location"}, 500);}console.log("Updated location:", result[0].id, "by user:", userId);return c.json({data: result[0]});} catch (error) {console.error("Error updating location:", error);return c.json({error: "Failed to update 
location",details: error instanceof Error ? error.message : String(error),},500);}});// ============================================================================// CLUSTERS// ============================================================================/*** Protected API route to fetch clusters for a specific location** @route GET /api/clusters* @authentication Required* @param {string} locationId - Required query parameter specifying the location to fetch clusters from* @returns {Object} Response containing:* - data: Array of cluster objects with recording pattern information* @error 400 - If locationId is missing* @description Returns clusters for the specified location along with their recording patterns* Performs a LEFT JOIN with the cyclicRecordingPattern table to include recording duration data* Results are returned with recording pattern information embedded in each cluster object*/app.get("/api/clusters", authenticate, async (c) => {try {// Get the JWT payload (user info)const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub; // User ID from JWT // Subject claim usually contains the user ID// Get the location ID from query parameterconst locationId = c.req.query("locationId");console.log("Cluster API called with locationId:", locationId, "userId:", userId);if (!locationId) {console.log("Missing locationId in request");return c.json({error: "Missing required query parameter: locationId"}, 400);}// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Query clusters with a LEFT JOIN on recording patterns to avoid N+1 queriesconsole.log("Querying clusters for locationId:", locationId);const joinedResults = await db.select({// Cluster fieldsid: cluster.id,datasetId: cluster.datasetId,locationId: cluster.locationId,name: cluster.name,description: cluster.description,createdBy: cluster.createdBy,createdAt: cluster.createdAt,lastModified: cluster.lastModified,modifiedBy: 
cluster.modifiedBy,active: cluster.active,timezoneId: cluster.timezoneId,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId,sampleRate: cluster.sampleRate,// Recording pattern fieldsrecordS: cyclicRecordingPattern.recordS,sleepS: cyclicRecordingPattern.sleepS}).from(cluster).leftJoin(cyclicRecordingPattern,eq(cluster.cyclicRecordingPatternId, cyclicRecordingPattern.id)).where(eq(cluster.locationId, locationId)).orderBy(cluster.name);console.log("Found", joinedResults.length, "clusters for location", locationId);// Transform results to match the expected structureconst enrichedResults = joinedResults.map(row => ({id: row.id,datasetId: row.datasetId,locationId: row.locationId,name: row.name,description: row.description,createdBy: row.createdBy,createdAt: row.createdAt,lastModified: row.lastModified,modifiedBy: row.modifiedBy,active: row.active,timezoneId: row.timezoneId,cyclicRecordingPatternId: row.cyclicRecordingPatternId,sampleRate: row.sampleRate,recordingPattern: (row.recordS !== null && row.sleepS !== null) ? {recordS: row.recordS,sleepS: row.sleepS} : null}));return c.json({data: enrichedResults});} catch (error) {console.error("Error fetching clusters:", error);return c.json({error: "Failed to fetch clusters",details: error instanceof Error ? 
error.message : String(error),},500);}});/*** Protected API route to create a new cluster** @route POST /api/clusters* @authentication Required* @param {Object} body - Cluster data including datasetId, locationId, name, description, timezoneId, sampleRate, recordingPattern* - recordingPattern?: Object (optional) - { recordS: number, sleepS: number }* @returns {Object} Response containing:* - data: Created cluster object with optional recordingPattern* @description Creates a new cluster with the authenticated user as owner.* Requires EDIT permission on the dataset to create clusters within it.* If recordingPattern is provided, creates a new cyclic recording pattern record.*/app.post("/api/clusters", authenticate, async (c) => {try {// Get user ID from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Database connectionconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Parse and validate request bodyconst body = await c.req.json();const { id, datasetId, locationId, name, description, timezoneId, sampleRate, recordingPattern } = body;// Field validationif (!id || typeof id !== 'string') {return c.json({error: "Missing or invalid required field: id"}, 400);}if (!datasetId || typeof datasetId !== 'string') {return c.json({error: "Missing or invalid required field: datasetId"}, 400);}if (!locationId || typeof locationId !== 'string') {return c.json({error: "Missing or invalid required field: locationId"}, 400);}if (!name || typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Missing or invalid required field: name"}, 400);}if (!sampleRate || typeof sampleRate !== 'number' || sampleRate <= 0) {return c.json({error: "Missing or invalid required field: sampleRate"}, 400);}// Length validationif (id.length !== 12) {return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 
characters or less"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}if (timezoneId && timezoneId.length > 40) {return c.json({error: "Field 'timezoneId' must be 40 characters or less"}, 400);}// Validate recording pattern if providedif (recordingPattern) {if (typeof recordingPattern !== 'object' || recordingPattern === null) {return c.json({error: "Field 'recordingPattern' must be an object"}, 400);}const { recordS, sleepS } = recordingPattern;if (typeof recordS !== 'number' || recordS <= 0 || !Number.isInteger(recordS)) {return c.json({error: "Field 'recordingPattern.recordS' must be a positive integer"}, 400);}if (typeof sleepS !== 'number' || sleepS <= 0 || !Number.isInteger(sleepS)) {return c.json({error: "Field 'recordingPattern.sleepS' must be a positive integer"}, 400);}}// Check if user has EDIT permission on the datasetconst hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');if (!hasPermission) {return c.json({error: "You don't have permission to create clusters in this dataset"}, 403);}// Create cluster with optional recording pattern in transactionconst result = await db.transaction(async (tx) => {const now = new Date();let cyclicRecordingPatternId = null;// Create cyclic recording pattern if providedif (recordingPattern) {const patternId = nanoid(12);const [createdPattern] = await tx.insert(cyclicRecordingPattern).values({id: patternId,recordS: recordingPattern.recordS,sleepS: recordingPattern.sleepS,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,}).returning({id: cyclicRecordingPattern.id,});cyclicRecordingPatternId = createdPattern.id;}// Create cluster objectconst newCluster = {id: id.trim(),datasetId: datasetId.trim(),locationId: locationId.trim(),name: name.trim(),description: description?.trim() || null,timezoneId: timezoneId?.trim() || null,cyclicRecordingPatternId: cyclicRecordingPatternId,sampleRate: 
sampleRate,createdBy: userId,createdAt: now,lastModified: now,modifiedBy: userId,active: true,};// Insert the clusterconst [createdCluster] = await tx.insert(cluster).values(newCluster).returning({id: cluster.id,datasetId: cluster.datasetId,locationId: cluster.locationId,name: cluster.name,description: cluster.description,timezoneId: cluster.timezoneId,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId,sampleRate: cluster.sampleRate,createdAt: cluster.createdAt,createdBy: cluster.createdBy,lastModified: cluster.lastModified,modifiedBy: cluster.modifiedBy,active: cluster.active,});// If recording pattern was created, include it in the responseconst recordingPatternData = recordingPattern ? {recordS: recordingPattern.recordS,sleepS: recordingPattern.sleepS} : null;return {...createdCluster,recordingPattern: recordingPatternData};});console.log("Created cluster:", result.id, "for location:", locationId, "by user:", userId);return c.json({data: result}, 201);} catch (error) {console.error("Error creating cluster:", error);// Handle unique constraint violationsif (error instanceof Error && error.message.includes('duplicate key')) {return c.json({error: "A cluster with this ID already exists"}, 400);}return c.json({error: "Failed to create cluster",details: error instanceof Error ? error.message : String(error),},500);}});/*** Protected API route to update an existing cluster** @route PUT /api/clusters/:id* @authentication Required* @param {string} id - Cluster ID in URL path* @param {Object} body - Cluster data to update (name, description, timezoneId, sampleRate, active, recordingPattern)* - recordingPattern?: Object | null (optional) - { recordS: number, sleepS: number } or null to remove* @returns {Object} Response containing:* - data: Updated cluster object with recordingPattern if present* @description Updates an existing cluster. 
Requires EDIT permission on the dataset.* recordingPattern can be provided to add/update or set to null to remove recording pattern.*/app.put("/api/clusters/:id", authenticate, async (c) => {try {// Get user from JWTconst jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;const userId = jwtPayload.sub;// Get cluster ID from URL parametersconst clusterId = c.req.param("id");// Parse request bodyconst body = await c.req.json();const { name, description, timezoneId, sampleRate, active, recordingPattern } = body;// Connect to databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Check if cluster exists and get its dataset ID and current recording patternconst existingCluster = await db.select({id: cluster.id,datasetId: cluster.datasetId,active: cluster.active,cyclicRecordingPatternId: cluster.cyclicRecordingPatternId}).from(cluster).where(eq(cluster.id, clusterId)).limit(1);if (existingCluster.length === 0) {return c.json({error: "Cluster not found"}, 404);}const clusterRecord = existingCluster[0];// Check if user has EDIT permission on the datasetconst hasEditPermission = await checkUserPermission(db, userId, clusterRecord.datasetId, 'EDIT');if (!hasEditPermission) {return c.json({error: "You don't have permission to edit this cluster"}, 403);}// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};// Validate and add name if providedif (name !== undefined) {if (typeof name !== 'string' || name.trim().length === 0) {return c.json({error: "Field 'name' must be a non-empty string"}, 400);}if (name.length > 140) {return c.json({error: "Field 'name' must be 140 characters or less"}, 400);}updateData.name = name.trim();}// Validate and add description if providedif (description !== undefined) {if (description !== null && typeof description !== 'string') {return c.json({error: "Field 'description' must be a string or null"}, 400);}if (description && 
// NOTE(review): continuation of the PUT /api/clusters/:id handler — the opening of this
// `if (description && ...)` length guard sits on the previous line, outside this span.
description.length > 255) {
  return c.json({error: "Field 'description' must be 255 characters or less"}, 400);
}
// Empty / whitespace-only descriptions are normalized to NULL.
updateData.description = description?.trim() || null;
}
// Validate and add timezoneId if provided
if (timezoneId !== undefined) {
  if (timezoneId !== null && typeof timezoneId !== 'string') {
    return c.json({error: "Field 'timezoneId' must be a string or null"}, 400);
  }
  if (timezoneId && timezoneId.length > 40) {
    return c.json({error: "Field 'timezoneId' must be 40 characters or less"}, 400);
  }
  updateData.timezoneId = timezoneId?.trim() || null;
}
// Validate and add sampleRate if provided
if (sampleRate !== undefined) {
  if (typeof sampleRate !== 'number' || sampleRate <= 0) {
    return c.json({error: "Field 'sampleRate' must be a positive number"}, 400);
  }
  updateData.sampleRate = sampleRate;
}
// Validate recording pattern if provided: recordS/sleepS must both be positive integers
if (recordingPattern !== undefined) {
  if (recordingPattern !== null && typeof recordingPattern !== 'object') {
    return c.json({error: "Field 'recordingPattern' must be an object or null"}, 400);
  }
  if (recordingPattern) {
    const { recordS, sleepS } = recordingPattern;
    if (typeof recordS !== 'number' || recordS <= 0 || !Number.isInteger(recordS)) {
      return c.json({error: "Field 'recordingPattern.recordS' must be a positive integer"}, 400);
    }
    if (typeof sleepS !== 'number' || sleepS <= 0 || !Number.isInteger(sleepS)) {
      return c.json({error: "Field 'recordingPattern.sleepS' must be a positive integer"}, 400);
    }
  }
}
// Add active status if provided (for soft delete)
if (active !== undefined) {
  if (typeof active !== 'boolean') {
    return c.json({error: "Field 'active' must be a boolean"}, 400);
  }
  updateData.active = active;
}
// Update cluster with optional recording pattern changes in a single transaction
const result = await db.transaction(async (tx) => {
  const currentPatternId = clusterRecord.cyclicRecordingPatternId;
  // Handle recording pattern updates
  if (recordingPattern !== undefined) {
    if (recordingPattern === null) {
      // Remove recording pattern (hard delete — the pattern table has no active flag).
      // NOTE(review): the pattern row is deleted before the cluster row is updated to
      // drop its reference; assumes no enforced FK from cluster to the pattern — verify schema.
      if (currentPatternId) {
        await tx.delete(cyclicRecordingPattern).where(eq(cyclicRecordingPattern.id, currentPatternId));
        updateData.cyclicRecordingPatternId = null;
      }
    } else {
      // Add or update recording pattern
      if (currentPatternId) {
        // Update existing pattern in place
        await tx.update(cyclicRecordingPattern).set({
          recordS: recordingPattern.recordS,
          sleepS: recordingPattern.sleepS,
          lastModified: new Date(),
          modifiedBy: userId,
        }).where(eq(cyclicRecordingPattern.id, currentPatternId));
      } else {
        // Create a new pattern and point the cluster at it
        const patternId = nanoid(12);
        await tx.insert(cyclicRecordingPattern).values({
          id: patternId,
          recordS: recordingPattern.recordS,
          sleepS: recordingPattern.sleepS,
          createdBy: userId,
          createdAt: new Date(),
          lastModified: new Date(),
          modifiedBy: userId,
        });
        updateData.cyclicRecordingPatternId = patternId;
      }
    }
  }
  // Update the cluster
  const [updatedCluster] = await tx.update(cluster).set(updateData).where(eq(cluster.id, clusterId)).returning({
    id: cluster.id,
    datasetId: cluster.datasetId,
    locationId: cluster.locationId,
    name: cluster.name,
    description: cluster.description,
    timezoneId: cluster.timezoneId,
    cyclicRecordingPatternId: cluster.cyclicRecordingPatternId,
    sampleRate: cluster.sampleRate,
    createdAt: cluster.createdAt,
    createdBy: cluster.createdBy,
    lastModified: cluster.lastModified,
    modifiedBy: cluster.modifiedBy,
    active: cluster.active,
  });
  // Include recording pattern data in the response if the cluster references one
  let recordingPatternData = null;
  if (updatedCluster.cyclicRecordingPatternId) {
    if (recordingPattern !== undefined && recordingPattern !== null) {
      // Use the caller-provided pattern values (already persisted above)
      recordingPatternData = {recordS: recordingPattern.recordS, sleepS: recordingPattern.sleepS};
    } else {
      // Pattern untouched by this request — fetch it for the response payload
      const existingPattern = await tx.select({
        recordS: cyclicRecordingPattern.recordS,
        sleepS: cyclicRecordingPattern.sleepS
      }).from(cyclicRecordingPattern).where(eq(cyclicRecordingPattern.id, updatedCluster.cyclicRecordingPatternId)).limit(1);
      if (existingPattern.length > 0) {
        recordingPatternData = existingPattern[0];
      }
    }
  }
  return {...updatedCluster, recordingPattern: recordingPatternData};
});
if (!result) {
  return c.json({error: "Failed to update cluster"}, 500);
}
console.log("Updated cluster:", result.id, "by user:", userId);
return c.json({data: result});
} catch (error) {
  console.error("Error updating cluster:", error);
  return c.json({
    error: "Failed to update cluster",
    details: error instanceof Error ? error.message : String(error),
  }, 500);
}
});
/**
 * Protected API route to delete a cluster
 *
 * @route DELETE /api/clusters/:id
 * @authentication Required
 * @param {string} id - Cluster ID in URL path
 * @returns {Object} Response containing:
 * - message: Success message
 * @description Soft deletes a cluster (sets active=false) and hard deletes its associated
 * cyclic recording pattern if present. Requires EDIT permission on the dataset.
 */
app.delete("/api/clusters/:id", authenticate, async (c) => {
  try {
    // Get user from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Get cluster ID from URL parameters
    const clusterId = c.req.param("id");
    // Connect to database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Check if cluster exists and get its dataset ID and recording pattern
    const existingCluster = await db.select({
      id: cluster.id,
      datasetId: cluster.datasetId,
      cyclicRecordingPatternId: cluster.cyclicRecordingPatternId
    }).from(cluster).where(eq(cluster.id, clusterId)).limit(1);
    if (existingCluster.length === 0) {
      return c.json({error: "Cluster not found"}, 404);
    }
    const clusterRecord = existingCluster[0];
    // Check if user has EDIT permission on the dataset
    const hasEditPermission = await checkUserPermission(db, userId, clusterRecord.datasetId, 'EDIT');
    if (!hasEditPermission) {
      return c.json({error: "You don't have permission to delete this cluster"}, 403);
    }
    // Soft delete cluster and hard delete associated recording pattern in one transaction
    await db.transaction(async (tx) => {
      // Soft delete the cluster (set active = false)
      await tx.update(cluster).set({
        active: false,
        lastModified: new Date(),
        modifiedBy: userId
      }).where(eq(cluster.id, clusterId));
      // Hard delete associated cyclic recording pattern if it exists (no active field in this table)
      if (clusterRecord.cyclicRecordingPatternId) {
        await tx.delete(cyclicRecordingPattern).where(eq(cyclicRecordingPattern.id, clusterRecord.cyclicRecordingPatternId));
      }
    });
    console.log("Soft deleted cluster:", clusterId, "and hard deleted associated recording pattern by user:", userId);
    return c.json({message: "Cluster deleted successfully"});
  } catch (error) {
    console.error("Error deleting cluster:", error);
    return c.json({
      error: "Failed to delete cluster",
      details: error instanceof Error ? error.message : String(error),
    }, 500);
  }
});
// Register modular sub-routers under their API prefixes.
// NOTE(review): some of these identifiers (`species`, `files`, `clusters`) share names with
// the Drizzle table objects used by the inline routes below — presumably distinct imports;
// verify against the import list at the top of the file.
app.route("/api/datasets", datasets);
app.route("/api/locations", locations);
app.route("/api/clusters", clusters);
app.route("/api/files", files);
app.route("/api/species", species);
app.route("/api/call-types", callTypes);
app.route("/api/ebird", ebird);
app.route("/api/selection", selections);
* Protected API route to fetch audio files for a specific cluster** @route GET /api/files* @authentication Required* @param {string} clusterId - Required query parameter specifying the cluster to fetch files from* @param {number} [page=1] - Optional page number for pagination (starts at 1)* @param {number} [pageSize=100] - Optional page size (10-500, defaults to 100)* @param {string} [solarNight] - Optional filter for files recorded during solar night ('true'/'false')* @param {string} [civilNight] - Optional filter for files recorded during civil night ('true'/'false')* @param {string} [speciesId] - Optional filter for files with selections labeled with specific species* @returns {Object} Response containing:* - data: Array of file objects with metadata, mothMetadata, and species information* - pagination: Object with pagination metadata* - filters: Object showing the filters that were applied* @error 400 - If clusterId is missing or page is invalid* @description Returns audio files for the specified cluster with comprehensive metadata:* - Basic file information (name, path, timestamp, duration, etc.)* - File metadata (JSON format)* - Recording device metadata (gain, battery voltage, temperature)* - Species found in each file** When speciesId is provided, only returns files that have at least one* selection labeled with the specified species.*/app.get("/api/files", authenticate, async (c) => {try {// Get query parametersconst clusterId = c.req.query("clusterId");const page = parseInt(c.req.query("page") || "1", 10);const pageSize = parseInt(c.req.query("pageSize") || "100", 10);const solarNight = c.req.query("solarNight");const civilNight = c.req.query("civilNight");const speciesId = c.req.query("speciesId"); // Optional filter for species// Validate parametersif (!clusterId) {console.log("Missing clusterId in request");return c.json({error: "Missing required query parameter: clusterId"}, 400);}// Validate and limit page size to prevent excessive queriesconst 
limitedPageSize = Math.min(Math.max(pageSize, 10), 500);const offset = (page - 1) * limitedPageSize;// Validate page numberif (page < 1) {console.log("Invalid page number in request:", page);return c.json({error: "Invalid page parameter: must be greater than 0"}, 400);}// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Build filter conditionslet whereConditions = sqlExpr`${file.clusterId} = ${clusterId} AND ${file.active} = true`;// Add filters for solarNight if specifiedif (solarNight === 'true') {whereConditions = sqlExpr`${whereConditions} AND ${file.maybeSolarNight} = true`;} else if (solarNight === 'false') {whereConditions = sqlExpr`${whereConditions} AND (${file.maybeSolarNight} = false OR ${file.maybeSolarNight} IS NULL)`;}// Add filters for civilNight if specifiedif (civilNight === 'true') {whereConditions = sqlExpr`${whereConditions} AND ${file.maybeCivilNight} = true`;} else if (civilNight === 'false') {whereConditions = sqlExpr`${whereConditions} AND (${file.maybeCivilNight} = false OR ${file.maybeCivilNight} IS NULL)`;}// First, get the total count of files for pagination metadatalet countResult;if (speciesId) {// Count only files that have at least one selection labeled with the specified speciescountResult = await db.select({count: sqlExpr<number>`COUNT(DISTINCT ${file.id})`}).from(file).innerJoin(selection, eq(selection.fileId, file.id)).innerJoin(label, eq(label.selectionId, selection.id)).where(sqlExpr`${whereConditions} AND ${label.speciesId} = ${speciesId} AND ${label.active} = true`);} else {// Standard count without species filtercountResult = await db.select({count: sqlExpr<number>`COUNT(1)`}).from(file).where(whereConditions);}const totalFiles = Number(countResult[0].count);const totalPages = Math.ceil(totalFiles / limitedPageSize);// Query files for the specified cluster with paginationlet filesResult;if (speciesId) {// Get only files that have at least one selection labeled with the specified 
      // (species-filtered branch) join selections/labels and de-duplicate files via GROUP BY
      filesResult = await db.select({
        id: file.id,
        fileName: file.fileName,
        path: file.path,
        timestampLocal: file.timestampLocal,
        duration: file.duration,
        sampleRate: file.sampleRate,
        locationId: file.locationId,
        description: file.description,
        maybeSolarNight: file.maybeSolarNight,
        maybeCivilNight: file.maybeCivilNight,
        moonPhase: file.moonPhase,
      }).from(file).innerJoin(selection, eq(selection.fileId, file.id)).innerJoin(label, eq(label.selectionId, selection.id)).where(sqlExpr`${whereConditions} AND ${label.speciesId} = ${speciesId} AND ${label.active} = true`).orderBy(file.timestampLocal).groupBy(file.id, file.fileName, file.path, file.timestampLocal, file.duration, file.sampleRate, file.locationId, file.description, file.maybeSolarNight, file.maybeCivilNight, file.moonPhase).limit(limitedPageSize).offset(offset);
    } else {
      // Standard query without species filter
      filesResult = await db.select({
        id: file.id,
        fileName: file.fileName,
        path: file.path,
        timestampLocal: file.timestampLocal,
        duration: file.duration,
        sampleRate: file.sampleRate,
        locationId: file.locationId,
        description: file.description,
        maybeSolarNight: file.maybeSolarNight,
        maybeCivilNight: file.maybeCivilNight,
        moonPhase: file.moonPhase,
      }).from(file).where(whereConditions).orderBy(file.timestampLocal).limit(limitedPageSize).offset(offset);
    }
    // Early return if no files found
    if (filesResult.length === 0) {
      return c.json({
        data: [],
        pagination: {
          currentPage: page,
          pageSize: limitedPageSize,
          totalPages: totalPages,
          totalItems: totalFiles,
          hasNextPage: page < totalPages,
          hasPreviousPage: page > 1,
        },
        filters: {
          solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
          civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
          speciesId: speciesId || null,
        }
      });
    }
    // Get all file IDs for metadata queries
    const fileIds = filesResult.map(f => f.id);
    // Safety check - if no files found, return empty results
    // NOTE(review): unreachable in practice — the filesResult.length === 0 branch above
    // already returned; kept as-is (doc-only pass).
    if (fileIds.length === 0) {
      return c.json({
        data: [],
        pagination: {
          currentPage: page,
          pageSize: limitedPageSize,
          totalPages: totalPages,
          totalItems: totalFiles,
          hasNextPage: page < totalPages,
          hasPreviousPage: page > 1,
        },
        filters: {
          solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
          civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
          speciesId: speciesId || null,
        }
      });
    }
    // Convert array to proper SQL format for IN clause.
    // NOTE(review): string-built IN list passed through sqlExpr.raw — IDs come from our own
    // query results (nanoid-style), not user input, but a parameterized inArray() would be
    // safer; flagging rather than changing in a doc-only pass.
    const fileIdsQuoted = fileIds.map(id => `'${id}'`).join(',');
    // Execute the three metadata queries in parallel using Promise.all
    const [metadataResults, mothMetadataResults, speciesResults] = await Promise.all([
      // Fetch file metadata (raw JSON column)
      db.select({fileId: fileMetadata.fileId, json: fileMetadata.json}).from(fileMetadata).where(sqlExpr`${fileMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
      // Fetch moth (recording device) metadata
      db.select({fileId: mothMetadata.fileId, gain: mothMetadata.gain, batteryV: mothMetadata.batteryV, tempC: mothMetadata.tempC}).from(mothMetadata).where(sqlExpr`${mothMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),
      // Fetch species data via selections and labels (only active labels)
      db.select({fileId: selection.fileId, speciesId: species.id, speciesLabel: species.label, ebirdCode: species.ebirdCode, description: species.description}).from(selection).innerJoin(label, eq(label.selectionId, selection.id)).innerJoin(species, eq(species.id, label.speciesId)).where(speciesId ?
        sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)}) AND ${label.active} = true AND ${label.speciesId} = ${speciesId}`
        : sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)}) AND ${label.active} = true`)
    ]);
    // Process metadata results: parse JSON strings (including double-escaped payloads),
    // falling back to the raw value on any parse failure.
    const metadataMap = metadataResults.reduce((acc, item) => {
      let processedJson = item.json;
      try {
        if (typeof item.json === 'string' && (item.json.startsWith('{') || item.json.startsWith('['))) {
          processedJson = JSON.parse(item.json);
        } else if (typeof item.json === 'string' && item.json.includes('\\"')) {
          // Handle double-escaped JSON (e.g. stored as \"key\":...)
          const unescaped = item.json.replace(/\\"/g, '"');
          processedJson = JSON.parse(unescaped);
        }
      } catch (e) {
        console.error("Error processing metadata JSON:", e);
        processedJson = item.json;
      }
      acc[item.fileId] = processedJson;
      return acc;
    }, {} as Record<string, unknown>);
    // Process moth metadata: coerce numeric-string columns to numbers, preserving NULLs
    const mothMetadataMap = mothMetadataResults.reduce((acc, item) => {
      acc[item.fileId] = {
        gain: item.gain,
        batteryV: item.batteryV !== null ? Number(item.batteryV) : null,
        tempC: item.tempC !== null ? Number(item.tempC) : null
      };
      return acc;
    }, {} as Record<string, {gain: string | null; batteryV: number | null; tempC: number | null;}>);
    // Process species data: group by file, de-duplicating species entries
    const speciesMap = speciesResults.reduce((acc, item) => {
      if (!acc[item.fileId]) {
        acc[item.fileId] = [];
      }
      // Deduplicate species entries (a file may have many labels for the same species)
      const existingSpecies = acc[item.fileId].find(s => s.id === item.speciesId);
      if (!existingSpecies) {
        acc[item.fileId].push({
          id: item.speciesId,
          label: item.speciesLabel,
          ebirdCode: item.ebirdCode,
          description: item.description
        });
      }
      return acc;
    }, {} as Record<string, Array<{id: string; label: string; ebirdCode: string | null; description: string | null;}>>);
    // Combine file data with metadata
    const files = filesResult.map(fileData => ({
      ...fileData,
      metadata: metadataMap[fileData.id] || null,
      mothMetadata: mothMetadataMap[fileData.id] || null,
      species: speciesMap[fileData.id] || []
    }));
    // Return paginated results with metadata and filters
    return c.json({
      data: files,
      pagination: {
        currentPage: page,
        pageSize: limitedPageSize,
        totalPages: totalPages,
        totalItems: totalFiles,
        hasNextPage: page < totalPages,
        hasPreviousPage: page > 1,
      },
      filters: {
        solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,
        civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,
        speciesId: speciesId || null,
      }
    });
  } catch (error) {
    console.error("Error fetching files:", error);
    return c.json({
      error: "Failed to fetch files",
      details: error instanceof Error ?
      error.message : String(error),
    }, 500);
  }
});
// ============================================================================
// SPECIES
// ============================================================================
/**
 * Protected API route to fetch species and call types for a dataset
 *
 * @route GET /api/species
 * @authentication Required
 * @param {string} datasetId - Required query parameter specifying the dataset to fetch species from
 * @returns {Object} Response containing:
 * - data: Array of species objects, each with an array of associated call types
 * @error 400 - If datasetId is missing
 * @description Returns species associated with the specified dataset along with their call types
 * Each species object includes:
 * - id, label, ebirdCode, description
 * - callTypes: Array of call type objects with id and label
 *
 * Uses an efficient JOIN approach to fetch data in a single query to avoid N+1 query problems
 * Results are transformed to provide a nested structure with call types inside species objects
 */
app.get("/api/species", authenticate, async (c) => {
  try {
    // Get query parameter for datasetId
    const datasetId = c.req.query("datasetId");
    // Validate parameters
    if (!datasetId) {
      return c.json({error: "Missing required query parameter: datasetId"}, 400);
    }
    // Connect to the database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Use JOIN approach to get species and call types in a single query.
    // This eliminates the N+1 query problem and reduces round trips to the database.
    // The leftJoin keeps species that have no (active) call types.
    const queryResults = await db.select({
      id: species.id,
      label: species.label,
      ebirdCode: species.ebirdCode,
      description: species.description,
      callTypeId: callType.id,
      callTypeLabel: callType.label
    }).from(species).innerJoin(speciesDataset, eq(species.id, speciesDataset.speciesId)).leftJoin(callType, sqlExpr`${callType.speciesId} = ${species.id} AND ${callType.active} = true`).where(eq(speciesDataset.datasetId, datasetId)).orderBy(species.label);
    // If no results are found, return an empty array
    if (queryResults.length === 0) {
      return c.json({data: []});
    }
    // Transform the flat results into nested objects.
    // Using a Map for better performance with large datasets.
    const speciesMap = new Map();
    queryResults.forEach(row => {
      if (!speciesMap.has(row.id)) {
        // Create a new species entry if not already in the map
        speciesMap.set(row.id, {
          id: row.id,
          label: row.label,
          ebirdCode: row.ebirdCode,
          description: row.description,
          callTypes: []
        });
      }
      // Add the call type if it exists and isn't already in the array.
      // NOTE(review): this local `species` shadows the imported table object of the same
      // name within this callback.
      if (row.callTypeId) {
        const species = speciesMap.get(row.id);
        const existingCallType = species.callTypes.find((ct: { id: string }) => ct.id === row.callTypeId);
        if (!existingCallType) {
          species.callTypes.push({id: row.callTypeId, label: row.callTypeLabel});
        }
      }
    });
    // Convert map to array
    const enrichedSpecies = Array.from(speciesMap.values());
    // Return the enriched species data
    return c.json({data: enrichedSpecies});
  } catch (error) {
    console.error("Error fetching species:", error);
    return c.json({
      error: "Failed to fetch species",
      details: error instanceof Error ? error.message : String(error),
    }, 500);
  }
});
/**
 * Protected API route to create a new species
 *
 * @route POST /api/species
 * @authentication Required
 * @param {Object} body - Species data including datasetId, label, description, ebirdCode, callTypes
 * @returns {Object} Response containing:
 * - data: Created species object with call types
 * @description Creates a new species with optional eBird integration and call types.
 * Requires EDIT permission on the dataset.
 * Creates entries in species, species_dataset,
 * and optionally call_type tables in a single transaction.
 */
app.post("/api/species", authenticate, async (c) => {
  try {
    // Get user ID from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Database connection
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Parse and validate request body
    const body = await c.req.json();
    const { id, datasetId, label, description, ebirdCode, callTypes } = body;
    // Field validation
    if (!id || typeof id !== 'string') {
      return c.json({error: "Missing or invalid required field: id"}, 400);
    }
    if (!datasetId || typeof datasetId !== 'string') {
      return c.json({error: "Missing or invalid required field: datasetId"}, 400);
    }
    if (!label || typeof label !== 'string' || label.trim().length === 0) {
      return c.json({error: "Missing or invalid required field: label"}, 400);
    }
    // Length validation (IDs are client-generated nanoids)
    if (id.length !== 12) {
      return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);
    }
    if (label.length > 100) {
      return c.json({error: "Field 'label' must be 100 characters or less"}, 400);
    }
    if (description && description.length > 255) {
      return c.json({error: "Field 'description' must be 255 characters or less"}, 400);
    }
    if (ebirdCode && ebirdCode.length > 12) {
      return c.json({error: "Field 'ebirdCode' must be 12 characters or less"}, 400);
    }
    // Validate call types if provided
    if (callTypes && (!Array.isArray(callTypes) || callTypes.some(ct => !ct.label || ct.label.length > 100))) {
      return c.json({error: "Field 'callTypes' must be an array of objects with 'label' field (max 100 chars)"}, 400);
    }
    // Check if user has EDIT permission on the dataset
    const hasPermission = await checkUserPermission(db, userId, datasetId.trim(), 'EDIT');
    if (!hasPermission) {
      return c.json({error: "You don't have permission to create species in this dataset"}, 403);
    }
    // Start transaction for atomic creation of species + association + call types
    const result = await db.transaction(async (tx) => {
      const now = new Date();
      // Create species record; taxonomyVersion is only set when an eBird code is supplied
      const newSpecies = {
        id: id.trim(),
        label: label.trim(),
        ebirdCode: ebirdCode?.trim() || null,
        taxonomyVersion: ebirdCode ? '2024' : null,
        description: description?.trim() || null,
        createdBy: userId,
        createdAt: now,
        lastModified: now,
        modifiedBy: userId,
        owner: userId,
        active: true,
      };
      const [createdSpecies] = await tx.insert(species).values(newSpecies).returning({
        id: species.id,
        label: species.label,
        ebirdCode: species.ebirdCode,
        taxonomyVersion: species.taxonomyVersion,
        description: species.description,
        createdAt: species.createdAt,
        createdBy: species.createdBy,
        lastModified: species.lastModified,
        modifiedBy: species.modifiedBy,
        active: species.active,
      });
      // Create species-dataset association
      await tx.insert(speciesDataset).values({
        speciesId: id.trim(),
        datasetId: datasetId.trim(),
        createdAt: now,
        createdBy: userId,
        lastModified: now,
        modifiedBy: userId,
      });
      // Create call types if provided (server-generated IDs)
      const createdCallTypes = [];
      if (callTypes && callTypes.length > 0) {
        for (const callTypeData of callTypes) {
          const callTypeId = nanoid(12);
          const [createdCallType] = await tx.insert(callType).values({
            id: callTypeId,
            speciesId: id.trim(),
            label: callTypeData.label.trim(),
            createdBy: userId,
            createdAt: now,
            lastModified: now,
            modifiedBy: userId,
            active: true,
          }).returning({
            id: callType.id,
            label: callType.label,
          });
          createdCallTypes.push(createdCallType);
        }
      }
      return {...createdSpecies, callTypes: createdCallTypes};
    });
    console.log("Created species:", result.id, "for dataset:", datasetId, "by user:", userId);
    return c.json({data: result}, 201);
  } catch (error) {
    console.error("Error creating species:", error);
    // Handle unique constraint violations
    if (error instanceof Error && error.message.includes('duplicate key')) {
      return c.json({error: "A species with this ID already exists"}, 400);
    }
    return c.json({
      error: "Failed to create species",
      details: error instanceof Error ?
      error.message : String(error),
    }, 500);
  }
});
/**
 * Protected API route to update an existing species
 *
 * @route PUT /api/species/:id
 * @authentication Required
 * @param {string} id - Species ID in URL path
 * @param {Object} body - Species data to update (label, description, ebirdCode, active)
 * @returns {Object} Response containing:
 * - data: Updated species object with call types
 * @description Updates an existing species. Requires EDIT permission on the dataset.
 * When soft deleting (active=false), also soft deletes all related call types.
 */
app.put("/api/species/:id", authenticate, async (c) => {
  try {
    // Get user from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Get species ID from URL parameters
    const speciesId = c.req.param("id");
    // Parse request body
    const body = await c.req.json();
    const { label, description, ebirdCode, active } = body;
    // Connect to database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Check if species exists and get its associated datasets
    const existingSpecies = await db.select({
      id: species.id,
      active: species.active
    }).from(species).where(eq(species.id, speciesId)).limit(1);
    if (existingSpecies.length === 0) {
      return c.json({error: "Species not found"}, 404);
    }
    // Get datasets associated with this species to check permissions
    const associatedDatasets = await db.select({ datasetId: speciesDataset.datasetId }).from(speciesDataset).where(eq(speciesDataset.speciesId, speciesId));
    if (associatedDatasets.length === 0) {
      return c.json({error: "Species not associated with any dataset"}, 400);
    }
    // Check if user has EDIT permission on at least one associated dataset
    let hasPermission = false;
    for (const assoc of associatedDatasets) {
      if (await checkUserPermission(db, userId, assoc.datasetId, 'EDIT')) {
        hasPermission = true;
        break;
      }
    }
    if (!hasPermission) {
      return c.json({error: "You don't have permission to edit this species"}, 403);
    }
    // Start transaction for atomic update
    const result = await
db.transaction(async (tx) => {// Build update object with only provided fieldsconst updateData: Record<string, unknown> = {lastModified: new Date(),modifiedBy: userId,};// Validate and add label if providedif (label !== undefined) {if (typeof label !== 'string' || label.trim().length === 0) {return c.json({error: "Field 'label' must be a non-empty string"}, 400);}if (label.length > 100) {return c.json({error: "Field 'label' must be 100 characters or less"}, 400);}updateData.label = label.trim();}// Validate and add description if providedif (description !== undefined) {if (description !== null && typeof description !== 'string') {return c.json({error: "Field 'description' must be a string or null"}, 400);}if (description && description.length > 255) {return c.json({error: "Field 'description' must be 255 characters or less"}, 400);}updateData.description = description?.trim() || null;}// Validate and add eBird code if providedif (ebirdCode !== undefined) {if (ebirdCode !== null && typeof ebirdCode !== 'string') {return c.json({error: "Field 'ebirdCode' must be a string or null"}, 400);}if (ebirdCode && ebirdCode.length > 12) {return c.json({error: "Field 'ebirdCode' must be 12 characters or less"}, 400);}updateData.ebirdCode = ebirdCode?.trim() || null;updateData.taxonomyVersion = ebirdCode ? 
'2024' : null;}// Add active status if provided (for soft delete)if (active !== undefined) {if (typeof active !== 'boolean') {return c.json({error: "Field 'active' must be a boolean"}, 400);}updateData.active = active;// If soft deleting species, also soft delete related recordsif (!active) {// Soft delete all call types for this speciesawait tx.update(callType).set({active: false,lastModified: new Date(),modifiedBy: userId}).where(eq(callType.speciesId, speciesId));// Delete all species-dataset associations for this species (hard delete since no active field)await tx.delete(speciesDataset).where(eq(speciesDataset.speciesId, speciesId));}}// Update the speciesconst [updatedSpecies] = await tx.update(species).set(updateData).where(eq(species.id, speciesId)).returning({id: species.id,label: species.label,ebirdCode: species.ebirdCode,taxonomyVersion: species.taxonomyVersion,description: species.description,createdAt: species.createdAt,createdBy: species.createdBy,lastModified: species.lastModified,modifiedBy: species.modifiedBy,active: species.active,});// Get updated call typesconst callTypes = await tx.select({id: callType.id,label: callType.label,}).from(callType).where(and(eq(callType.speciesId, speciesId), eq(callType.active, true)));return {...updatedSpecies,callTypes};});console.log("Updated species:", speciesId, "by user:", userId);return c.json({data: result});} catch (error) {console.error("Error updating species:", error);return c.json({error: "Failed to update species",details: error instanceof Error ? 
      error.message : String(error),
    }, 500);
  }
});
// ============================================================================
// CALL-TYPES
// ============================================================================
/**
 * Protected API route to create a new call type
 *
 * @route POST /api/call-types
 * @authentication Required
 * @param {Object} body - Call type data including id, speciesId, label
 * @returns {Object} Response containing:
 * - data: Created call type object
 * @description Creates a new call type for a species. Requires EDIT permission on any dataset containing the species.
 */
app.post("/api/call-types", authenticate, async (c) => {
  try {
    // Get user ID from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Database connection
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Parse and validate request body
    const body = await c.req.json();
    const { id, speciesId, label } = body;
    // Field validation (client supplies the nanoid)
    if (!id || typeof id !== 'string' || id.length !== 12) {
      return c.json({error: "Field 'id' must be exactly 12 characters (nanoid)"}, 400);
    }
    if (!speciesId || typeof speciesId !== 'string') {
      return c.json({error: "Missing or invalid required field: speciesId"}, 400);
    }
    if (!label || typeof label !== 'string' || label.trim().length === 0) {
      return c.json({error: "Missing or invalid required field: label"}, 400);
    }
    if (label.length > 100) {
      return c.json({error: "Field 'label' must be 100 characters or less"}, 400);
    }
    // Check if species exists and get its associated datasets
    const associatedDatasets = await db.select({ datasetId: speciesDataset.datasetId }).from(speciesDataset).where(eq(speciesDataset.speciesId, speciesId));
    if (associatedDatasets.length === 0) {
      return c.json({error: "Species not found or not associated with any dataset"}, 404);
    }
    // Check if user has EDIT permission on at least one associated dataset
    let hasPermission = false;
    for (const assoc of associatedDatasets) {
      if (await checkUserPermission(db, userId, assoc.datasetId, 'EDIT')) {
        hasPermission = true;
        break;
      }
    }
    if (!hasPermission) {
      return c.json({error: "You don't have permission to create call types for this species"}, 403);
    }
    // Create call type
    const now = new Date();
    const [createdCallType] = await db.insert(callType).values({
      id: id.trim(),
      speciesId: speciesId.trim(),
      label: label.trim(),
      createdBy: userId,
      createdAt: now,
      lastModified: now,
      modifiedBy: userId,
      active: true,
    }).returning({
      id: callType.id,
      speciesId: callType.speciesId,
      label: callType.label,
      createdAt: callType.createdAt,
      active: callType.active,
    });
    console.log("Created call type:", createdCallType.id, "for species:", speciesId, "by user:", userId);
    return c.json({data: createdCallType}, 201);
  } catch (error) {
    console.error("Error creating call type:", error);
    // Surface duplicate-key constraint violations as a client error
    if (error instanceof Error && error.message.includes('duplicate key')) {
      return c.json({error: "A call type with this ID already exists"}, 400);
    }
    return c.json({
      error: "Failed to create call type",
      details: error instanceof Error ? error.message : String(error),
    }, 500);
  }
});
/**
 * Protected API route to update an existing call type
 *
 * @route PUT /api/call-types/:id
 * @authentication Required
 * @param {string} id - Call type ID in URL path
 * @param {Object} body - Call type data to update (label, active)
 * @returns {Object} Response containing:
 * - data: Updated call type object
 * @description Updates an existing call type.
 * Requires EDIT permission on any dataset containing the parent species.
 */
app.put("/api/call-types/:id", authenticate, async (c) => {
  try {
    // Get user from JWT
    const jwtPayload = (c as unknown as { jwtPayload: JWTPayload }).jwtPayload;
    const userId = jwtPayload.sub;
    // Get call type ID from URL parameters
    const callTypeId = c.req.param("id");
    // Parse request body
    const body = await c.req.json();
    const { label, active } = body;
    // Connect to database
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Check if call type exists and get its species
    const existingCallType = await db.select({
      id: callType.id,
      speciesId: callType.speciesId,
      active: callType.active
    }).from(callType).where(eq(callType.id, callTypeId)).limit(1);
    if (existingCallType.length === 0) {
      return c.json({error: "Call type not found"}, 404);
    }
    const speciesId = existingCallType[0].speciesId;
    // Get datasets associated with the species to check permissions
    const associatedDatasets = await db.select({ datasetId: speciesDataset.datasetId }).from(speciesDataset).where(eq(speciesDataset.speciesId, speciesId));
    // Check if user has EDIT permission on at least one associated dataset
    let hasPermission = false;
    for (const assoc of associatedDatasets) {
      if (await checkUserPermission(db, userId, assoc.datasetId, 'EDIT')) {
        hasPermission = true;
        break;
      }
    }
    if (!hasPermission) {
      return c.json({error: "You don't have permission to edit this call type"}, 403);
    }
    // Build update object with only provided fields
    const updateData: Record<string, unknown> = {
      lastModified: new Date(),
      modifiedBy: userId,
    };
    // Validate and add label if provided
    if (label !== undefined) {
      if (typeof label !== 'string' || label.trim().length === 0) {
        return c.json({error: "Field 'label' must be a non-empty string"}, 400);
      }
      if (label.length > 100) {
        return c.json({error: "Field 'label' must be 100 characters or less"}, 400);
      }
      updateData.label = label.trim();
    }
    // Add active status if provided (for soft delete)
    if (active !== undefined) {
      if (typeof active !== 'boolean') {
        return c.json({error: "Field 'active' must be a boolean"}, 400);
      }
      updateData.active = active;
    }
    // Update the call type
    const [updatedCallType] = await db.update(callType).set(updateData).where(eq(callType.id, callTypeId)).returning({
      id: callType.id,
      speciesId: callType.speciesId,
      label: callType.label,
      createdAt: callType.createdAt,
      lastModified: callType.lastModified,
      active: callType.active,
    });
    console.log("Updated call type:", callTypeId, "by user:", userId);
    return c.json({data: updatedCallType});
  } catch (error) {
    console.error("Error updating call type:", error);
    return c.json({
      error: "Failed to update call type",
      details: error instanceof Error ? error.message : String(error),
    }, 500);
  }
});
// ============================================================================
// EBIRD SEARCH
// ============================================================================
/**
 * Protected API route to search eBird taxonomy
 *
 * @route GET /api/ebird/search
 * @authentication Required
 * @param {string} q - Search query (common name, scientific name, family, or species code)
 * @returns {Object} Response containing:
 * - data: Array of matching eBird taxonomy entries
 * @description Searches the eBird taxonomy v2024 materialized view for species matching the query.
 * Searches across primary_com_name, sci_name, family, and species_code fields.
 */
app.get("/api/ebird/search", authenticate, async (c) => {
  try {
    const query = c.req.query("q");
    if (!query || query.trim().length === 0) {
      return c.json({error: "Missing or empty query parameter 'q'"}, 400);
    }
    if (query.length < 2) {
      return c.json({error: "Query must be at least 2 characters long"}, 400);
    }
    // Database connection
    const sql = neon(c.env.DATABASE_URL);
    const db = drizzle(sql);
    // Search across multiple fields with case-insensitive partial matching.
    // The term is parameterized via the tagged template, so LIKE wildcards in user
    // input widen the match but cannot inject SQL.
    const searchTerm = `%${query.trim().toLowerCase()}%`;
    // Use raw SQL query for materialized view until Drizzle typing issue is resolved
    const results = await db.execute(sqlExpr`SELECT
      id,
      species_code as
"speciesCode",primary_com_name as "primaryComName",sci_name as "sciName",bird_order as "birdOrder",familyFROM ebird_taxonomy_v2024WHERELOWER(primary_com_name) LIKE ${searchTerm} ORLOWER(sci_name) LIKE ${searchTerm} ORLOWER(family) LIKE ${searchTerm} ORLOWER(species_code) LIKE ${searchTerm}ORDER BY primary_com_nameLIMIT 20`);return c.json({data: results.rows || results});} catch (error) {console.error("Error searching eBird taxonomy:", error);return c.json({error: "Failed to search eBird taxonomy",details: error instanceof Error ? error.message : String(error),},500);}});// ============================================================================// SELECTION// ============================================================================/*** Protected API route to fetch files with selections for a specific dataset and species** @route GET /api/selection* @authentication Required* @param {string} datasetId - Required query parameter specifying the dataset* @param {string} speciesId - Required query parameter specifying the species to filter by* @param {number} [page=1] - Optional page number for pagination (starts at 1)* @param {number} [pageSize=100] - Optional page size (10-500, defaults to 100)* @param {string} [solarNight] - Optional filter for files recorded during solar night ('true'/'false')* @param {string} [civilNight] - Optional filter for files recorded during civil night ('true'/'false')* @returns {Object} Response containing:* - data: Array of file objects with metadata, mothMetadata, and species information* - pagination: Object with pagination metadata* - filters: Object showing the filters that were applied* @error 400 - If datasetId or speciesId is missing or page is invalid* @description Returns files that contain selections labeled with the specified species* Designed for cross-cluster searches within a dataset** Each file object includes:* - Basic file information (name, path, timestamp, duration, etc.)* - File metadata (JSON format)* - Recording 
device metadata (gain, battery voltage, temperature)* - Species information** Uses efficient query optimization with:* - Parallel Promise.all for metadata queries* - Proper SQL JOINs to avoid N+1 query problems* - Data transformation for optimal client-side consumption*/app.get("/api/selection", authenticate, async (c) => {try {// Get query parametersconst datasetId = c.req.query("datasetId");const speciesId = c.req.query("speciesId");const page = parseInt(c.req.query("page") || "1", 10);const pageSize = parseInt(c.req.query("pageSize") || "100", 10);const solarNight = c.req.query("solarNight");const civilNight = c.req.query("civilNight");// Validate parametersif (!datasetId) {return c.json({error: "Missing required query parameter: datasetId"}, 400);}if (!speciesId) {return c.json({error: "Missing required query parameter: speciesId"}, 400);}// Validate and limit page size to prevent excessive queriesconst limitedPageSize = Math.min(Math.max(pageSize, 10), 500);const offset = (page - 1) * limitedPageSize;// Validate page numberif (page < 1) {return c.json({error: "Invalid page parameter: must be greater than 0"}, 400);}// Connect to the databaseconst sql = neon(c.env.DATABASE_URL);const db = drizzle(sql);// Build base filter condition - active files with selections of the specified species// This forms the core of our WHERE clause for both querieslet baseCondition = sqlExpr`${file.active} = trueAND ${location.datasetId} = ${datasetId}AND ${label.speciesId} = ${speciesId}AND ${label.active} = true`;// Add filters for day/night if specifiedif (solarNight === 'true') {baseCondition = sqlExpr`${baseCondition} AND ${file.maybeSolarNight} = true`;} else if (solarNight === 'false') {baseCondition = sqlExpr`${baseCondition} AND (${file.maybeSolarNight} = false OR ${file.maybeSolarNight} IS NULL)`;}if (civilNight === 'true') {baseCondition = sqlExpr`${baseCondition} AND ${file.maybeCivilNight} = true`;} else if (civilNight === 'false') {baseCondition = 
sqlExpr`${baseCondition} AND (${file.maybeCivilNight} = false OR ${file.maybeCivilNight} IS NULL)`;}// Get total count for pagination using a more efficient COUNT(1)const countResult = await db.select({count: sqlExpr<number>`COUNT(DISTINCT ${file.id})`}).from(file).innerJoin(cluster, eq(file.clusterId, cluster.id)).innerJoin(location, eq(cluster.locationId, location.id)).innerJoin(selection, eq(selection.fileId, file.id)).innerJoin(label, eq(label.selectionId, selection.id)).where(baseCondition);const totalFiles = Number(countResult[0].count);const totalPages = Math.ceil(totalFiles / limitedPageSize);// Early return if there are no matching filesif (totalFiles === 0) {return c.json({data: [],pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: 0,totalItems: 0,hasNextPage: false,hasPreviousPage: false,},filters: {datasetId,speciesId,solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,civilNight: civilNight === 'true' ? true : civilNight === 'false' ? 
false : null,}});}// Fetch files with paginationconst filesResult = await db.select({id: file.id,fileName: file.fileName,path: file.path,timestampLocal: file.timestampLocal,duration: file.duration,sampleRate: file.sampleRate,locationId: file.locationId,clusterId: file.clusterId,description: file.description,maybeSolarNight: file.maybeSolarNight,maybeCivilNight: file.maybeCivilNight,moonPhase: file.moonPhase,}).from(file).innerJoin(cluster, eq(file.clusterId, cluster.id)).innerJoin(location, eq(cluster.locationId, location.id)).innerJoin(selection, eq(selection.fileId, file.id)).innerJoin(label, eq(label.selectionId, selection.id)).where(baseCondition).orderBy(file.timestampLocal).groupBy(file.id, file.fileName, file.path, file.timestampLocal, file.duration,file.sampleRate, file.locationId, file.clusterId, file.description, file.maybeSolarNight,file.maybeCivilNight, file.moonPhase).limit(limitedPageSize).offset(offset);// Get all file IDs for metadata queriesconst fileIds = filesResult.map(f => f.id);// Safety check - shouldn't happen with our early return, but just in caseif (fileIds.length === 0) {return c.json({data: [],pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalFiles,hasNextPage: page < totalPages,hasPreviousPage: page > 1,},filters: {datasetId,speciesId,solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,civilNight: civilNight === 'true' ? true : civilNight === 'false' ? 
false : null,}});}// Convert file IDs to a properly formatted SQL stringconst fileIdsQuoted = fileIds.map(id => `'${id}'`).join(',');// Execute metadata queries in parallel using Promise.all for better performanceconst [metadataResults, mothMetadataResults, speciesResults] = await Promise.all([// Fetch file metadatadb.select({fileId: fileMetadata.fileId,json: fileMetadata.json}).from(fileMetadata).where(sqlExpr`${fileMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),// Fetch moth metadatadb.select({fileId: mothMetadata.fileId,gain: mothMetadata.gain,batteryV: mothMetadata.batteryV,tempC: mothMetadata.tempC}).from(mothMetadata).where(sqlExpr`${mothMetadata.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})`),// Fetch species data - pre-filtered to just get the relevant speciesdb.select({fileId: selection.fileId,speciesId: species.id,speciesLabel: species.label,ebirdCode: species.ebirdCode,description: species.description}).from(selection).innerJoin(label, eq(label.selectionId, selection.id)).innerJoin(species, eq(species.id, label.speciesId)).where(sqlExpr`${selection.fileId} IN (${sqlExpr.raw(fileIdsQuoted)})AND ${label.speciesId} = ${speciesId}AND ${label.active} = true`)]);// Process metadata results into maps for efficient lookupsconst metadataMap = metadataResults.reduce((acc, item) => {let processedJson = item.json;try {if (typeof item.json === 'string' && (item.json.startsWith('{') || item.json.startsWith('['))) {processedJson = JSON.parse(item.json);} else if (typeof item.json === 'string' && item.json.includes('\\"')) {const unescaped = item.json.replace(/\\"/g, '"');processedJson = JSON.parse(unescaped);}} catch (e) {console.error("Error processing metadata JSON:", e);processedJson = item.json;}acc[item.fileId] = processedJson;return acc;}, {} as Record<string, unknown>);// Process moth metadataconst mothMetadataMap = mothMetadataResults.reduce((acc, item) => {acc[item.fileId] = {gain: item.gain,batteryV: item.batteryV !== null ? 
Number(item.batteryV) : null,tempC: item.tempC !== null ? Number(item.tempC) : null};return acc;}, {} as Record<string, {gain: string | null;batteryV: number | null;tempC: number | null;}>);// Process species data with Map for better performanceconst speciesMap = new Map<string, Array<{id: string;label: string;ebirdCode: string | null;description: string | null;}>>();speciesResults.forEach(item => {if (!speciesMap.has(item.fileId)) {speciesMap.set(item.fileId, []);}// Get the current species array for this fileconst fileSpecies = speciesMap.get(item.fileId)!;// Check if we already have this species (deduplication)const existingSpeciesIndex = fileSpecies.findIndex(s => s.id === item.speciesId);if (existingSpeciesIndex === -1) {// Add species if it doesn't already exist for this filefileSpecies.push({id: item.speciesId,label: item.speciesLabel,ebirdCode: item.ebirdCode,description: item.description});}});// Combine file data with metadata in a single operationconst files = filesResult.map(fileData => ({...fileData,metadata: metadataMap[fileData.id] || null,mothMetadata: mothMetadataMap[fileData.id] || null,species: speciesMap.get(fileData.id) || []}));// Return paginated results with metadata and filtersreturn c.json({data: files,pagination: {currentPage: page,pageSize: limitedPageSize,totalPages: totalPages,totalItems: totalFiles,hasNextPage: page < totalPages,hasPreviousPage: page > 1,},filters: {datasetId,speciesId,solarNight: solarNight === 'true' ? true : solarNight === 'false' ? false : null,civilNight: civilNight === 'true' ? true : civilNight === 'false' ? false : null,}});} catch (error) {console.error("Error fetching files by dataset and species:", error);return c.json({error: "Failed to fetch files by dataset and species",details: error instanceof Error ? error.message : String(error),},500);}});/**
})})describe('Modular structure tests', () => {it('should test route file imports work correctly', async () => {// Test that the modular structure can be imported// This validates that the refactoring preserved the API structureconst { checkUserPermission } = await import('../../../worker/middleware/auth')const { createDatabase } = await import('../../../worker/utils/database')const { Env } = await import('../../../worker/types')expect(typeof checkUserPermission).toBe('function')expect(typeof createDatabase).toBe('function')expect(typeof Env).toBe('undefined') // Types don't exist at runtime})it('should validate authentication middleware exports', async () => {const authModule = await import('../../../worker/middleware/auth')expect(authModule.authenticate).toBeDefined()expect(authModule.checkUserPermission).toBeDefined()expect(typeof authModule.authenticate).toBe('function')expect(typeof authModule.checkUserPermission).toBe('function')
it('should validate route modules export Hono instances', async () => {const datasetsModule = await import('../../../worker/routes/datasets')const locationsModule = await import('../../../worker/routes/locations')expect(datasetsModule.default).toBeDefined()expect(locationsModule.default).toBeDefined()// Hono instances should have common methodsexpect(typeof datasetsModule.default.get).toBe('function')expect(typeof locationsModule.default.post).toBe('function')})