GE3VNRXLBCRRCW3R5CFGYKCAZE2WCQNTKTUW4BKV5GEN3NZKWXTAC // AviaNZMeta is the metadata element in a .data filetype AviaNZMeta struct {Operator string `json:"Operator"`Reviewer *string `json:"Reviewer,omitempty"`Duration float64 `json:"Duration"`}// AviaNZLabel represents a species label in a segmenttype AviaNZLabel struct {Species string `json:"species"`Certainty int `json:"certainty"`Filter string `json:"filter"`}// AviaNZSegment represents a detection segment [start, end, freq_low, freq_high, labels]type AviaNZSegment [5]any
package tools

// AviaNZMeta holds the metadata element of an AviaNZ .data file.
type AviaNZMeta struct {
	Operator string  `json:"Operator"`
	Reviewer *string `json:"Reviewer,omitempty"`
	Duration float64 `json:"Duration"`
}

// AviaNZLabel is one species label attached to a segment.
type AviaNZLabel struct {
	Species   string `json:"species"`
	Certainty int    `json:"certainty"`
	Filter    string `json:"filter"`
}

// AviaNZSegment is a detection segment encoded as the 5-tuple
// [start, end, freq_low, freq_high, labels].
type AviaNZSegment [5]any
// JSONTime wraps time.Time and marshals as RFC3339 string.// Used as an embedded field in marshal-only structs to avoid// duplicating MarshalJSON for every DB type that has timestamps.type JSONTime time.Time// MarshalJSON implements json.Marshaler for JSONTime.func (t JSONTime) MarshalJSON() ([]byte, error) {return json.Marshal(time.Time(t).Format(time.RFC3339))}// jt converts a time.Time to JSONTime.func jt(t time.Time) JSONTime { return JSONTime(t) }
// NOTE(review): truncated diff hunk of a MarshalJSON body — the enclosing
// func and the closing of the struct literal lie outside this view, so the
// code is kept byte-identical. Fields suggest the Location type — TODO confirm.
// before: timestamps carried as pre-formatted strings, pointer literal:
return json.Marshal(&struct {ID string `json:"id"`DatasetID string `json:"dataset_id"`Name string `json:"name"`Latitude float64 `json:"latitude"`Longitude float64 `json:"longitude"`Description *string `json:"description"`CreatedAt string `json:"created_at"`LastModified string `json:"last_modified"`Active bool `json:"active"`TimezoneID string `json:"timezone_id"`
// after: timestamps typed as JSONTime (RFC3339 emitted by its MarshalJSON),
// and the literal is now a value rather than a pointer:
return json.Marshal(struct {ID string `json:"id"`DatasetID string `json:"dataset_id"`Name string `json:"name"`Latitude float64 `json:"latitude"`Longitude float64 `json:"longitude"`Description *string `json:"description"`CreatedAt JSONTime `json:"created_at"`LastModified JSONTime `json:"last_modified"`Active bool `json:"active"`TimezoneID string `json:"timezone_id"`
// NOTE(review): truncated diff hunk of a MarshalJSON body — enclosing func
// and literal closing are outside this view; code kept byte-identical.
// Fields (LocationID, CyclicRecordingPatternID, SampleRate) suggest the
// Cluster type — TODO confirm.
// before: string timestamps, pointer literal:
return json.Marshal(&struct {ID string `json:"id"`DatasetID string `json:"dataset_id"`LocationID string `json:"location_id"`Name string `json:"name"`Description *string `json:"description"`CreatedAt string `json:"created_at"`LastModified string `json:"last_modified"`Active bool `json:"active"`CyclicRecordingPatternID *string `json:"cyclic_recording_pattern_id"`SampleRate int `json:"sample_rate"`
// after: JSONTime timestamps, value literal:
return json.Marshal(struct {ID string `json:"id"`DatasetID string `json:"dataset_id"`LocationID string `json:"location_id"`Name string `json:"name"`Description *string `json:"description"`CreatedAt JSONTime `json:"created_at"`LastModified JSONTime `json:"last_modified"`Active bool `json:"active"`CyclicRecordingPatternID *string `json:"cyclic_recording_pattern_id"`SampleRate int `json:"sample_rate"`
// NOTE(review): truncated diff hunk of a MarshalJSON body — enclosing func
// and literal closing are outside this view; code kept byte-identical.
// RecordS/SleepS suggest the CyclicRecordingPattern type — TODO confirm.
// before: string timestamps, pointer literal:
return json.Marshal(&struct {ID string `json:"id"`RecordS int `json:"record_s"`SleepS int `json:"sleep_s"`CreatedAt string `json:"created_at"`LastModified string `json:"last_modified"`Active bool `json:"active"`
// after: JSONTime timestamps, value literal:
return json.Marshal(struct {ID string `json:"id"`RecordS int `json:"record_s"`SleepS int `json:"sleep_s"`CreatedAt JSONTime `json:"created_at"`LastModified JSONTime `json:"last_modified"`Active bool `json:"active"`
// NOTE(review): diff hunk (before/after of the same validation block — not
// meant to both execute). before: inline missing-flag collection; prints one
// combined error, shows usage, then exits(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *record == 0 {
	missing = append(missing, "--record")
}
if *sleep == 0 {
	missing = append(missing, "--sleep")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: helper-based equivalent — string flags via requireFlags, int flags
// via requireNonZeroFlags (anonymous structs keep the variadic call shape).
requireFlags(fs, "--db", *dbPath)
requireNonZeroFlags(fs,
	struct {
		Name  string
		Value int
	}{"--record", *record},
	struct {
		Name  string
		Value int
	}{"--sleep", *sleep},
)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *id == "" {
	missing = append(missing, "--id")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper; exits through fs.Usage/os.Exit(1) on missing.
requireFlags(fs, "--db", *dbPath, "--id", *id)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--dataset")
}
if *name == "" {
	missing = append(missing, "--name")
}
if *lat == "" {
	missing = append(missing, "--lat")
}
if *lon == "" {
	missing = append(missing, "--lon")
}
if *tz == "" {
	missing = append(missing, "--timezone")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper; same name/value order preserved.
requireFlags(fs, "--db", *dbPath, "--dataset", *datasetID, "--name", *name, "--lat", *lat, "--lon", *lon, "--timezone", *tz)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *id == "" {
	missing = append(missing, "--id")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--id", *id)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--dataset")
}
if *csvPath == "" {
	missing = append(missing, "--csv")
}
if *logPath == "" {
	missing = append(missing, "--log")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--dataset", *datasetID, "--csv", *csvPath, "--log", *logPath)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--dataset")
}
if *locationID == "" {
	missing = append(missing, "--location")
}
if *clusterID == "" {
	missing = append(missing, "--cluster")
}
if *filePath == "" {
	missing = append(missing, "--file")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--dataset", *datasetID, "--location", *locationID, "--cluster", *clusterID, "--file", *filePath)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--dataset")
}
if *locationID == "" {
	missing = append(missing, "--location")
}
if *clusterID == "" {
	missing = append(missing, "--cluster")
}
if *folderPath == "" {
	missing = append(missing, "--folder")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--dataset", *datasetID, "--location", *locationID, "--cluster", *clusterID, "--folder", *folderPath)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--dataset")
}
if *locationID == "" {
	missing = append(missing, "--location")
}
if *clusterID == "" {
	missing = append(missing, "--cluster")
}
if *folderPath == "" {
	missing = append(missing, "--folder")
}
if *mappingPath == "" {
	missing = append(missing, "--mapping")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--dataset", *datasetID, "--location", *locationID, "--cluster", *clusterID, "--folder", *folderPath, "--mapping", *mappingPath)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--dataset")
}
if *folderPath == "" {
	missing = append(missing, "--folder")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--dataset", *datasetID, "--folder", *folderPath)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
// The datasetID variable reports as "--id" here — presumably this
// subcommand registers the dataset flag under --id; verify at the call site.
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--id")
}
if *output == "" {
	missing = append(missing, "--output")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper, same "--id" label for datasetID.
requireFlags(fs, "--db", *dbPath, "--id", *datasetID, "--output", *output)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *name == "" {
	missing = append(missing, "--name")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--name", *name)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *id == "" {
	missing = append(missing, "--id")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--id", *id)
// requireFlags checks that the given flag values are non-empty strings.// If any are empty, prints an error and exits with usage.// Each pair is (flagName, flagValue) — e.g. requireFlags(fs, "--db", *dbPath, "--id", *id)func requireFlags(fs *flag.FlagSet, pairs ...string) {var missing []stringfor i := 0; i < len(pairs); i += 2 {if pairs[i+1] == "" {missing = append(missing, pairs[i])}}if len(missing) > 0 {fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)fs.Usage()os.Exit(1)}}// requireNonZeroFlags checks that the given int flag values are non-zero.// Each pair is (flagName, flagValue) — e.g. requireNonZeroFlags(fs, "--record", *record)func requireNonZeroFlags(fs *flag.FlagSet, pairs ...struct {Name stringValue int}) {var missing []stringfor _, p := range pairs {if p.Value == 0 {missing = append(missing, p.Name)}}if len(missing) > 0 {fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)fs.Usage()os.Exit(1)}}
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *datasetID == "" {
	missing = append(missing, "--dataset")
}
if *locationID == "" {
	missing = append(missing, "--location")
}
if *name == "" {
	missing = append(missing, "--name")
}
if *sampleRate == "" {
	missing = append(missing, "--sample-rate")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper (sampleRate is a string flag here, so the
// string-based requireFlags applies).
requireFlags(fs, "--db", *dbPath, "--dataset", *datasetID, "--location", *locationID, "--name", *name, "--sample-rate", *sampleRate)
// NOTE(review): diff hunk (before/after — not meant to both execute).
// before: inline missing-flag collection, combined error, usage, exit(1).
missing := []string{}
if *dbPath == "" {
	missing = append(missing, "--db")
}
if *id == "" {
	missing = append(missing, "--id")
}
if len(missing) > 0 {
	fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
	fs.Usage()
	os.Exit(1)
}
// after: equivalent via helper.
requireFlags(fs, "--db", *dbPath, "--id", *id)
## [2026-05-11] Validation consolidation, DBPath migration, cmd helpers, type dedup

### Validation consolidation (Stream 2)

Moved duplicated verify/validate functions from `tools/` to `db/validation.go`,
using the `db.Querier` interface consistently. This eliminates signature drift
(`*sql.DB` vs `db.Querier` vs anonymous interface) and provides a single source
of truth for entity-exists-and-active checks.

**New functions in `db/validation.go`:**

- `ClusterExistsAndActive(Querier, string)` — cluster exists and is active
- `PatternExistsAndActive(Querier, string)` — cyclic recording pattern exists and is active
- `LocationExistsAndActive(Querier, string)` — location exists and is active
- `ValidateDatasetTypeForExport(Querier, string)` — dataset exists, active, and structured (for export)

**Removed from `tools/`:**

- `verifyPatternExists` (cluster.go) → `db.PatternExistsAndActive`
- `validateClusterActive` (cluster.go) → `db.ClusterExistsAndActive`
- `validateCyclicPattern` (cluster.go) → `db.PatternExistsAndActive`
- `verifyDatasetActive` (dataset.go) → `db.DatasetExistsAndActive`
- `verifyExportDataset` (export.go) → `db.ValidateDatasetTypeForExport`
- `verifyDatasetExistsAndActive` (location.go) → `db.DatasetExistsAndActive`
- `verifyLocationExistsAndActive` (location.go) → `db.LocationExistsAndActive`
- `verifyPatternExistsAndActive` (pattern.go) → `db.PatternExistsAndActive`

### DBPath migration (Stream 3a)

Added `DBPath` field to all Input structs that access the database. The tools
package now uses `resolveDBPath(input.DBPath)` which prefers the explicit
field and falls back to the package-level `var dbPath` (set by `SetDBPath`).
This is an incremental migration — `SetDBPath` still works, but all `cmd/`
functions now also pass `DBPath: *dbPath` in their Input structs.
**Input structs with new `DBPath` field:**
`BulkFileImportInput`, `ClusterInput`, `DatasetInput`, `ExecuteSQLInput`,
`ExportDatasetInput`, `ImportAudioFilesInput`, `ImportFileInput`,
`ImportSegmentsInput`, `ImportUnstructuredInput`, `LocationInput`,
`PatternInput`

**New helper:** `resolveDBPath(inputPath string) string` in `tools/sql.go`

**Updated sub-functions to accept `dbPath` parameter:**

- `bulkValidateLocations` now takes `dbPath string` parameter
- `copyDataToOutput` now takes `sourceDBPath string` parameter

### cmd/ boilerplate reduction (Stream 3b)

Added `requireFlags()` and `requireNonZeroFlags()` helpers in `cmd/common.go`.
Replaced 16 instances of the `missing := []string{}` + append + error pattern
across cmd/cluster.go, cmd/dataset.go, cmd/export.go, cmd/import.go,
cmd/location.go, and cmd/pattern.go.

### AviaNZ types extraction (Stream 4a)

Moved `AviaNZMeta`, `AviaNZLabel`, `AviaNZSegment` type definitions from
`tools/calls_from_preds.go` to `tools/avianz_types.go`. These types are shared
by `calls_from_birda.go` and `calls_from_raven.go` and should live in their
own file.

### MarshalJSON dedup (Stream 4b)

Introduced `JSONTime` type in `db/types.go` that marshals as RFC3339 string.
Replaced four near-identical `MarshalJSON` implementations (Dataset, Location,
Cluster, CyclicRecordingPattern) with `JSONTime`-based versions, eliminating
~80 lines of boilerplate.