SWLGSMXGYI3G4YG35W47V5QG33CDWZQMJYCJVTQKT4XXEPF3OUFAC K2K53ZPCIG7VEFW5XQUJOMJJMXVSVRBJPINBZKUBORTMU44R2F4QC IFVRAERTCCDICNTYTG3TX2WASB6RXQQEJWWXQMQZJSQDQ3HLE5OQC DBOROCRFD6A5SJBMFYFEJI5S5M77X4EFEK6KDQWA5QDMQJKIHRWQC 7NS27QXZMVTZBK4VPMYL5IKGSTTAWR6NDG5SOVITNX44VNIRZPMAC AUTP2GAED3F4BQVCLOPRORNLCFUGTSPW6542OA2BPP7TRXPIFKRAC D4EL6RSTSZ3S3IDSETRNGLJHZKGZEE2V2OZIOKQK6LRLHQNS77JQC package toolsimport ("encoding/json""os""path/filepath""testing""skraak/utils")func TestPushCertaintyPromotesMatchingLabels(t *testing.T) {tempDir := t.TempDir()// File with two Kiwi segments: certainty=90 and certainty=70file1 := `[{"Operator": "test"}, [0, 10, 100, 1000, [{"species": "Kiwi", "certainty": 90}]], [10, 20, 100, 1000, [{"species": "Kiwi", "certainty": 70}]]]`file1Path := filepath.Join(tempDir, "file1.data")if err := os.WriteFile(file1Path, []byte(file1), 0644); err != nil {t.Fatal(err)}// File with one Tomtit at certainty=90 (must not be promoted when species=Kiwi)file2 := `[{"Operator": "test"}, [0, 10, 100, 1000, [{"species": "Tomtit", "certainty": 90}]]]`file2Path := filepath.Join(tempDir, "file2.data")if err := os.WriteFile(file2Path, []byte(file2), 0644); err != nil {t.Fatal(err)}result, err := PushCertainty(PushCertaintyConfig{Folder: tempDir,Species: "Kiwi",Reviewer: "TestReviewer",})if err != nil {t.Fatal(err)}if result.SegmentsUpdated != 1 {t.Errorf("expected 1 segment updated, got %d", result.SegmentsUpdated)}if result.FilesUpdated != 1 {t.Errorf("expected 1 file updated, got %d", result.FilesUpdated)}// Verify file1: certainty=90 Kiwi → 100, certainty=70 Kiwi → unchangeddf, err := utils.ParseDataFile(file1Path)if err != nil {t.Fatal(err)}if df.Segments[0].Labels[0].Certainty != 100 {t.Errorf("expected certainty=100, got %d", df.Segments[0].Labels[0].Certainty)}if df.Segments[1].Labels[0].Certainty != 70 {t.Errorf("expected certainty=70 unchanged, got %d", df.Segments[1].Labels[0].Certainty)}if df.Meta.Reviewer != "TestReviewer" {t.Errorf("expected 
reviewer=TestReviewer, got %q", df.Meta.Reviewer)}// Verify Tomtit file was not modifieddf2, err := utils.ParseDataFile(file2Path)if err != nil {t.Fatal(err)}if df2.Segments[0].Labels[0].Certainty != 90 {t.Errorf("Tomtit certainty should be unchanged at 90, got %d", df2.Segments[0].Labels[0].Certainty)}}func TestPushCertaintyFilterScope(t *testing.T) {tempDir := t.TempDir()// Segment has two labels from different filters, both Kiwi certainty=90data := []interface{}{map[string]interface{}{"Operator": "test"},[]interface{}{0.0, 10.0, 100.0, 1000.0, []interface{}{map[string]interface{}{"species": "Kiwi", "certainty": 90, "filter": "model-a"},map[string]interface{}{"species": "Kiwi", "certainty": 90, "filter": "model-b"},}},}raw, _ := json.Marshal(data)filePath := filepath.Join(tempDir, "file1.data")if err := os.WriteFile(filePath, raw, 0644); err != nil {t.Fatal(err)}// Push only model-aresult, err := PushCertainty(PushCertaintyConfig{Folder: tempDir,Filter: "model-a",Species: "Kiwi",Reviewer: "TestReviewer",})if err != nil {t.Fatal(err)}if result.SegmentsUpdated != 1 {t.Errorf("expected 1 segment updated, got %d", result.SegmentsUpdated)}// Verify only model-a label was promoted; model-b stays at 90df, err := utils.ParseDataFile(filePath)if err != nil {t.Fatal(err)}for _, label := range df.Segments[0].Labels {if label.Filter == "model-a" && label.Certainty != 100 {t.Errorf("model-a label should be 100, got %d", label.Certainty)}if label.Filter == "model-b" && label.Certainty != 90 {t.Errorf("model-b label should be unchanged at 90, got %d", label.Certainty)}}}
package tools

import (
	"fmt"

	"skraak/utils"
)

// PushCertaintyConfig holds the configuration for push-certainty.
// Fields mirror the `calls push-certainty` CLI flags.
type PushCertaintyConfig struct {
	Folder   string  // folder containing .data files (or File for a single file)
	File     string  // single .data file to act on
	Filter   string  // restrict to labels with this filter name ("" = any)
	Species  string  // restrict to this species ("" = any)
	CallType string  // restrict to this call type ("" = any)
	Night    bool    // only act on solar-night recordings
	Day      bool    // only act on solar-day recordings
	Lat      float64 // latitude in decimal degrees (used with Night/Day)
	Lng      float64 // longitude in decimal degrees (used with Night/Day)
	Timezone string  // IANA timezone ID (e.g. Pacific/Auckland)
	Reviewer string  // reviewer name written to each updated file's metadata
}

// PushCertaintyResult holds the result of push-certainty.
type PushCertaintyResult struct {
	SegmentsUpdated   int `json:"segments_updated"`
	FilesUpdated      int `json:"files_updated"`
	TimeFilteredCount int `json:"time_filtered_count"`
}

// PushCertainty promotes all certainty=90 segments matching the filter scope to certainty=100.
// Uses identical filtering logic to LoadDataFiles so the scope matches calls classify exactly.
//
// SegmentsUpdated counts segments, not labels: a segment carrying several
// matching labels (e.g. from multiple filters when no Filter scope is given)
// has every matching label promoted but is counted once. (Previously the
// counter incremented per label, over-reporting in that case.)
func PushCertainty(config PushCertaintyConfig) (*PushCertaintyResult, error) {
	state, err := LoadDataFiles(ClassifyConfig{
		Folder:    config.Folder,
		File:      config.File,
		Filter:    config.Filter,
		Species:   config.Species,
		CallType:  config.CallType,
		Certainty: 90,
		Sample:    -1, // no sampling: act on every matching segment
		Night:     config.Night,
		Day:       config.Day,
		Lat:       config.Lat,
		Lng:       config.Lng,
		Timezone:  config.Timezone,
	})
	if err != nil {
		return nil, err
	}

	// Snapshot the filtered segments once, before any mutation: the call is
	// loop-invariant here, and taking it up front guarantees promoting labels
	// below cannot alter the selection mid-loop.
	filtered := state.FilteredSegs()

	var segsUpdated, filesUpdated int
	for i, df := range state.DataFiles {
		fileChanged := false
		for _, seg := range filtered[i] {
			// Promote every matching label in the segment, but count the
			// segment only once.
			segChanged := false
			for _, label := range seg.Labels {
				if labelMatchesPush(label, config.Filter, config.Species, config.CallType) {
					label.Certainty = 100
					segChanged = true
				}
			}
			if segChanged {
				segsUpdated++
				fileChanged = true
			}
		}
		if fileChanged {
			df.Meta.Reviewer = config.Reviewer
			if err := df.Write(df.FilePath); err != nil {
				return nil, fmt.Errorf("write %s: %w", df.FilePath, err)
			}
			filesUpdated++
		}
	}

	return &PushCertaintyResult{
		SegmentsUpdated:   segsUpdated,
		FilesUpdated:      filesUpdated,
		TimeFilteredCount: state.TimeFilteredCount,
	}, nil
}

// labelMatchesPush reports whether the label is in the push scope and has certainty=90.
// Certainty is already guaranteed by LoadDataFiles, but we re-check to target only the
// specific label that matched (a segment may carry labels from multiple filters).
func labelMatchesPush(label *utils.Label, filter, species, callType string) bool {
	if filter != "" && label.Filter != filter {
		return false
	}
	if species != "" && label.Species != species {
		return false
	}
	if callType != "" && label.CallType != callType {
		return false
	}
	return label.Certainty == 90
}
package cmd

import (
	"encoding/json"
	"fmt"
	"os"
	"strconv"

	"skraak/tools"
	"skraak/utils"
)

// printPushCertaintyUsage writes the help text for `calls push-certainty` to stderr.
func printPushCertaintyUsage() {
	fmt.Fprintf(os.Stderr, "Usage: skraak calls push-certainty [options]\n\n")
	fmt.Fprintf(os.Stderr, "Promote certainty=90 segments to certainty=100 for a filtered set.\n")
	fmt.Fprintf(os.Stderr, "Filtering logic matches 'calls classify' exactly. Reviewer is set from config.\n\n")
	fmt.Fprintf(os.Stderr, "Options:\n")
	fmt.Fprintf(os.Stderr, " --folder <path> Path to folder containing .data files (required, or --file)\n")
	fmt.Fprintf(os.Stderr, " --file <path> Path to a single .data file (required, or --folder)\n")
	fmt.Fprintf(os.Stderr, " --filter <name> Scope to filter name (optional)\n")
	fmt.Fprintf(os.Stderr, " --species <name> Scope to species, optionally with calltype (e.g. Kiwi, Kiwi+Duet)\n")
	fmt.Fprintf(os.Stderr, " --night Only act on solar-night recordings (requires --lat and --lng)\n")
	fmt.Fprintf(os.Stderr, " --day Only act on solar-day recordings (requires --lat and --lng)\n")
	fmt.Fprintf(os.Stderr, " --lat <float> Latitude in decimal degrees (required with --night or --day)\n")
	fmt.Fprintf(os.Stderr, " --lng <float> Longitude in decimal degrees (required with --night or --day)\n")
	fmt.Fprintf(os.Stderr, " --timezone <zone> IANA timezone ID (e.g. Pacific/Auckland)\n")
	fmt.Fprintf(os.Stderr, "\nExamples:\n")
	fmt.Fprintf(os.Stderr, " skraak calls push-certainty --folder ./data --species Kiwi\n")
	fmt.Fprintf(os.Stderr, " skraak calls push-certainty --folder ./data --species Kiwi --night --lat -45.5 --lng 167.4\n")
}

// flagValue returns the value following args[i] for the named flag,
// printing a usage error and exiting if no value is present.
func flagValue(args []string, i int, flag string) string {
	if i+1 >= len(args) {
		fmt.Fprintf(os.Stderr, "Error: %s requires a value\n", flag)
		os.Exit(1)
	}
	return args[i+1]
}

// floatFlagValue is flagValue for flags that take a float64,
// exiting with an error when the value does not parse.
func floatFlagValue(args []string, i int, flag string) float64 {
	v, err := strconv.ParseFloat(flagValue(args, i, flag), 64)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %s must be a number\n", flag)
		os.Exit(1)
	}
	return v
}

// runCallsPushCertainty parses CLI flags, validates them, loads the reviewer
// from config, and invokes tools.PushCertainty. Exits non-zero on any error.
func runCallsPushCertainty(args []string) {
	var folder, file, filter, species, timezone string
	var night, day bool
	var lat, lng float64
	var latSet, lngSet bool // --lat/--lng explicitly provided (0 is a valid coordinate)

	i := 0
	for i < len(args) {
		switch arg := args[i]; arg {
		case "--folder":
			folder = flagValue(args, i, arg)
			i += 2
		case "--file":
			file = flagValue(args, i, arg)
			i += 2
		case "--filter":
			filter = flagValue(args, i, arg)
			i += 2
		case "--species":
			species = flagValue(args, i, arg)
			i += 2
		case "--night":
			night = true
			i++
		case "--day":
			day = true
			i++
		case "--lat":
			lat = floatFlagValue(args, i, arg)
			latSet = true
			i += 2
		case "--lng":
			lng = floatFlagValue(args, i, arg)
			lngSet = true
			i += 2
		case "--timezone":
			timezone = flagValue(args, i, arg)
			i += 2
		case "--help", "-h":
			printPushCertaintyUsage()
			os.Exit(0)
		default:
			fmt.Fprintf(os.Stderr, "Error: unknown flag: %s\n\n", arg)
			printPushCertaintyUsage()
			os.Exit(1)
		}
	}

	// Flag validation: a target is required, and the solar filters need
	// coordinates and cannot be combined.
	if folder == "" && file == "" {
		fmt.Fprintf(os.Stderr, "Error: missing required flag: --folder or --file\n\n")
		printPushCertaintyUsage()
		os.Exit(1)
	}
	if night && day {
		fmt.Fprintf(os.Stderr, "Error: --night and --day are mutually exclusive\n\n")
		printPushCertaintyUsage()
		os.Exit(1)
	}
	if (night || day) && (!latSet || !lngSet) {
		fmt.Fprintf(os.Stderr, "Error: --night/--day requires both --lat and --lng\n\n")
		printPushCertaintyUsage()
		os.Exit(1)
	}

	// Reviewer comes from the config file, not a flag.
	cfg, cfgPath, err := utils.LoadConfig()
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
		fmt.Fprintf(os.Stderr, "Create %s with a \"classify\" section; run `skraak calls classify --help` for an example.\n", cfgPath)
		os.Exit(1)
	}
	if cfg.Classify.Reviewer == "" {
		fmt.Fprintf(os.Stderr, "Error: %s is missing \"classify.reviewer\"\n", cfgPath)
		os.Exit(1)
	}

	speciesName, callType := utils.ParseSpeciesCallType(species)
	config := tools.PushCertaintyConfig{
		Folder:   folder,
		File:     file,
		Filter:   filter,
		Species:  speciesName,
		CallType: callType,
		Night:    night,
		Day:      day,
		Lat:      lat,
		Lng:      lng,
		Timezone: timezone,
		Reviewer: cfg.Classify.Reviewer,
	}

	result, err := tools.PushCertainty(config)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
		os.Exit(1)
	}

	if result.TimeFilteredCount > 0 {
		// --night skips daytime files; --day skips nighttime files.
		label := "daytime"
		if config.Day {
			label = "nighttime"
		}
		fmt.Fprintf(os.Stderr, "Skipped %d %s files\n", result.TimeFilteredCount, label)
	}
	fmt.Fprintf(os.Stderr, "Updated %d segments across %d files\n",
		result.SegmentsUpdated, result.FilesUpdated)

	// Machine-readable result on stdout; human-readable summary went to stderr.
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ")
	if err := enc.Encode(result); err != nil { // previously the Encode error was silently dropped
		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
		os.Exit(1)
	}
}