7NS27QXZMVTZBK4VPMYL5IKGSTTAWR6NDG5SOVITNX44VNIRZPMAC OQ7Q4PCX3EKNP5IGOCSCTZGWAKX6HQYSEHRI7BPGINJFAXSVUANQC GGFJ6IS5SHXLHNJ4JXRGI4OQB3T4V7HF75WG4TBEYJT56LDJABGAC KS7LFF6M5Y6UGBBA7G63BJRR5XS4P4R3PSZPG752NSGZ3Z6GY72QC Y2DREPFT254GKDP3W6LOVI6KZVBOUED2LAAZVVIUQFOSQTMIOTZAC IFVRAERTCCDICNTYTG3TX2WASB6RXQQEJWWXQMQZJSQDQ3HLE5OQC DORZF5HSV672ZP5HUDYB3J6TBH5O2LMXJE4HPSE7H5SOGZQBDCXQC OGLLBQQYE5KICDMI6EX7ZI4TZT5RB7UFHH7O2DUOZ44QQXVL5YAAC 4VO5HC4R37PDGNAN6SX24X2M5G2V4RXIX2WCTFXDOH2TPRZA7DZQC 25GQ5TYGSGL7QED7L2IAPFLZ4WJJ2ZFAM6O6X5AOSTYPVJNCOGPQC L4STQEXDGCPZXDHTEUBCOQKBMTFDRVXRLNFQHPDHOVXDCJO33LQQC OCRETPZZPDCUSOPYRH5MVRATJ37TRFGVSIMOI4IV755HFXXOVHEAC MK5UPYCRKUKCIBWBKLCEW754G4JJWSU2R2JCYNECUG2IRFLQKVNAC YCGYOCFORTFK53WZ2B7GYZLT4HYXT55LCCKOTXM76ATWJNZ5WLJQC CDT7NGOU3VIKCP3YPZTXKOMLV54VERP4FFRVSRPVKMU3IQTI76CAC VZGXBNYYO3E7EPFQ4GOLNVMRXXTQDDQZUU2BZ6JHNBDY4B2QLDAAC ZVYOPUNH7UJL3YALGNNXQW2B4H4ONI5Z6XWAUZUONFG7LR55W4SQC PZHNIV62T77A3VPGPAYURINYRMUJKMNQHTHYD7L22X7WDZSSKQ7QC 3KVRYKAIMTYO3PNM4PYGCEB2C522NKKJYH5RXLHILFHXVJ6XZT7QC // GetCurrentTime implements the get_current_time tool handler// It returns the current system time with timezone informationfunc GetCurrentTime(ctx context.Context, req *mcp.CallToolRequest, input GetCurrentTimeInput) (*mcp.CallToolResult,GetCurrentTimeOutput,error,) {// Get current time
// GetCurrentTime returns the current system time with timezone informationfunc GetCurrentTime(ctx context.Context, input GetCurrentTimeInput) (GetCurrentTimeOutput, error) {
return nil, output, fmt.Errorf("location '%s' (ID: %s) does not belong to dataset '%s' (ID: %s) - it belongs to dataset ID '%s'",
return output, fmt.Errorf("location '%s' (ID: %s) does not belong to dataset '%s' (ID: %s) - it belongs to dataset ID '%s'",
fmt.Fprintf(os.Stderr, " mcp <path-to-duckdb-database> Start MCP server (stdio transport)\n\n")fmt.Fprintf(os.Stderr, "Example:\n")
fmt.Fprintf(os.Stderr, " mcp <db-path> Start MCP server (stdio transport)\n")fmt.Fprintf(os.Stderr, " import bulk --db <db> --dataset <id> --csv <path> --log <path> Bulk import from CSV\n")fmt.Fprintf(os.Stderr, " sql --db <db> <query> Execute SQL query\n")fmt.Fprintf(os.Stderr, "\nExamples:\n")
func runMCP() {if len(os.Args) != 3 {fmt.Fprintf(os.Stderr, "Usage: %s mcp <path-to-duckdb-database>\n", os.Args[0])fmt.Fprintf(os.Stderr, "Example: %s mcp ./db/skraak.duckdb\n", os.Args[0])os.Exit(1)}dbPath = os.Args[2]// Set database path for tools packagetools.SetDBPath(dbPath)// Set schema path for resources packageschemaPath := filepath.Join(filepath.Dir(os.Args[0]), "db", "schema.sql")resources.SetSchemaPath(schemaPath)// Create MCP server with metadataserver := mcp.NewServer(&mcp.Implementation{Name: "skraak",Version: "v1.0.0",}, nil)// Register the get_current_time toolmcp.AddTool(server, &mcp.Tool{Name: "get_current_time",Description: "Get the current system time with timezone information",}, tools.GetCurrentTime)// Register the generic SQL query toolmcp.AddTool(server, &mcp.Tool{Name: "execute_sql",Description: "Execute arbitrary SQL SELECT queries against the database. Supports parameterized queries with ? placeholders. Database is read-only. Results limited to 1000 rows by default (max 10000). Use with schema resources to construct queries.",}, tools.ExecuteSQL)// Register create-or-update toolsmcp.AddTool(server, &mcp.Tool{Name: "create_or_update_dataset",Description: "Create or update a dataset. Omit 'id' to create (name required), provide 'id' to update. Returns the dataset with timestamps.",}, tools.CreateOrUpdateDataset)mcp.AddTool(server, &mcp.Tool{Name: "create_or_update_location",Description: "Create or update a location. Omit 'id' to create (dataset_id, name, latitude, longitude, timezone_id required), provide 'id' to update. Location must belong to the specified dataset when creating.",}, tools.CreateOrUpdateLocation)mcp.AddTool(server, &mcp.Tool{Name: "create_or_update_cluster",Description: "Create or update a cluster. Omit 'id' to create (dataset_id, location_id, name, sample_rate required), provide 'id' to update. 
Query existing patterns first with execute_sql before setting cyclic_recording_pattern_id.",}, tools.CreateOrUpdateCluster)mcp.AddTool(server, &mcp.Tool{Name: "create_or_update_pattern",Description: "Create or update a cyclic recording pattern. Omit 'id' to create (record_seconds, sleep_seconds required), provide 'id' to update. Returns existing pattern if duplicate record/sleep values found.",}, tools.CreateOrUpdatePattern)// Register import single filemcp.AddTool(server, &mcp.Tool{Name: "import_file",Description: "Import a single WAV file into the database. Automatically parses AudioMoth and filename timestamps, calculates hash, extracts metadata, and computes astronomical data. Skips if duplicate (by hash).",}, tools.ImportFile)// Register import audio filesmcp.AddTool(server, &mcp.Tool{Name: "import_audio_files",Description: "Batch import WAV files from a folder into the database. Automatically parses AudioMoth and filename timestamps, calculates hashes, extracts metadata, and computes astronomical data. Files are imported in a single transaction. Duplicate files (by hash) are skipped.",}, tools.ImportAudioFiles)// Register bulk file importmcp.AddTool(server, &mcp.Tool{Name: "bulk_file_import",Description: "Batch import WAV files across multiple locations/clusters using a CSV file. CSV must have columns (in order): location_name, location_id, directory_path, date_range, sample_rate, file_count. Auto-creates clusters using date_range as cluster name. Logs progress to file for monitoring. 
Synchronous/fail-fast execution.",}, tools.BulkFileImport)// Register import ML selectionsmcp.AddTool(server, &mcp.Tool{Name: "import_ml_selections",Description: "Import hand sorted ML-detected kiwi call selections from folder structure organized by species/call-type with WAV/PNG pairs",}, tools.ImportMLSelections)// Register schema resourcesschemaResource, schemaTemplate := resources.GetSchemaResources()server.AddResource(schemaResource, resources.SchemaResourceHandler)server.AddResourceTemplate(schemaTemplate, resources.SchemaResourceHandler)// Register promptsserver.AddPrompt(prompts.GetQueryDatasetsPrompt(), prompts.QueryDatasetsPromptHandler)server.AddPrompt(prompts.GetExploreSchemaPrompt(), prompts.ExploreSchemaPromptHandler)server.AddPrompt(prompts.GetExploreLocationHierarchyPrompt(), prompts.ExploreLocationHierarchyPromptHandler)server.AddPrompt(prompts.GetQueryLocationDataPrompt(), prompts.QueryLocationDataPromptHandler)server.AddPrompt(prompts.GetAnalyzeClusterFilesPrompt(), prompts.AnalyzeClusterFilesPromptHandler)server.AddPrompt(prompts.GetSystemStatusPrompt(), prompts.SystemStatusPromptHandler)// Run the server on stdio transportif err := server.Run(context.Background(), &mcp.StdioTransport{}); err != nil {log.Fatalf("Server error: %v", err)}}
package cmd

import (
	"context"
	"encoding/json"
	"flag"
	"fmt"
	"os"
	"strings"

	"skraak/tools"
)

// RunSQL handles the "sql" subcommand.
//
// It parses the --db and --limit flags, joins the remaining positional
// arguments into a single SQL query string, executes it against the DuckDB
// database via tools.ExecuteSQL, and prints the result as indented JSON on
// stdout. Exits with status 1 on flag errors, missing required arguments, or
// query failure.
func RunSQL(args []string) {
	fs := flag.NewFlagSet("sql", flag.ExitOnError)
	dbPath := fs.String("db", "", "Path to DuckDB database (required)")
	limit := fs.Int("limit", 0, "Maximum rows to return (default 1000, max 10000)")
	fs.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: skraak sql --db <path> [options] <query>\n\n")
		fmt.Fprintf(os.Stderr, "Execute a SQL SELECT query against the database.\n\n")
		fmt.Fprintf(os.Stderr, "Options:\n")
		fs.PrintDefaults()
		fmt.Fprintf(os.Stderr, "\nExamples:\n")
		fmt.Fprintf(os.Stderr, " skraak sql --db ./db/skraak.duckdb \"SELECT COUNT(*) FROM file WHERE active = true\"\n")
		fmt.Fprintf(os.Stderr, " skraak sql --db ./db/skraak.duckdb --limit 10 \"SELECT * FROM dataset\"\n")
	}
	if err := fs.Parse(args); err != nil {
		os.Exit(1)
	}
	if *dbPath == "" {
		fmt.Fprintf(os.Stderr, "Error: --db is required\n\n")
		fs.Usage()
		os.Exit(1)
	}

	// Everything left after flag parsing is the query text.
	remaining := fs.Args()
	if len(remaining) == 0 {
		fmt.Fprintf(os.Stderr, "Error: query is required\n\n")
		fs.Usage()
		os.Exit(1)
	}
	query := strings.Join(remaining, " ")

	tools.SetDBPath(*dbPath)
	input := tools.ExecuteSQLInput{
		Query: query,
	}
	if *limit > 0 {
		// Limit is optional; only set when the user supplied a positive value.
		input.Limit = limit
	}

	output, err := tools.ExecuteSQL(context.Background(), input)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
		os.Exit(1)
	}

	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ")
	// Fix: report JSON encoding failures instead of silently discarding them
	// (the original ignored Encode's error return).
	if err := enc.Encode(output); err != nil {
		fmt.Fprintf(os.Stderr, "Error: encoding output: %v\n", err)
		os.Exit(1)
	}
}
package cmd

import (
	"context"
	"fmt"
	"log"
	"os"
	"path/filepath"

	"skraak/prompts"
	"skraak/resources"
	"skraak/tools"

	"github.com/modelcontextprotocol/go-sdk/mcp"
)

// RunMCP starts the MCP server on stdio transport.
//
// It expects exactly one argument: the path to the DuckDB database. It hands
// the path to the tools package, points the resources package at the schema
// file, registers every tool (via the thin mcp* adapter wrappers below),
// resource, and prompt, then blocks serving requests until the transport
// closes or fails.
func RunMCP(args []string) {
	if len(args) != 1 {
		fmt.Fprintf(os.Stderr, "Usage: skraak mcp <path-to-duckdb-database>\n")
		fmt.Fprintf(os.Stderr, "Example: skraak mcp ./db/skraak.duckdb\n")
		os.Exit(1)
	}

	dbPath := args[0]
	tools.SetDBPath(dbPath)

	// The schema file ships next to the binary under db/.
	resources.SetSchemaPath(filepath.Join(filepath.Dir(os.Args[0]), "db", "schema.sql"))

	// Server identity reported to MCP clients.
	srv := mcp.NewServer(&mcp.Implementation{
		Name:    "skraak",
		Version: "v1.0.0",
	}, nil)

	// --- Tools (thin adapters bridging MCP types to core functions) ---
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "get_current_time",
		Description: "Get the current system time with timezone information",
	}, mcpGetCurrentTime)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "execute_sql",
		Description: "Execute arbitrary SQL SELECT queries against the database. Supports parameterized queries with ? placeholders. Database is read-only. Results limited to 1000 rows by default (max 10000). Use with schema resources to construct queries.",
	}, mcpExecuteSQL)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "create_or_update_dataset",
		Description: "Create or update a dataset. Omit 'id' to create (name required), provide 'id' to update. Returns the dataset with timestamps.",
	}, mcpCreateOrUpdateDataset)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "create_or_update_location",
		Description: "Create or update a location. Omit 'id' to create (dataset_id, name, latitude, longitude, timezone_id required), provide 'id' to update. Location must belong to the specified dataset when creating.",
	}, mcpCreateOrUpdateLocation)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "create_or_update_cluster",
		Description: "Create or update a cluster. Omit 'id' to create (dataset_id, location_id, name, sample_rate required), provide 'id' to update. Query existing patterns first with execute_sql before setting cyclic_recording_pattern_id.",
	}, mcpCreateOrUpdateCluster)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "create_or_update_pattern",
		Description: "Create or update a cyclic recording pattern. Omit 'id' to create (record_seconds, sleep_seconds required), provide 'id' to update. Returns existing pattern if duplicate record/sleep values found.",
	}, mcpCreateOrUpdatePattern)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "import_file",
		Description: "Import a single WAV file into the database. Automatically parses AudioMoth and filename timestamps, calculates hash, extracts metadata, and computes astronomical data. Skips if duplicate (by hash).",
	}, mcpImportFile)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "import_audio_files",
		Description: "Batch import WAV files from a folder into the database. Automatically parses AudioMoth and filename timestamps, calculates hashes, extracts metadata, and computes astronomical data. Files are imported in a single transaction. Duplicate files (by hash) are skipped.",
	}, mcpImportAudioFiles)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "bulk_file_import",
		Description: "Batch import WAV files across multiple locations/clusters using a CSV file. CSV must have columns (in order): location_name, location_id, directory_path, date_range, sample_rate, file_count. Auto-creates clusters using date_range as cluster name. Logs progress to file for monitoring. Synchronous/fail-fast execution.",
	}, mcpBulkFileImport)
	mcp.AddTool(srv, &mcp.Tool{
		Name:        "import_ml_selections",
		Description: "Import hand sorted ML-detected kiwi call selections from folder structure organized by species/call-type with WAV/PNG pairs",
	}, mcpImportMLSelections)

	// --- Resources (database schema) ---
	schemaResource, schemaTemplate := resources.GetSchemaResources()
	srv.AddResource(schemaResource, resources.SchemaResourceHandler)
	srv.AddResourceTemplate(schemaTemplate, resources.SchemaResourceHandler)

	// --- Prompts ---
	srv.AddPrompt(prompts.GetQueryDatasetsPrompt(), prompts.QueryDatasetsPromptHandler)
	srv.AddPrompt(prompts.GetExploreSchemaPrompt(), prompts.ExploreSchemaPromptHandler)
	srv.AddPrompt(prompts.GetExploreLocationHierarchyPrompt(), prompts.ExploreLocationHierarchyPromptHandler)
	srv.AddPrompt(prompts.GetQueryLocationDataPrompt(), prompts.QueryLocationDataPromptHandler)
	srv.AddPrompt(prompts.GetAnalyzeClusterFilesPrompt(), prompts.AnalyzeClusterFilesPromptHandler)
	srv.AddPrompt(prompts.GetSystemStatusPrompt(), prompts.SystemStatusPromptHandler)

	// Serve until the stdio transport is closed or errors out.
	if err := srv.Run(context.Background(), &mcp.StdioTransport{}); err != nil {
		log.Fatalf("Server error: %v", err)
	}
}

// MCP adapter functions — thin wrappers that bridge MCP call signatures to the
// MCP-free core functions in tools/. Each ignores the raw request and returns
// an empty CallToolResult; the SDK serializes the typed output struct.

// mcpGetCurrentTime bridges get_current_time to tools.GetCurrentTime.
func mcpGetCurrentTime(ctx context.Context, req *mcp.CallToolRequest, input tools.GetCurrentTimeInput) (*mcp.CallToolResult, tools.GetCurrentTimeOutput, error) {
	out, err := tools.GetCurrentTime(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpExecuteSQL bridges execute_sql to tools.ExecuteSQL.
func mcpExecuteSQL(ctx context.Context, req *mcp.CallToolRequest, input tools.ExecuteSQLInput) (*mcp.CallToolResult, tools.ExecuteSQLOutput, error) {
	out, err := tools.ExecuteSQL(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpCreateOrUpdateDataset bridges create_or_update_dataset to tools.CreateOrUpdateDataset.
func mcpCreateOrUpdateDataset(ctx context.Context, req *mcp.CallToolRequest, input tools.DatasetInput) (*mcp.CallToolResult, tools.DatasetOutput, error) {
	out, err := tools.CreateOrUpdateDataset(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpCreateOrUpdateLocation bridges create_or_update_location to tools.CreateOrUpdateLocation.
func mcpCreateOrUpdateLocation(ctx context.Context, req *mcp.CallToolRequest, input tools.LocationInput) (*mcp.CallToolResult, tools.LocationOutput, error) {
	out, err := tools.CreateOrUpdateLocation(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpCreateOrUpdateCluster bridges create_or_update_cluster to tools.CreateOrUpdateCluster.
func mcpCreateOrUpdateCluster(ctx context.Context, req *mcp.CallToolRequest, input tools.ClusterInput) (*mcp.CallToolResult, tools.ClusterOutput, error) {
	out, err := tools.CreateOrUpdateCluster(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpCreateOrUpdatePattern bridges create_or_update_pattern to tools.CreateOrUpdatePattern.
func mcpCreateOrUpdatePattern(ctx context.Context, req *mcp.CallToolRequest, input tools.PatternInput) (*mcp.CallToolResult, tools.PatternOutput, error) {
	out, err := tools.CreateOrUpdatePattern(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpImportFile bridges import_file to tools.ImportFile.
func mcpImportFile(ctx context.Context, req *mcp.CallToolRequest, input tools.ImportFileInput) (*mcp.CallToolResult, tools.ImportFileOutput, error) {
	out, err := tools.ImportFile(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpImportAudioFiles bridges import_audio_files to tools.ImportAudioFiles.
func mcpImportAudioFiles(ctx context.Context, req *mcp.CallToolRequest, input tools.ImportAudioFilesInput) (*mcp.CallToolResult, tools.ImportAudioFilesOutput, error) {
	out, err := tools.ImportAudioFiles(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpBulkFileImport bridges bulk_file_import to tools.BulkFileImport.
func mcpBulkFileImport(ctx context.Context, req *mcp.CallToolRequest, input tools.BulkFileImportInput) (*mcp.CallToolResult, tools.BulkFileImportOutput, error) {
	out, err := tools.BulkFileImport(ctx, input)
	return &mcp.CallToolResult{}, out, err
}

// mcpImportMLSelections bridges import_ml_selections to tools.ImportMLSelections.
func mcpImportMLSelections(ctx context.Context, req *mcp.CallToolRequest, input tools.ImportMLSelectionsInput) (*mcp.CallToolResult, tools.ImportMLSelectionsOutput, error) {
	out, err := tools.ImportMLSelections(ctx, input)
	return &mcp.CallToolResult{}, out, err
}
package cmd

import (
	"context"
	"encoding/json"
	"flag"
	"fmt"
	"os"

	"skraak/tools"
)

// RunImport handles the "import" subcommand by dispatching to the matching
// sub-subcommand. Currently only "bulk" is supported. Exits with status 1 on
// a missing or unknown subcommand.
func RunImport(args []string) {
	if len(args) < 1 {
		printImportUsage()
		os.Exit(1)
	}
	switch args[0] {
	case "bulk":
		runImportBulk(args[1:])
	default:
		fmt.Fprintf(os.Stderr, "Unknown import subcommand: %s\n\n", args[0])
		printImportUsage()
		os.Exit(1)
	}
}

// printImportUsage writes the top-level usage text for "skraak import" to
// stderr.
func printImportUsage() {
	fmt.Fprintf(os.Stderr, "Usage: skraak import <subcommand> [options]\n\n")
	fmt.Fprintf(os.Stderr, "Subcommands:\n")
	fmt.Fprintf(os.Stderr, " bulk Bulk import WAV files from CSV\n")
	fmt.Fprintf(os.Stderr, "\nExamples:\n")
	fmt.Fprintf(os.Stderr, " skraak import bulk --db ./db/skraak.duckdb --dataset abc123 --csv import.csv --log progress.log\n")
}

// runImportBulk handles "skraak import bulk": it parses the required
// --db/--dataset/--csv/--log flags, then runs tools.BulkFileImport and prints
// the result as JSON. On failure it still prints any partial output before
// exiting with status 1.
func runImportBulk(args []string) {
	fs := flag.NewFlagSet("import bulk", flag.ExitOnError)
	dbPath := fs.String("db", "", "Path to DuckDB database (required)")
	datasetID := fs.String("dataset", "", "Dataset ID (required)")
	csvPath := fs.String("csv", "", "Path to CSV file (required)")
	logPath := fs.String("log", "", "Path to progress log file (required)")
	fs.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage: skraak import bulk [options]\n\n")
		fmt.Fprintf(os.Stderr, "Bulk import WAV files across multiple locations/clusters using a CSV file.\n\n")
		fmt.Fprintf(os.Stderr, "Options:\n")
		fs.PrintDefaults()
		fmt.Fprintf(os.Stderr, "\nCSV format: location_name,location_id,directory_path,date_range,sample_rate,file_count\n")
		fmt.Fprintf(os.Stderr, "\nMonitor progress: tail -f <log-file>\n")
	}
	if err := fs.Parse(args); err != nil {
		os.Exit(1)
	}

	// Validate required flags, collecting all missing ones so the user sees
	// the full list in a single error message.
	missing := []string{}
	if *dbPath == "" {
		missing = append(missing, "--db")
	}
	if *datasetID == "" {
		missing = append(missing, "--dataset")
	}
	if *csvPath == "" {
		missing = append(missing, "--csv")
	}
	if *logPath == "" {
		missing = append(missing, "--log")
	}
	if len(missing) > 0 {
		fmt.Fprintf(os.Stderr, "Error: missing required flags: %v\n\n", missing)
		fs.Usage()
		os.Exit(1)
	}

	// Set DB path and run the import.
	tools.SetDBPath(*dbPath)
	input := tools.BulkFileImportInput{
		DatasetID:   *datasetID,
		CSVPath:     *csvPath,
		LogFilePath: *logPath,
	}
	fmt.Fprintf(os.Stderr, "Starting bulk import...\n")
	fmt.Fprintf(os.Stderr, " Database: %s\n", *dbPath)
	fmt.Fprintf(os.Stderr, " Dataset: %s\n", *datasetID)
	fmt.Fprintf(os.Stderr, " CSV: %s\n", *csvPath)
	fmt.Fprintf(os.Stderr, " Log: %s\n", *logPath)
	fmt.Fprintf(os.Stderr, "\nMonitor progress: tail -f %s\n\n", *logPath)

	output, err := tools.BulkFileImport(context.Background(), input)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
		// Still print partial output if available, so a failed run's progress
		// is not lost.
		if output.TotalLocations > 0 || output.FilesImported > 0 {
			printJSON(output)
		}
		os.Exit(1)
	}
	printJSON(output)
}

// printJSON writes v to stdout as indented JSON. Encoding failures are
// reported on stderr (fix: the original ignored Encode's error return).
func printJSON(v interface{}) {
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ")
	if err := enc.Encode(v); err != nil {
		fmt.Fprintf(os.Stderr, "Error: encoding output: %v\n", err)
	}
}
The Skraak MCP Server is a Model Context Protocol (MCP) server written in Go that provides a **generic SQL query interface** for an acoustic monitoring system. It follows MCP's three-primitive architecture with an LLM-friendly design:
- **Core logic** (`tools/`): Pure Go functions with no MCP dependency. Input/output via plain structs.
- **MCP adapters** (`cmd/mcp.go`): Thin wrappers bridging MCP types to core functions. The only file importing the `mcp` SDK.
- **CLI commands** (`cmd/`): Parse flags, call core functions, print results. Power-user interface.
skraak_mcp/├── main.go # Server entry point, tool registration
skraak/
├── main.go        # CLI dispatcher (mcp | import | sql)
├── cmd/           # Command entry points (only mcp.go imports the MCP SDK)
│   ├── mcp.go     # MCP server setup + adapter wrappers
│   ├── import.go  # CLI: skraak import bulk ...
│   └── sql.go     # CLI: skraak sql ...
├── tools/ # Tool implementations│ ├── time.go # get_current_time (utility tool)│ ├── sql.go # execute_sql (generic query tool)
├── tools/ # Core logic (NO MCP dependency)│ ├── time.go # get_current_time│ ├── sql.go # execute_sql (generic query)
### CLI Commands (power-user, no MCP needed)```bash# SQL query./skraak sql --db ./db/skraak.duckdb "SELECT COUNT(*) FROM file WHERE active = true"./skraak sql --db ./db/skraak.duckdb --limit 10 "SELECT * FROM dataset WHERE active = true"# Bulk import from CSV./skraak import bulk --db ./db/skraak.duckdb --dataset abc123 --csv import.csv --log progress.log```
### Previous Update: Bulk File Import Cluster Assignment Bug Fix (2026-02-10)
### Latest Update: CLI Refactoring — Two-Layer Architecture (2026-02-11)**Major refactoring: Separated core logic from MCP types, added CLI commands****Problem:** All tool functions were tightly coupled to MCP SDK types (`*mcp.CallToolRequest`, `*mcp.CallToolResult`). This meant functionality could only be invoked via MCP protocol — no CLI access for power users.**Solution:** Two-layer architecture separating core logic from MCP adapters.**Created:**- `cmd/mcp.go` — MCP server setup + 10 thin adapter wrappers (~3 lines each)- `cmd/import.go` — `skraak import bulk` CLI command with flag parsing- `cmd/sql.go` — `skraak sql` CLI command for ad-hoc queries**Modified (mechanical, all tools/):**- Removed `*mcp.CallToolRequest` parameter (was never used — `req` always ignored)- Removed `*mcp.CallToolResult` from returns (was always empty `&mcp.CallToolResult{}`)- Removed `import "github.com/modelcontextprotocol/go-sdk/mcp"` from all tool files- Updated test files (`integration_test.go`, `pattern_test.go`) to match new signatures- Updated `main.go` to pure dispatcher: `mcp | import | sql`**Architecture:**```main.go → pure dispatchercmd/mcp.go → MCP server + adapter wrappers (ONLY file importing mcp SDK)cmd/import.go → CLI: skraak import bulk --db ... --dataset ... --csv ... --log ...cmd/sql.go → CLI: skraak sql --db ... "SELECT ..."tools/*.go → core logic, NO mcp dependency (plain Go structs in/out)utils/, db/, etc. → unchanged```**CLI Usage:**```bash# MCP server (unchanged)skraak mcp ./db/skraak.duckdb# Power-user CLI commands (new)skraak sql --db ./db/skraak.duckdb "SELECT COUNT(*) FROM file WHERE active = true"skraak sql --db ./db/skraak.duckdb --limit 10 "SELECT * FROM dataset"skraak import bulk --db ./db/skraak.duckdb --dataset abc123 --csv import.csv --log progress.log```
**Benefits:**- ✅ **CLI access:** Power users can run imports and queries without MCP- ✅ **Token savings:** CLI commands avoid MCP protocol overhead- ✅ **Code sharing:** CLI and MCP call identical core functions- ✅ **MCP SDK contained:** Only `cmd/mcp.go` imports the MCP SDK- ✅ **Extensible:** New CLI commands just need a file in `cmd/` calling `tools/`- ✅ **No logic changes:** All core tool logic unchanged, just signature cleanup- ✅ **All tests pass:** `go test ./...`, all 8 shell test scripts verified---### Previous Update: Bulk File Import Cluster Assignment Bug Fix (2026-02-10)
**Last Updated**: 2026-02-10 NZDT**Status**: Bulk import cluster assignment bug fixed 🐛✅**Current Tools**: 10 (read: 2, write: 4, import: 4)
**Last Updated**: 2026-02-11 NZDT**Status**: CLI refactoring complete — two-layer architecture ✅**Architecture**: `tools/` = core logic (MCP-free), `cmd/mcp.go` = MCP adapters, `cmd/*.go` = CLI commands**Current Tools**: 10 (read: 2, write: 4, import: 4) — available via both MCP and CLI**CLI Commands**: `skraak mcp`, `skraak sql`, `skraak import bulk`