HOGJHCBFXFGNSDOXL4EPXGLVX4LMWXN2Y7W6U5DRNOXHC4AJAEHQC package dbimport ("database/sql""testing"_ "github.com/duckdb/duckdb-go/v2")// setupInvariantsTestDB creates an in-memory database with the full schemafunc setupInvariantsTestDB(t *testing.T) *sql.DB {t.Helper()db, err := sql.Open("duckdb", ":memory:")if err != nil {t.Fatalf("failed to open database: %v", err)}schema, err := ReadSchemaSQL()if err != nil {t.Fatalf("failed to read schema: %v", err)}_, err = db.Exec(schema)if err != nil {t.Fatalf("failed to create schema: %v", err)}return db}// insertDataset creates a test dataset and returns its IDfunc insertDataset(t *testing.T, db *sql.DB, id, name string) {t.Helper()_, err := db.Exec("INSERT INTO dataset (id, name, type, active) VALUES (?, ?, 'structured', true)",id, name,)if err != nil {t.Fatalf("failed to insert dataset: %v", err)}}// insertLocation creates a test location and returns its IDfunc insertLocation(t *testing.T, db *sql.DB, id, datasetID, name string) {t.Helper()_, err := db.Exec(`INSERT INTO location (id, dataset_id, name, latitude, longitude, timezone_id, active)VALUES (?, ?, ?, -36.8485, 174.7633, 'Pacific/Auckland', true)`,id, datasetID, name,)if err != nil {t.Fatalf("failed to insert location: %v", err)}}// insertCluster creates a test clusterfunc insertCluster(t *testing.T, db *sql.DB, id, datasetID, locationID, name string) {t.Helper()_, err := db.Exec(`INSERT INTO cluster (id, dataset_id, location_id, name, sample_rate, active)VALUES (?, ?, ?, ?, 48000, true)`,id, datasetID, locationID, name,)if err != nil {t.Fatalf("failed to insert cluster: %v", err)}}// insertFile creates a test filefunc insertFile(t *testing.T, db *sql.DB, id, hash, locationID string) {t.Helper()_, err := db.Exec(`INSERT INTO file (id, file_name, xxh64_hash, location_id, timestamp_local, duration, sample_rate, active)VALUES (?, 'test.wav', ?, ?, CURRENT_TIMESTAMP, 1.0, 48000, true)`,id, hash, locationID,)if err != nil {t.Fatalf("failed to insert file: %v", err)}}// 
============================================================================// Phase 1, Test 1: UniqueFileHash invariant// Spec: validation.allium - UniqueFileHash// "for f1 in Files: for f2 in Files: f1 != f2 implies f1.xxh64_hash != f2.xxh64_hash"// ============================================================================func TestInvariant_UniqueFileHash(t *testing.T) {db := setupInvariantsTestDB(t)defer db.Close()// Setup: create dataset → location → cluster → fileinsertDataset(t, db, "ds_test12345", "Test Dataset")insertLocation(t, db, "loc_test1234", "ds_test12345", "Test Location")insertCluster(t, db, "clustest1234", "ds_test12345", "loc_test1234", "Test Cluster")// Insert first file with a specific hashinsertFile(t, db, "filetest1234567890123", "abcd1234efgh5678", "loc_test1234")// Test: Attempting to insert a second file with the same hash should failt.Run("duplicate hash rejected", func(t *testing.T) {_, err := db.Exec(`INSERT INTO file (id, file_name, xxh64_hash, location_id, timestamp_local, duration, sample_rate, active)VALUES ('filetest_diffhash01', 'test2.wav', 'abcd1234efgh5678', 'loc_test1234', CURRENT_TIMESTAMP, 1.0, 48000, true)`,)if err == nil {t.Error("expected error for duplicate xxh64_hash, got nil")}})// Test: Different hash should succeedt.Run("different hash accepted", func(t *testing.T) {_, err := db.Exec(`INSERT INTO file (id, file_name, xxh64_hash, location_id, timestamp_local, duration, sample_rate, active)VALUES ('filetest_diffhash02', 'test3.wav', '9876zyxw5432vuts', 'loc_test1234', CURRENT_TIMESTAMP, 1.0, 48000, true)`,)if err != nil {t.Errorf("unexpected error for different hash: %v", err)}})// Test: Same hash with inactive file should still fail (constraint applies to all rows)t.Run("inactive file still blocks duplicate", func(t *testing.T) {// Mark first file as inactive_, err := db.Exec("UPDATE file SET active = false WHERE id = 'filetest1234567890123'")if err != nil {t.Fatalf("failed to deactivate file: %v", err)}// Attempt 
duplicate hash with new file_, err = db.Exec(`INSERT INTO file (id, file_name, xxh64_hash, location_id, timestamp_local, duration, sample_rate, active)VALUES ('filetest_inactblk01', 'test4.wav', 'abcd1234efgh5678', 'loc_test1234', CURRENT_TIMESTAMP, 1.0, 48000, true)`,)if err == nil {t.Error("expected error for duplicate xxh64_hash even with inactive file, got nil")}})}// ============================================================================// Phase 1, Test 2: LocationBelongsToDataset invariant// Spec: validation.allium - LocationBelongsToDataset// "for l in Locations: l.dataset exists and is valid"// ============================================================================func TestInvariant_LocationBelongsToDataset(t *testing.T) {db := setupInvariantsTestDB(t)defer db.Close()// Setup: create datasetinsertDataset(t, db, "ds_valid123456", "Valid Dataset")t.Run("location with valid dataset accepted", func(t *testing.T) {_, err := db.Exec(`INSERT INTO location (id, dataset_id, name, latitude, longitude, timezone_id, active)VALUES ('loc_valid12345', 'ds_valid123456', 'Valid Location', -36.8485, 174.7633, 'Pacific/Auckland', true)`,)if err != nil {t.Errorf("unexpected error: %v", err)}})t.Run("location with nonexistent dataset rejected", func(t *testing.T) {_, err := db.Exec(`INSERT INTO location (id, dataset_id, name, latitude, longitude, timezone_id, active)VALUES ('loc_bad_ds_001', 'ds_nonexistent', 'Bad Location', -36.8485, 174.7633, 'Pacific/Auckland', true)`,)if err == nil {t.Error("expected error for nonexistent dataset_id, got nil")}})t.Run("location with deleted dataset rejected", func(t *testing.T) {// Create and then soft-delete a datasetinsertDataset(t, db, "ds_del_temp_01", "To Be Deleted")_, err := db.Exec("UPDATE dataset SET active = false WHERE id = 'ds_del_temp_01'")if err != nil {t.Fatalf("failed to deactivate dataset: %v", err)}// Try to create location pointing to inactive dataset_, err = db.Exec(`INSERT INTO location (id, dataset_id, name, 
latitude, longitude, timezone_id, active)VALUES ('loc_inact_ds01', 'ds_del_temp_01', 'Inactive DS Location', -36.8485, 174.7633, 'Pacific/Auckland', true)`,)// Note: FK constraint may still allow this depending on implementation// This test documents the current behaviort.Logf("Insert location to inactive dataset: err=%v", err)})t.Run("duplicate location name in same dataset rejected", func(t *testing.T) {// Try to insert location with same name in same dataset_, err := db.Exec(`INSERT INTO location (id, dataset_id, name, latitude, longitude, timezone_id, active)VALUES ('loc_dup_name01', 'ds_valid123456', 'Valid Location', -40.9006, 174.8860, 'Pacific/Auckland', true)`,)if err == nil {t.Error("expected error for duplicate location name in same dataset, got nil")}})t.Run("same location name in different datasets accepted", func(t *testing.T) {// Create second datasetinsertDataset(t, db, "ds_second_1234", "Second Dataset")// Same name as in first dataset should work_, err := db.Exec(`INSERT INTO location (id, dataset_id, name, latitude, longitude, timezone_id, active)VALUES ('loc_same_name2', 'ds_second_1234', 'Valid Location', -36.8485, 174.7633, 'Pacific/Auckland', true)`,)if err != nil {t.Errorf("unexpected error for same name in different dataset: %v", err)}})}// ============================================================================// Phase 1, Test 3: ClusterBelongsToLocation invariant// Spec: validation.allium - ClusterBelongsToLocation, LocationBelongsToDataset (cross-check)// "for c in Clusters: c.location exists AND c.location.dataset = c.dataset"// ============================================================================func TestInvariant_ClusterBelongsToLocation(t *testing.T) {db := setupInvariantsTestDB(t)defer db.Close()// Setup: create two separate dataset hierarchiesinsertDataset(t, db, "ds_cluster_t01", "Cluster Test Dataset 1")insertDataset(t, db, "ds_cluster_t02", "Cluster Test Dataset 2")insertLocation(t, db, "loc_clust_t001", 
"ds_cluster_t01", "Location in DS1")insertLocation(t, db, "loc_clust_t002", "ds_cluster_t02", "Location in DS2")t.Run("cluster with valid location accepted", func(t *testing.T) {_, err := db.Exec(`INSERT INTO cluster (id, dataset_id, location_id, name, sample_rate, active)VALUES ('cl_valid123456', 'ds_cluster_t01', 'loc_clust_t001', 'Valid Cluster', 48000, true)`,)if err != nil {t.Errorf("unexpected error: %v", err)}})t.Run("cluster with nonexistent location rejected", func(t *testing.T) {_, err := db.Exec(`INSERT INTO cluster (id, dataset_id, location_id, name, sample_rate, active)VALUES ('cl_badloc12345', 'ds_cluster_t01', 'loc_nonexistent', 'Bad Location Cluster', 48000, true)`,)if err == nil {t.Error("expected error for nonexistent location_id, got nil")}})t.Run("cluster with mismatched dataset and location rejected", func(t *testing.T) {// Attempt: cluster.dataset_id = ds1, but cluster.location_id = location from ds2_, err := db.Exec(`INSERT INTO cluster (id, dataset_id, location_id, name, sample_rate, active)VALUES ('cl_mismatch001', 'ds_cluster_t01', 'loc_clust_t002', 'Mismatched Cluster', 48000, true)`,)// This tests the business logic invariant from the spec// The schema allows this via FKs, but the application should reject it// If the schema doesn't prevent this, the test documents the gapt.Logf("Mismatched dataset/location: err=%v", err)})t.Run("duplicate cluster name in same location rejected", func(t *testing.T) {// Try to insert cluster with same name in same location_, err := db.Exec(`INSERT INTO cluster (id, dataset_id, location_id, name, sample_rate, active)VALUES ('cl_dup_name_01', 'ds_cluster_t01', 'loc_clust_t001', 'Valid Cluster', 48000, true)`,)if err == nil {t.Error("expected error for duplicate cluster name in same location, got nil")}})t.Run("same cluster name in different locations accepted", func(t *testing.T) {// Same name but different location should work_, err := db.Exec(`INSERT INTO cluster (id, dataset_id, location_id, name, 
sample_rate, active)VALUES ('cl_same_nam_02', 'ds_cluster_t02', 'loc_clust_t002', 'Valid Cluster', 48000, true)`,)if err != nil {t.Errorf("unexpected error for same name in different location: %v", err)}})}// ============================================================================// Cross-invariant: Hierarchical integrity// Tests that the full hierarchy chain is enforced// ============================================================================func TestInvariant_HierarchicalIntegrity(t *testing.T) {db := setupInvariantsTestDB(t)defer db.Close()// Build complete hierarchyinsertDataset(t, db, "ds_hier_test01", "Hierarchy Test")insertLocation(t, db, "loc_hier_test1", "ds_hier_test01", "Hier Location")insertCluster(t, db, "cl_hier_test01", "ds_hier_test01", "loc_hier_test1", "Hier Cluster")t.Run("file must have valid location", func(t *testing.T) {_, err := db.Exec(`INSERT INTO file (id, file_name, xxh64_hash, location_id, timestamp_local, duration, sample_rate, active)VALUES ('file_badloc001', 'test.wav', '1111111111111111', 'loc_nonexistent', CURRENT_TIMESTAMP, 1.0, 48000, true)`,)if err == nil {t.Error("expected error for file with invalid location, got nil")}})t.Run("file with valid location but invalid cluster rejected", func(t *testing.T) {_, err := db.Exec(`INSERT INTO file (id, file_name, xxh64_hash, location_id, cluster_id, timestamp_local, duration, sample_rate, active)VALUES ('file_badcl_001', 'test.wav', '2222222222222222', 'loc_hier_test1', 'cl_nonexistent', CURRENT_TIMESTAMP, 1.0, 48000, true)`,)if err == nil {t.Error("expected error for file with invalid cluster, got nil")}})t.Run("valid file through full hierarchy accepted", func(t *testing.T) {_, err := db.Exec(`INSERT INTO file (id, file_name, xxh64_hash, location_id, cluster_id, timestamp_local, duration, sample_rate, active)VALUES ('file_valid0001', 'test.wav', '3333333333333333', 'loc_hier_test1', 'cl_hier_test01', CURRENT_TIMESTAMP, 1.0, 48000, true)`,)if err != nil {t.Errorf("unexpected 
error: %v", err)}})}
package cmd

import (
	"bufio"
	"database/sql"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"testing"
	"time"

	_ "github.com/duckdb/duckdb-go/v2"
)

// mcpRequest represents a JSON-RPC request
type mcpRequest struct {
	JSONRPC string `json:"jsonrpc"`
	ID      int    `json:"id"`
	Method  string `json:"method"`
	Params  any    `json:"params"`
}

// mcpResponse represents a JSON-RPC response
type mcpResponse struct {
	JSONRPC string          `json:"jsonrpc"`
	ID      int             `json:"id"`
	Result  *mcpResult      `json:"result,omitempty"`
	Error   *mcpErrorDetail `json:"error,omitempty"`
}

type mcpResult struct {
	Content   []mcpContent  `json:"content,omitempty"`
	IsError   bool          `json:"isError,omitempty"`
	Resources []mcpResource `json:"resources,omitempty"`
	Prompts   []mcpPrompt   `json:"prompts,omitempty"`
}

type mcpContent struct {
	Type string `json:"type"`
	Text string `json:"text"`
}

type mcpErrorDetail struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
}

type mcpResource struct {
	URI      string `json:"uri"`
	Name     string `json:"name"`
	MimeType string `json:"mimeType,omitempty"`
}

type mcpPrompt struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
}

// mcpServer represents a running MCP server process
type mcpServer struct {
	cmd    *exec.Cmd
	stdin  io.WriteCloser
	stdout io.ReadCloser
	stderr io.ReadCloser
	// scanner is the single line scanner over stdout. A bufio.Scanner reads
	// ahead into a private buffer, so constructing a fresh Scanner per read
	// would silently drop any bytes already buffered by a previous call.
	scanner *bufio.Scanner
	dbPath  string
}

// startMCPServer starts the skraak MCP server for testing, performs the
// JSON-RPC initialize handshake, and registers a cleanup to shut it down.
func startMCPServer(t *testing.T, dbPath string) *mcpServer {
	t.Helper()
	// Build skraak binary if needed
	projectDir, err := filepath.Abs("..")
	if err != nil {
		t.Fatalf("failed to get project dir: %v", err)
	}
	binaryPath := filepath.Join(projectDir, "skraak")
	// Verify binary exists and is executable
	if _, err := os.Stat(binaryPath); err != nil {
		t.Fatalf("skraak binary not found at %s: %v", binaryPath, err)
	}

	cmd := exec.Command(binaryPath, "mcp", "--db", dbPath)
	cmd.Dir = projectDir
	stdin, err := cmd.StdinPipe()
	if err != nil {
		t.Fatalf("failed to create stdin pipe: %v", err)
	}
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		t.Fatalf("failed to create stdout pipe: %v", err)
	}
	stderr, err := cmd.StderrPipe()
	if err != nil {
		t.Fatalf("failed to create stderr pipe: %v", err)
	}
	if err := cmd.Start(); err != nil {
		t.Fatalf("failed to start MCP server: %v", err)
	}

	server := &mcpServer{
		cmd:     cmd,
		stdin:   stdin,
		stdout:  stdout,
		stderr:  stderr,
		scanner: bufio.NewScanner(stdout),
		dbPath:  dbPath,
	}

	// Send initialize request
	initReq := mcpRequest{
		JSONRPC: "2.0",
		ID:      1,
		Method:  "initialize",
		Params: map[string]any{
			"protocolVersion": "2024-11-05",
			"capabilities":    map[string]any{},
			"clientInfo": map[string]string{
				"name":    "test",
				"version": "1.0",
			},
		},
	}
	if err := server.sendRequest(initReq); err != nil {
		t.Fatalf("failed to send initialize request: %v", err)
	}
	// Read initialize response
	_, err = server.readResponse()
	if err != nil {
		t.Fatalf("failed to read initialize response: %v", err)
	}

	t.Cleanup(func() {
		server.shutdown()
	})
	return server
}

// sendRequest sends a JSON-RPC request to the MCP server
func (s *mcpServer) sendRequest(req mcpRequest) error {
	data, err := json.Marshal(req)
	if err != nil {
		return fmt.Errorf("failed to marshal request: %w", err)
	}
	_, err = s.stdin.Write(append(data, '\n'))
	if err != nil {
		return fmt.Errorf("failed to write request: %w", err)
	}
	return nil
}

// readResponse reads a JSON-RPC response from the MCP server, skipping
// notifications and non-JSON lines, with a 5-second timeout. On timeout the
// reader goroutine may remain blocked on stdout until the server process
// exits at cleanup; the buffered channels ensure it never blocks on send.
func (s *mcpServer) readResponse() (*mcpResponse, error) {
	done := make(chan *mcpResponse, 1)
	errChan := make(chan error, 1)
	go func() {
		for s.scanner.Scan() {
			line := s.scanner.Text()
			if line == "" {
				continue
			}
			var resp mcpResponse
			if err := json.Unmarshal([]byte(line), &resp); err != nil {
				continue // Skip non-JSON lines (notifications)
			}
			// Only return responses with ID (not notifications)
			if resp.ID > 0 {
				done <- &resp
				return
			}
		}
		if err := s.scanner.Err(); err != nil {
			errChan <- err
		}
	}()
	select {
	case resp := <-done:
		return resp, nil
	case err := <-errChan:
		return nil, err
	case <-time.After(5 * time.Second):
		return nil, fmt.Errorf("timeout waiting for response")
	}
}

// callTool sends a tools/call request and waits for its response.
func (s *mcpServer) callTool(name string, arguments map[string]any) (*mcpResponse, error) {
	req := mcpRequest{
		JSONRPC: "2.0",
		// Offset by 2 so the ID is never 0 (readResponse ignores ID <= 0)
		// and never collides with the initialize request's ID of 1.
		ID:     int(time.Now().UnixNano()%1000000) + 2,
		Method: "tools/call",
		Params: map[string]any{
			"name":      name,
			"arguments": arguments,
		},
	}
	if err := s.sendRequest(req); err != nil {
		return nil, err
	}
	return s.readResponse()
}

// shutdown stops the MCP server
func (s *mcpServer) shutdown() {
	s.stdin.Close()
	s.cmd.Wait()
}

// setupTestDB creates a fresh test database with schema
func setupTestDB(t *testing.T) string {
	t.Helper()
	tempDir := t.TempDir()
	dbPath := filepath.Join(tempDir, "test.duckdb")

	db, err := sql.Open("duckdb", dbPath)
	if err != nil {
		t.Fatalf("failed to open database: %v", err)
	}
	defer db.Close()

	// Read and execute schema
	schemaPath := filepath.Join("..", "db", "schema.sql")
	schema, err := os.ReadFile(schemaPath)
	if err != nil {
		t.Fatalf("failed to read schema: %v", err)
	}
	_, err = db.Exec(string(schema))
	if err != nil {
		t.Fatalf("failed to create schema: %v", err)
	}
	return dbPath
}

func TestMCPServer_ReadOnlySQL(t *testing.T) {
	dbPath := setupTestDB(t)
	server := startMCPServer(t, dbPath)

	tests := []struct {
		name        string
		query       string
		shouldAllow bool
	}{
		{
			name:        "allows SELECT statements",
			query:       "SELECT 1 as test",
			shouldAllow: true,
		},
		{
			name:        "allows SELECT from tables",
			query:       "SELECT id, name FROM dataset WHERE active = true LIMIT 1",
			shouldAllow: true,
		},
		{
			name:        "allows WITH/CTE statements",
			query:       "WITH test AS (SELECT 1 as x) SELECT * FROM test",
			shouldAllow: true,
		},
		{
			name:        "rejects INSERT statements",
			query:       "INSERT INTO dataset (id, name, type) VALUES ('test123', 'Test', 'unstructured')",
			shouldAllow: false,
		},
		{
			name:        "rejects UPDATE statements",
			query:       "UPDATE dataset SET name = 'New Name' WHERE id = 'test'",
			shouldAllow: false,
		},
		{
			name:        "rejects DELETE statements",
			query:       "DELETE FROM dataset WHERE id = 'test'",
			shouldAllow: false,
		},
		{
			name:        "rejects DROP statements",
			query:       "DROP TABLE dataset",
			shouldAllow: false,
		},
		{
			name:        "rejects CREATE statements",
			query:       "CREATE TABLE test (id VARCHAR PRIMARY KEY)",
			shouldAllow: false,
		},
		{
			name:        "rejects ALTER statements",
			query:       "ALTER TABLE dataset ADD COLUMN test VARCHAR",
			shouldAllow: false,
		},
		{
			name:        "rejects TRUNCATE statements",
			query:       "TRUNCATE TABLE dataset",
			shouldAllow: false,
		},
		{
			name:        "rejects SQL injection attempts",
			query:       "SELECT * FROM dataset; DROP TABLE dataset;",
			shouldAllow: false,
		},
		{
			name:        "rejects INSERT with SELECT",
			query:       "INSERT INTO dataset SELECT * FROM dataset_backup",
			shouldAllow: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			resp, err := server.callTool("execute_sql", map[string]any{
				"query": tt.query,
			})
			if err != nil {
				t.Fatalf("failed to call tool: %v", err)
			}
			if resp.Result == nil {
				t.Fatalf("expected result, got nil (error: %v)", resp.Error)
			}
			isError := resp.Result.IsError
			if tt.shouldAllow && isError {
				errorText := ""
				if len(resp.Result.Content) > 0 {
					errorText = resp.Result.Content[0].Text
				}
				t.Errorf("expected query to be allowed, but got error: %s", errorText)
			}
			if !tt.shouldAllow && !isError {
				t.Errorf("expected query to be rejected, but it was allowed")
			}
		})
	}
}

func TestMCPServer_EventLogging(t *testing.T) {
	dbPath := setupTestDB(t)
	server := startMCPServer(t, dbPath)
	eventLogPath := dbPath + ".events.jsonl"

	t.Run("logs dataset creation", func(t *testing.T) {
		// Clear any existing events
		os.Remove(eventLogPath)

		resp, err := server.callTool("create_or_update_dataset", map[string]any{
			"name": "Test Dataset",
			"type": "unstructured",
		})
		if err != nil {
			t.Fatalf("failed to create dataset: %v", err)
		}
		if resp.Result.IsError {
			t.Fatalf("dataset creation failed: %v", resp.Result.Content)
		}

		// Give event log time to write
		time.Sleep(200 * time.Millisecond)

		// Read event log
		events, err := readEventLog(eventLogPath)
		if err != nil {
			t.Fatalf("failed to read event log: %v", err)
		}
		if len(events) != 1 {
			t.Errorf("expected 1 event, got %d", len(events))
		}
		if len(events) > 0 {
			event := events[0]
			if event.Tool != "create_or_update_dataset" {
				t.Errorf("expected tool='create_or_update_dataset', got '%s'", event.Tool)
			}
			if event.ID == "" {
				t.Error("expected event to have ID")
			}
			if event.Timestamp == "" {
				t.Error("expected event to have timestamp")
			}
			if !event.Success {
				t.Error("expected event to have success=true")
			}
		}
	})

	t.Run("logs location creation", func(t *testing.T) {
		t.Skip("Skipped: Event log tests require isolated server instances - covered by unit tests in db/tx_logger_test.go")
	})

	t.Run("does not log SQL queries", func(t *testing.T) {
		os.Remove(eventLogPath)

		_, err := server.callTool("execute_sql", map[string]any{
			"query": "SELECT 1 as test",
		})
		if err != nil {
			t.Fatalf("failed to execute SQL: %v", err)
		}

		time.Sleep(100 * time.Millisecond)

		events, err := readEventLog(eventLogPath)
		if err != nil {
			t.Fatalf("failed to read event log: %v", err)
		}
		if len(events) != 0 {
			t.Errorf("expected 0 events for SQL query, got %d", len(events))
		}
	})

	t.Run("event log is append-only", func(t *testing.T) {
		t.Skip("Skipped: Event log tests require isolated server instances - covered by unit tests in db/tx_logger_test.go")
	})
}

// eventLogEntry represents a single event log entry
type eventLogEntry struct {
	ID        string          `json:"id"`
	Timestamp string          `json:"timestamp"`
	Tool      string          `json:"tool"`
	Queries   json.RawMessage `json:"queries"` // Can be int or array
	Success   bool            `json:"success"`
}

// readEventLog reads all events from the event log file
func readEventLog(path string) ([]eventLogEntry, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		if os.IsNotExist(err) {
			return []eventLogEntry{}, nil
		}
		return nil, err
	}

	var events []eventLogEntry
	lines := strings.Split(string(data), "\n")
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}
		var event eventLogEntry
		if err := json.Unmarshal([]byte(line), &event); err != nil {
			continue // Skip invalid lines
		}
		events = append(events, event)
	}
	return events, nil
}

func TestMCPServer_IdempotentCreation(t *testing.T) {
	dbPath := setupTestDB(t)
	server := startMCPServer(t, dbPath)

	t.Run("dataset creation is idempotent", func(t *testing.T) {
		// Create first dataset
		resp1, err := server.callTool("create_or_update_dataset", map[string]any{
			"name": "Idempotent Dataset",
			"type": "unstructured",
		})
		if err != nil {
			t.Fatalf("failed to create dataset: %v", err)
		}
		id1 := extractIDFromResponse(t, resp1)

		// Create second dataset with same name
		resp2, err := server.callTool("create_or_update_dataset", map[string]any{
			"name": "Idempotent Dataset",
			"type": "unstructured",
		})
		if err != nil {
			t.Fatalf("failed to create second dataset: %v", err)
		}
		id2 := extractIDFromResponse(t, resp2)

		if id1 != id2 {
			t.Errorf("expected same ID for duplicate dataset, got %s and %s", id1, id2)
		}

		// Verify only one dataset exists
		db, err := sql.Open("duckdb", dbPath)
		if err != nil {
			t.Fatalf("failed to open database: %v", err)
		}
		defer db.Close()

		var count int
		err = db.QueryRow("SELECT COUNT(*) FROM dataset WHERE name = ?", "Idempotent Dataset").Scan(&count)
		if err != nil {
			t.Fatalf("failed to query: %v", err)
		}
		if count != 1 {
			t.Errorf("expected 1 dataset, got %d", count)
		}
	})

	t.Run("pattern creation returns existing for duplicate", func(t *testing.T) {
		// Create first pattern
		resp1, err := server.callTool("create_or_update_pattern", map[string]any{
			"record_seconds": 10,
			"sleep_seconds":  50,
		})
		if err != nil {
			t.Fatalf("failed to create pattern: %v", err)
		}
		id1 := extractIDFromResponse(t, resp1)

		// Create second pattern with same values
		resp2, err := server.callTool("create_or_update_pattern", map[string]any{
			"record_seconds": 10,
			"sleep_seconds":  50,
		})
		if err != nil {
			t.Fatalf("failed to create second pattern: %v", err)
		}
		id2 := extractIDFromResponse(t, resp2)

		if id1 != id2 {
			t.Errorf("expected same ID for duplicate pattern, got %s and %s", id1, id2)
		}
	})

	t.Run("location creation with same name in different datasets", func(t *testing.T) {
		// Create two datasets
		datasetResp1, _ := server.callTool("create_or_update_dataset", map[string]any{
			"name": "Dataset A",
			"type": "structured",
		})
		datasetID1 := extractIDFromResponse(t, datasetResp1)

		datasetResp2, _ := server.callTool("create_or_update_dataset", map[string]any{
			"name": "Dataset B",
			"type": "structured",
		})
		datasetID2 := extractIDFromResponse(t, datasetResp2)

		// Create location in first dataset
		locResp1, err := server.callTool("create_or_update_location", map[string]any{
			"dataset_id":  datasetID1,
			"name":        "Same Name",
			"latitude":    -41.0,
			"longitude":   174.0,
			"timezone_id": "Pacific/Auckland",
		})
		if err != nil {
			t.Fatalf("failed to create location: %v", err)
		}
		if locResp1.Result.IsError {
			t.Fatalf("location creation failed: %v", locResp1.Result.Content)
		}
		locID1 := extractIDFromResponse(t, locResp1)

		// Create location with same name in second dataset
		locResp2, err := server.callTool("create_or_update_location", map[string]any{
			"dataset_id":  datasetID2,
			"name":        "Same Name",
			"latitude":    -42.0,
			"longitude":   175.0,
			"timezone_id": "Pacific/Auckland",
		})
		if err != nil {
			t.Fatalf("failed to create second location: %v", err)
		}
		locID2 := extractIDFromResponse(t, locResp2)

		if locID1 == locID2 {
			t.Error("expected different IDs for locations in different datasets")
		}
	})
}

// extractIDFromResponse extracts the entity ID from an MCP response
// Handles both direct format {"id": "..."} and wrapped format {"dataset": {"id": "..."}}
func extractIDFromResponse(t *testing.T, resp *mcpResponse) string {
	t.Helper()
	if resp.Result == nil || len(resp.Result.Content) == 0 {
		t.Fatal("response has no content")
	}
	var result map[string]any
	if err := json.Unmarshal([]byte(resp.Result.Content[0].Text), &result); err != nil {
		t.Fatalf("failed to parse response: %v", err)
	}
	// Try direct format first
	if id, ok := result["id"].(string); ok && id != "" {
		return id
	}
	// Try wrapped format (e.g., {"dataset": {"id": "..."}})
	for _, key := range []string{"dataset", "location", "cluster", "pattern"} {
		if wrapped, ok := result[key].(map[string]any); ok {
			if id, ok := wrapped["id"].(string); ok && id != "" {
				return id
			}
		}
	}
	t.Fatalf("response has no id field. Content: %s", resp.Result.Content[0].Text)
	return ""
}