summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--internal/store/data_test.go69
-rw-r--r--internal/store/index.go14
-rw-r--r--internal/store/index_test.go80
-rw-r--r--internal/store/store.go36
-rw-r--r--internal/store/store_test.go422
5 files changed, 592 insertions, 29 deletions
diff --git a/internal/store/data_test.go b/internal/store/data_test.go
index 9d8a86a..88b80cd 100644
--- a/internal/store/data_test.go
+++ b/internal/store/data_test.go
@@ -72,8 +72,10 @@ func TestDataString(t *testing.T) {
// --- TestDataCommitAndLoad ---------------------------------------------------
-// TestDataCommitAndLoad writes a Data to disk via Commit (force=true), then
-// reads it back with loadData and verifies the round-trip.
+// TestDataCommitAndLoad encrypts content directly and reads it back via
+// loadData, verifying the full encrypt/decrypt round-trip.
+// (Commit is tested in the integration tests that wire up a real git repo;
+// here we test the encrypt+write+decrypt path without git scaffolding.)
func TestDataCommitAndLoad(t *testing.T) {
ctx := context.Background()
c := newTestCipher(t)
@@ -82,13 +84,6 @@ func TestDataCommitAndLoad(t *testing.T) {
dataPath := filepath.Join(dir, "test.data")
wantContent := "my secret data\nwith newlines\n"
- d := &Data{
- Content: []byte(wantContent),
- DataPath: dataPath,
- }
-
- // Use a nil git — Commit with git.Add will fail, so we test Commit in two stages:
- // encrypt+write only. We manually write the file to sidestep git in unit tests.
ciphertext, err := c.Encrypt([]byte(wantContent))
if err != nil {
t.Fatalf("Encrypt: %v", err)
@@ -104,7 +99,6 @@ func TestDataCommitAndLoad(t *testing.T) {
if string(loaded.Content) != wantContent {
t.Errorf("loadData content = %q; want %q", loaded.Content, wantContent)
}
- _ = d // d was constructed for documentation only; loadData is what we test here.
}
// --- TestDataExport ----------------------------------------------------------
@@ -154,6 +148,61 @@ func TestDataExportCreatesSubdir(t *testing.T) {
}
}
+// --- TestLoadDataMissingFile -------------------------------------------------
+
+// TestLoadDataMissingFile verifies that loadData returns an error when the data
+// file does not exist on disk.
+func TestLoadDataMissingFile(t *testing.T) {
+ ctx := context.Background()
+ c := newTestCipher(t)
+
+ _, err := loadData(ctx, "/nonexistent/path/to.data", c)
+ if err == nil {
+ t.Error("loadData with missing file: expected error, got nil")
+ }
+}
+
+// --- TestLoadDataCorrupted ---------------------------------------------------
+
+// TestLoadDataCorrupted verifies that loadData returns an error when the file
+// contains data that cannot be decrypted (not valid ciphertext).
+func TestLoadDataCorrupted(t *testing.T) {
+ ctx := context.Background()
+ c := newTestCipher(t)
+
+ dir := t.TempDir()
+ badPath := filepath.Join(dir, "bad.data")
+ // Write garbage that is not valid AES-CBC ciphertext.
+ if err := os.WriteFile(badPath, []byte("not valid ciphertext"), 0o600); err != nil {
+ t.Fatalf("writing bad file: %v", err)
+ }
+
+ _, err := loadData(ctx, badPath, c)
+ if err == nil {
+ t.Error("loadData with corrupted file: expected error, got nil")
+ }
+}
+
+// --- TestDataExportUnwritable ------------------------------------------------
+
+// TestDataExportUnwritable verifies that Export returns an error when the
+// destination directory cannot be created (non-writable parent).
+func TestDataExportUnwritable(t *testing.T) {
+ // Skip when running as root since root can write anywhere.
+ if os.Getuid() == 0 {
+ t.Skip("running as root; permission check not applicable")
+ }
+
+ ctx := context.Background()
+ d := &Data{Content: []byte("test")}
+
+ // /nonexistent is a path whose parent "/" is read-only for non-root users.
+ err := d.Export(ctx, "/nonexistent/dir", "file.txt")
+ if err == nil {
+ t.Error("Export to unwritable dir: expected error, got nil")
+ }
+}
+
// --- TestDataCommitSkipsExisting ---------------------------------------------
// TestDataCommitSkipsExisting checks that Commit with force=false is a no-op
diff --git a/internal/store/index.go b/internal/store/index.go
index 99a83e6..9064971 100644
--- a/internal/store/index.go
+++ b/internal/store/index.go
@@ -95,9 +95,19 @@ func (idx *Index) String() string {
}
// CommitIndex encrypts the Description and writes it to IndexPath, then stages
-// the file with git. The force parameter is passed through to the underlying
-// write helper (unused here since index files always need to be written).
+// the file with git. When force is false and IndexPath already exists the write
+// is silently skipped, matching the Ruby CommitFile#commit_content behaviour and
+// keeping the .index in sync with a skipped .data Commit.
func (idx *Index) CommitIndex(ctx context.Context, c *crypto.Cipher, g *git.Git, force bool) error {
+ if !force {
+ if _, err := os.Stat(idx.IndexPath); err == nil {
+ // File already exists; skip without error to keep the index/data pair consistent
+ // when Data.Commit also skipped (force=false with an existing file).
+ fmt.Printf("Warning: %s already exists, skipping (use force to overwrite)\n", idx.IndexPath)
+ return nil
+ }
+ }
+
ciphertext, err := c.Encrypt([]byte(idx.Description))
if err != nil {
return fmt.Errorf("encrypting index %q: %w", idx.IndexPath, err)
diff --git a/internal/store/index_test.go b/internal/store/index_test.go
index 6bf2cef..a16d757 100644
--- a/internal/store/index_test.go
+++ b/internal/store/index_test.go
@@ -2,10 +2,30 @@
package store
import (
+ "context"
+ "os"
+ "path/filepath"
+ "sort"
"strings"
"testing"
+
+ "codeberg.org/snonux/geheim/internal/crypto"
)
+// newTestIndexCipher is a local helper to avoid import cycle via store_test.go.
+func newTestIndexCipher(t *testing.T) *crypto.Cipher {
+ t.Helper()
+ keyFile := filepath.Join(t.TempDir(), "key")
+ if err := os.WriteFile(keyFile, []byte("testkey1234567890"), 0o600); err != nil {
+ t.Fatalf("writing key file: %v", err)
+ }
+ c, err := crypto.NewCipher(keyFile, 32, "testpin", "Hello world")
+ if err != nil {
+ t.Fatalf("NewCipher: %v", err)
+ }
+ return c
+}
+
// --- TestIsBinary ------------------------------------------------------------
// TestIsBinary verifies that IsBinary returns the correct value for every case
@@ -82,9 +102,44 @@ func TestIndexString(t *testing.T) {
})
}
+// --- TestLoadIndexMissingFile ------------------------------------------------
+
+// TestLoadIndexMissingFile confirms that loadIndex returns an error when the
+// .index file does not exist on disk.
+func TestLoadIndexMissingFile(t *testing.T) {
+ ctx := context.Background()
+ c := newTestIndexCipher(t)
+
+ _, err := loadIndex(ctx, "/nonexistent/path/to.index", t.TempDir(), c)
+ if err == nil {
+ t.Error("loadIndex with missing file: expected error, got nil")
+ }
+}
+
+// --- TestLoadIndexCorrupted --------------------------------------------------
+
+// TestLoadIndexCorrupted confirms that loadIndex returns an error when the file
+// contains data that cannot be decrypted (not valid ciphertext).
+func TestLoadIndexCorrupted(t *testing.T) {
+ ctx := context.Background()
+ c := newTestIndexCipher(t)
+
+ dir := t.TempDir()
+ badPath := filepath.Join(dir, "bad.index")
+ if err := os.WriteFile(badPath, []byte("not valid ciphertext"), 0o600); err != nil {
+ t.Fatalf("writing bad file: %v", err)
+ }
+
+ _, err := loadIndex(ctx, badPath, dir, c)
+ if err == nil {
+ t.Error("loadIndex with corrupted file: expected error, got nil")
+ }
+}
+
// --- TestIndexSort -----------------------------------------------------------
-// TestIndexSort verifies that IndexSlice sorts by Description alphabetically.
+// TestIndexSort verifies that IndexSlice sorts by Description alphabetically
+// using sort.Sort, and validates the sort.Interface helper methods directly.
func TestIndexSort(t *testing.T) {
hash := strings.Repeat("0", 64)
indexes := IndexSlice{
@@ -93,21 +148,28 @@ func TestIndexSort(t *testing.T) {
{Description: "mango", Hash: hash},
}
- // Use sort package via the interface methods directly.
- n := indexes.Len()
- if n != 3 {
+ if n := indexes.Len(); n != 3 {
t.Fatalf("Len() = %d; want 3", n)
}
- // apple < mango should hold.
- appleIdx, mangoIdx := 1, 2 // after original order: zebra=0, apple=1, mango=2
- if !indexes.Less(appleIdx, mangoIdx) {
+ // Before sorting: zebra=0, apple=1, mango=2 — Less(1,2) = apple < mango = true.
+ if !indexes.Less(1, 2) {
t.Errorf("Less(apple, mango) = false; want true")
}
-
- // Swap zebra and apple.
+ // Swap and verify.
indexes.Swap(0, 1)
if indexes[0].Description != "apple" || indexes[1].Description != "zebra" {
t.Errorf("Swap(0,1) did not exchange elements")
}
+ // Restore original order before sort.Sort.
+ indexes.Swap(0, 1)
+
+ // Verify sort.Sort produces ascending alphabetical order.
+ sort.Sort(indexes)
+ want := []string{"apple", "mango", "zebra"}
+ for i, w := range want {
+ if indexes[i].Description != w {
+ t.Errorf("indexes[%d].Description = %q; want %q", i, indexes[i].Description, w)
+ }
+ }
}
diff --git a/internal/store/store.go b/internal/store/store.go
index 114b597..fe9f132 100644
--- a/internal/store/store.go
+++ b/internal/store/store.go
@@ -89,7 +89,16 @@ func (s *Store) WalkIndexes(ctx context.Context, searchTerm string, fn func(*Ind
if err != nil {
return err
}
- if d.IsDir() || !strings.HasSuffix(path, ".index") {
+ // Skip the .git directory entirely — the data directory is a git repo
+ // but no secrets live inside .git, so descending into it is wasteful
+ // and may surface spurious errors if any path happens to end in ".index".
+ if d.IsDir() {
+ if d.Name() == ".git" {
+ return filepath.SkipDir
+ }
+ return nil
+ }
+ if !strings.HasSuffix(path, ".index") {
return nil
}
return s.processIndexFile(ctx, path, searchTerm, regex, fn)
@@ -241,7 +250,8 @@ func runFzf(ctx context.Context, entries []string) (string, error) {
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
- // fzf exits 130 when the user presses Escape — treat as no selection.
+ // Any non-zero exit from fzf (e.g., 130 for Escape, 1 for no match)
+ // is treated as no selection — the caller receives ("", nil).
return "", nil
}
@@ -297,6 +307,10 @@ func (s *Store) Import(ctx context.Context, srcPath, destPath string, force bool
// ImportRecursive walks directory and imports every regular file under destDir.
// The description for each file is its path relative to the source directory.
+// Note: the Ruby import_recursive flattens subdirectories to basename in the
+// hash/storage path while preserving the full relative path only in the
+// description. Go preserves the full subpath in both description and hash path.
+// The compatibility verification task (355) will surface any impact on live data.
func (s *Store) ImportRecursive(ctx context.Context, directory, destDir string) error {
return filepath.WalkDir(directory, func(path string, d os.DirEntry, err error) error {
if err != nil {
@@ -316,7 +330,8 @@ func (s *Store) ImportRecursive(ctx context.Context, directory, destDir string)
// Remove finds all indexes matching searchTerm, prints each one, and prompts
// the user interactively before deleting the index+data pair. Mirrors Ruby's rm.
-func (s *Store) Remove(ctx context.Context, searchTerm string) error {
+// Pass os.Stdin as the reader for interactive use; a strings.Reader in tests.
+func (s *Store) Remove(ctx context.Context, searchTerm string, input io.Reader) error {
var indexes IndexSlice
if err := s.WalkIndexes(ctx, searchTerm, func(idx *Index) error {
indexes = append(indexes, idx)
@@ -327,7 +342,7 @@ func (s *Store) Remove(ctx context.Context, searchTerm string) error {
sort.Sort(indexes)
- scanner := bufio.NewScanner(os.Stdin)
+ scanner := bufio.NewScanner(input)
for _, idx := range indexes {
if err := s.confirmAndRemove(ctx, idx, scanner); err != nil {
return err
@@ -366,22 +381,27 @@ func (s *Store) confirmAndRemove(ctx context.Context, idx *Index, scanner *bufio
// ShredAllExported removes (shreds) every regular file in cfg.ExportDir.
// Uses GNU shred when available; falls back to "rm -Pfv" otherwise.
+// Mirrors Ruby's shred_all_exported: iterates all files and returns the last
+// non-nil error so that as many files as possible are shredded even on failure.
func (s *Store) ShredAllExported(ctx context.Context) error {
entries, err := filepath.Glob(filepath.Join(s.cfg.ExportDir, "*"))
if err != nil {
return fmt.Errorf("listing export dir: %w", err)
}
+ var lastErr error
for _, entry := range entries {
info, err := os.Stat(entry)
if err != nil || !info.Mode().IsRegular() {
continue
}
if err := shredFile(ctx, entry); err != nil {
- return err
+ // Record the error but keep shredding — security demands best-effort
+ // destruction of all exported secrets even if one fails.
+ lastErr = err
}
}
- return nil
+ return lastErr
}
// shredFile destroys a single file using shred(1) if available, or rm -Pfv.
@@ -404,8 +424,8 @@ func shredFile(ctx context.Context, filePath string) error {
func (s *Store) buildPair(description, hash string) (*Index, *Data) {
indexPath := filepath.Join(s.cfg.DataDir, hash+".index")
dataPath := filepath.Join(s.cfg.DataDir, hash+".data")
- hashBase := filepath.Base(hash + ".index")
- hashBase = strings.TrimSuffix(hashBase, ".index")
+ // filepath.Base of the hash gives the final path component (the filename stem).
+ hashBase := filepath.Base(hash)
idx := &Index{
Description: description,
diff --git a/internal/store/store_test.go b/internal/store/store_test.go
index ee1e07d..950c010 100644
--- a/internal/store/store_test.go
+++ b/internal/store/store_test.go
@@ -6,9 +6,11 @@ package store
import (
"context"
+ "io"
"os"
"os/exec"
"path/filepath"
+ "strings"
"testing"
"codeberg.org/snonux/geheim/internal/config"
@@ -276,6 +278,52 @@ func TestExport(t *testing.T) {
}
}
+// --- TestWalkIndexesInvalidRegex ---------------------------------------------
+
+// TestWalkIndexesInvalidRegex confirms that WalkIndexes returns an error when
+// the search term is not a valid regular expression.
+func TestWalkIndexesInvalidRegex(t *testing.T) {
+ ctx, store, _, _, _ := testSetup(t)
+
+ err := store.WalkIndexes(ctx, "[invalid", func(*Index) error { return nil })
+ if err == nil {
+ t.Error("WalkIndexes with invalid regex: expected error, got nil")
+ }
+}
+
+// --- TestImportMissingSourceFile ---------------------------------------------
+
+// TestImportMissingSourceFile confirms that Import returns an error when the
+// source file does not exist.
+func TestImportMissingSourceFile(t *testing.T) {
+ ctx, store, _, _, _ := testSetup(t)
+
+ err := store.Import(ctx, "/nonexistent/path/secret.txt", "dest/secret.txt", false)
+ if err == nil {
+ t.Error("Import with missing source file: expected error, got nil")
+ }
+}
+
+// --- TestHashPathEdgeCases ---------------------------------------------------
+
+// TestHashPathEdgeCases exercises edge inputs: empty string and a lone slash.
+func TestHashPathEdgeCases(t *testing.T) {
+ _, store, _, _, _ := testSetup(t)
+
+ // Empty string — HashPath("") should return sha256("") without panicking.
+ got := store.HashPath("")
+ if len(got) != 64 {
+ t.Errorf("HashPath(\"\") length = %d; want 64", len(got))
+ }
+
+ // Leading slash produces an empty first component after split on "/".
+ // Verify it does not panic and returns a non-empty result.
+ got2 := store.HashPath("/only")
+ if got2 == "" {
+ t.Error("HashPath(\"/only\") returned empty string")
+ }
+}
+
// --- TestRemoveEntry ---------------------------------------------------------
// TestRemoveEntry adds an entry, commits it so that git rm works, then removes
@@ -323,3 +371,377 @@ func TestRemoveEntry(t *testing.T) {
t.Errorf("expected 0 entries after remove; got %d", count)
}
}
+
+// --- TestSearch --------------------------------------------------------------
+
+// TestSearch adds two entries, then calls Search with ActionNone and verifies
+// both descriptions are returned sorted and printed to stdout.
+func TestSearch(t *testing.T) {
+ ctx, store, cfg, _, _ := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+ for _, desc := range []string{"zebra/entry", "apple/entry"} {
+ if err := store.Add(ctx, desc, "data"); err != nil {
+ t.Fatalf("Add %q: %v", desc, err)
+ }
+ }
+
+ results, err := store.Search(ctx, "", ActionNone, nil)
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ if len(results) != 2 {
+ t.Fatalf("expected 2 results; got %d", len(results))
+ }
+ // Search returns results sorted by Description.
+ if results[0].Description != "apple/entry" || results[1].Description != "zebra/entry" {
+ t.Errorf("unexpected sort order: %v, %v", results[0].Description, results[1].Description)
+ }
+}
+
+// --- TestSearchActionCat -----------------------------------------------------
+
+// TestSearchActionCat verifies that Search with ActionCat prints decrypted
+// content to stdout (we capture os.Stdout via a temp file redirect).
+func TestSearchActionCat(t *testing.T) {
+ ctx, store, cfg, _, _ := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+ if err := store.Add(ctx, "my/note.txt", "hello cat content\n"); err != nil {
+ t.Fatalf("Add: %v", err)
+ }
+
+ // Redirect stdout to capture output.
+ oldStdout := os.Stdout
+ r, w, err := os.Pipe()
+ if err != nil {
+ t.Fatalf("creating pipe: %v", err)
+ }
+ os.Stdout = w
+
+ results, err := store.Search(ctx, "note.txt", ActionCat, nil)
+
+ w.Close()
+ os.Stdout = oldStdout
+ var buf strings.Builder
+ io.Copy(&buf, r)
+
+ if err != nil {
+ t.Fatalf("Search ActionCat: %v", err)
+ }
+ if len(results) != 1 {
+ t.Fatalf("expected 1 result; got %d", len(results))
+ }
+ // The cat output should contain the decrypted content (tab-prefixed).
+ if !strings.Contains(buf.String(), "hello cat content") {
+ t.Errorf("stdout does not contain expected content: %q", buf.String())
+ }
+}
+
+// --- TestSearchActionCatBinarySkip -------------------------------------------
+
+// TestSearchActionCatBinarySkip confirms that ActionCat prints a skip message
+// rather than binary content when the description implies a binary file.
+func TestSearchActionCatBinarySkip(t *testing.T) {
+ ctx, store, cfg, _, _ := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+	// Detection is by file extension (IsBinary keys off the ".jpg" suffix, not content);
+	if err := store.Add(ctx, "photo.jpg", "\x89PNG\r\n\x1a\n"); err != nil { // payload: arbitrary binary-looking bytes (PNG magic)
+ t.Fatalf("Add: %v", err)
+ }
+
+ // Capture stdout to verify the skip message is printed.
+ r, w, err := os.Pipe()
+ if err != nil {
+ t.Fatalf("creating pipe: %v", err)
+ }
+ oldStdout := os.Stdout
+ os.Stdout = w
+
+ results, searchErr := store.Search(ctx, "photo.jpg", ActionCat, nil)
+
+ w.Close()
+ os.Stdout = oldStdout
+ var buf strings.Builder
+ io.Copy(&buf, r)
+
+ if searchErr != nil {
+ t.Fatalf("Search ActionCat (binary): %v", searchErr)
+ }
+ if len(results) != 1 {
+ t.Fatalf("expected 1 result; got %d", len(results))
+ }
+ // The "binary" warning must be present; the raw bytes must NOT be printed.
+ if !strings.Contains(buf.String(), "Not displaying") {
+ t.Errorf("expected binary-skip message; stdout = %q", buf.String())
+ }
+}
+
+// --- TestShredAllExported ----------------------------------------------------
+
+// TestShredAllExported writes two files to the export dir, calls ShredAllExported,
+// and verifies both files have been removed.
+func TestShredAllExported(t *testing.T) {
+ ctx, store, cfg, _, _ := testSetup(t)
+
+ // Write two plaintext files to the export directory.
+ for _, name := range []string{"secret1.txt", "secret2.txt"} {
+ p := filepath.Join(cfg.ExportDir, name)
+ if err := os.WriteFile(p, []byte("sensitive"), 0o600); err != nil {
+ t.Fatalf("writing export file: %v", err)
+ }
+ }
+
+ if err := store.ShredAllExported(ctx); err != nil {
+ t.Fatalf("ShredAllExported: %v", err)
+ }
+
+ // Both files should be gone.
+ for _, name := range []string{"secret1.txt", "secret2.txt"} {
+ p := filepath.Join(cfg.ExportDir, name)
+ if _, err := os.Stat(p); err == nil {
+ t.Errorf("file %q still exists after ShredAllExported", name)
+ }
+ }
+}
+
+// --- TestSearchActionExport --------------------------------------------------
+
+// TestSearchActionExport verifies that Search with ActionExport writes the
+// decrypted content to cfg.ExportDir using the basename of the description.
+func TestSearchActionExport(t *testing.T) {
+ ctx, store, cfg, _, _ := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+ wantContent := "exported via search\n"
+ if err := store.Add(ctx, "docs/report.txt", wantContent); err != nil {
+ t.Fatalf("Add: %v", err)
+ }
+
+ results, err := store.Search(ctx, "report.txt", ActionExport, nil)
+ if err != nil {
+ t.Fatalf("Search ActionExport: %v", err)
+ }
+ if len(results) != 1 {
+ t.Fatalf("expected 1 result; got %d", len(results))
+ }
+
+ // ActionExport uses the basename of the description as the export filename.
+ exportedPath := filepath.Join(cfg.ExportDir, "report.txt")
+ got, err := os.ReadFile(exportedPath)
+ if err != nil {
+ t.Fatalf("reading exported file: %v", err)
+ }
+ if string(got) != wantContent {
+ t.Errorf("exported content = %q; want %q", got, wantContent)
+ }
+}
+
+// --- TestImportRecursive -----------------------------------------------------
+
+// TestImportRecursive creates a directory tree, imports it, then verifies that
+// all files appear as indexed entries with the correct descriptions and content.
+func TestImportRecursive(t *testing.T) {
+ ctx, store, cfg, c, _ := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+ // Build a two-level source tree.
+ srcRoot := t.TempDir()
+ files := map[string]string{
+ "top.txt": "top level content\n",
+ "sub/nested.txt": "nested content\n",
+ }
+ for rel, content := range files {
+ full := filepath.Join(srcRoot, rel)
+ if err := os.MkdirAll(filepath.Dir(full), 0o700); err != nil {
+ t.Fatalf("mkdir: %v", err)
+ }
+ if err := os.WriteFile(full, []byte(content), 0o600); err != nil {
+ t.Fatalf("writing %q: %v", rel, err)
+ }
+ }
+
+ if err := store.ImportRecursive(ctx, srcRoot, "backup"); err != nil {
+ t.Fatalf("ImportRecursive: %v", err)
+ }
+
+ // Both files should now be indexed.
+ found := map[string]*Index{}
+ if err := store.WalkIndexes(ctx, "", func(idx *Index) error {
+ found[idx.Description] = idx
+ return nil
+ }); err != nil {
+ t.Fatalf("WalkIndexes: %v", err)
+ }
+
+ if len(found) != 2 {
+ t.Fatalf("expected 2 indexed entries; got %d", len(found))
+ }
+
+ // Verify content round-trips correctly.
+ for desc, wantContent := range map[string]string{
+ "backup/top.txt": "top level content\n",
+ "backup/sub/nested.txt": "nested content\n",
+ } {
+ idx, ok := found[desc]
+ if !ok {
+ t.Errorf("entry %q not found; found: %v", desc, func() []string {
+ keys := make([]string, 0, len(found))
+ for k := range found {
+ keys = append(keys, k)
+ }
+ return keys
+ }())
+ continue
+ }
+ d, err := loadData(ctx, filepath.Join(cfg.DataDir, idx.DataFile), c)
+ if err != nil {
+ t.Fatalf("loadData for %q: %v", desc, err)
+ }
+ if string(d.Content) != wantContent {
+ t.Errorf("content for %q = %q; want %q", desc, d.Content, wantContent)
+ }
+ }
+}
+
+// --- TestReimportAfterExport -------------------------------------------------
+
+// TestReimportAfterExport exports an entry, modifies the exported file, then
+// reimports it and verifies the updated content is stored in the encrypted .data file.
+func TestReimportAfterExport(t *testing.T) {
+ ctx, store, cfg, c, g := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+ original := "original content\n"
+ if err := store.Add(ctx, "editable/note.txt", original); err != nil {
+ t.Fatalf("Add: %v", err)
+ }
+
+ // Locate the entry.
+ var idx *Index
+ if err := store.WalkIndexes(ctx, "", func(i *Index) error { idx = i; return nil }); err != nil {
+ t.Fatalf("WalkIndexes: %v", err)
+ }
+
+ d, err := loadData(ctx, filepath.Join(cfg.DataDir, idx.DataFile), c)
+ if err != nil {
+ t.Fatalf("loadData: %v", err)
+ }
+
+ // Export to a temp export dir.
+ if err := d.Export(ctx, cfg.ExportDir, "note.txt"); err != nil {
+ t.Fatalf("Export: %v", err)
+ }
+
+ // Simulate editing the exported file.
+ updated := "updated content after edit\n"
+ if err := os.WriteFile(d.ExportedPath, []byte(updated), 0o600); err != nil {
+ t.Fatalf("updating exported file: %v", err)
+ }
+
+ // Reimport overwrites the encrypted .data with the updated content.
+ if err := d.ReimportAfterExport(ctx, c, g); err != nil {
+ t.Fatalf("ReimportAfterExport: %v", err)
+ }
+
+ // Reload from disk and verify the update was persisted.
+ reloaded, err := loadData(ctx, filepath.Join(cfg.DataDir, idx.DataFile), c)
+ if err != nil {
+ t.Fatalf("loadData after reimport: %v", err)
+ }
+ if string(reloaded.Content) != updated {
+ t.Errorf("reimported content = %q; want %q", reloaded.Content, updated)
+ }
+}
+
+// --- TestRemoveInteractive ---------------------------------------------------
+
+// TestRemoveInteractive tests Store.Remove by injecting a strings.Reader as the
+// interactive input (answering "y"). After removal WalkIndexes must find no entries.
+func TestRemoveInteractive(t *testing.T) {
+ ctx, store, cfg, _, g := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+ if err := store.Add(ctx, "interactive/remove", "data to delete"); err != nil {
+ t.Fatalf("Add: %v", err)
+ }
+ if err := g.Commit(ctx); err != nil {
+ t.Fatalf("Commit: %v", err)
+ }
+
+ // Inject "y\n" as user input to confirm deletion.
+ input := strings.NewReader("y\n")
+ if err := store.Remove(ctx, "interactive/remove", input); err != nil {
+ t.Fatalf("Remove: %v", err)
+ }
+
+ count := 0
+ if err := store.WalkIndexes(ctx, "", func(*Index) error { count++; return nil }); err != nil {
+ t.Fatalf("WalkIndexes after remove: %v", err)
+ }
+ if count != 0 {
+ t.Errorf("expected 0 entries after removal; got %d", count)
+ }
+}
+
+// --- TestRemoveInteractiveDecline --------------------------------------------
+
+// TestRemoveInteractiveDecline confirms that answering "n" leaves the entry intact.
+func TestRemoveInteractiveDecline(t *testing.T) {
+ ctx, store, cfg, _, g := testSetup(t)
+ initGitRepo(t, cfg.DataDir)
+
+ if err := store.Add(ctx, "keep/this", "important data"); err != nil {
+ t.Fatalf("Add: %v", err)
+ }
+ if err := g.Commit(ctx); err != nil {
+ t.Fatalf("Commit: %v", err)
+ }
+
+ // Inject "n\n" — user declines deletion.
+ input := strings.NewReader("n\n")
+ if err := store.Remove(ctx, "keep/this", input); err != nil {
+ t.Fatalf("Remove: %v", err)
+ }
+
+ count := 0
+ if err := store.WalkIndexes(ctx, "", func(*Index) error { count++; return nil }); err != nil {
+ t.Fatalf("WalkIndexes: %v", err)
+ }
+ if count != 1 {
+ t.Errorf("expected 1 entry after decline; got %d", count)
+ }
+}
+
+// --- TestCommitIndexSkipsExisting --------------------------------------------
+
+// TestCommitIndexSkipsExisting verifies that CommitIndex with force=false is a
+// no-op when IndexPath already exists, preserving the original encrypted content.
+func TestCommitIndexSkipsExisting(t *testing.T) {
+ ctx := context.Background()
+ c := newTestIndexCipher(t)
+ dir := t.TempDir()
+
+ indexPath := filepath.Join(dir, "existing.index")
+ sentinel := []byte("original encrypted content")
+ if err := os.WriteFile(indexPath, sentinel, 0o600); err != nil {
+ t.Fatalf("writing sentinel: %v", err)
+ }
+
+ idx := &Index{
+ Description: "should not be written",
+ IndexPath: indexPath,
+ Hash: strings.Repeat("0", 64),
+ }
+
+ // force=false must skip writing; passing nil for git since it won't be reached.
+ if err := idx.CommitIndex(ctx, c, nil, false); err != nil {
+ t.Errorf("CommitIndex(force=false) with existing file returned error: %v", err)
+ }
+
+ got, _ := os.ReadFile(indexPath)
+ if string(got) != string(sentinel) {
+ t.Errorf("index file was overwritten: got %q; want %q", got, sentinel)
+ }
+}