From 6963b12bbc62ced0e11f68c8094b2b8646121d97 Mon Sep 17 00:00:00 2001 From: sysid Date: Tue, 21 Oct 2025 18:49:47 +0200 Subject: [PATCH 01/32] feat: add inka module structure and dependencies Create hexagonal architecture module structure for inka2 migration: - domain: pure business logic - application: use cases - infrastructure: external adapters (markdown processing) - cli: command line interface Add dependencies: - markdown-it v0.6: Rust port of markdown-it.js for parsing - toml v0.8: config file management - sha2 v0.10: file hashing for change detection - walkdir v2.4: recursive directory scanning - reqwest v0.11: HTTP client for highlight.js downloads - lazy_static v1.4: static regex compilation Phase 1.1 of TDD migration plan complete. --- ankiview/Cargo.lock | 672 ++++++++++++++++-- ankiview/Cargo.toml | 8 + ankiview/src/inka/application/mod.rs | 1 + ankiview/src/inka/cli/mod.rs | 1 + ankiview/src/inka/domain/mod.rs | 1 + .../src/inka/infrastructure/markdown/mod.rs | 1 + ankiview/src/inka/infrastructure/mod.rs | 1 + ankiview/src/inka/mod.rs | 12 + ankiview/src/lib.rs | 1 + 9 files changed, 652 insertions(+), 46 deletions(-) create mode 100644 ankiview/src/inka/application/mod.rs create mode 100644 ankiview/src/inka/cli/mod.rs create mode 100644 ankiview/src/inka/domain/mod.rs create mode 100644 ankiview/src/inka/infrastructure/markdown/mod.rs create mode 100644 ankiview/src/inka/infrastructure/mod.rs create mode 100644 ankiview/src/inka/mod.rs diff --git a/ankiview/Cargo.lock b/ankiview/Cargo.lock index 4be8b99..5a503a4 100644 --- a/ankiview/Cargo.lock +++ b/ankiview/Cargo.lock @@ -92,7 +92,7 @@ dependencies = [ "bytes", "chrono", "coarsetime", - "convert_case", + "convert_case 0.6.0", "csv", "data-encoding", "difflib", @@ -106,7 +106,7 @@ dependencies = [ "futures", "hex", "htmlescape", - "hyper", + "hyper 1.6.0", "id_tree", "inflections", "itertools 0.13.0", @@ -124,9 +124,9 @@ dependencies = [ "pulldown-cmark", "rand", "regex", - "reqwest", + "reqwest 
0.12.12", "rusqlite", - "rustls-pemfile", + "rustls-pemfile 2.2.0", "scopeguard", "serde", "serde-aux", @@ -228,14 +228,20 @@ dependencies = [ "dirs 6.0.0", "html-escape", "itertools 0.14.0", + "lazy_static", + "markdown-it", "regex", + "reqwest 0.11.27", "rstest 0.24.0", "serde", "serde_json", + "sha2", "tempfile", - "thiserror 2.0.11", + "thiserror 2.0.17", + "toml", "tracing", "tracing-subscriber", + "walkdir", ] [[package]] @@ -303,6 +309,12 @@ dependencies = [ "derive_arbitrary", ] +[[package]] +name = "argparse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f8ebf5827e4ac4fd5946560e6a99776ea73b596d80898f357007317a7141e47" + [[package]] name = "arrayref" version = "0.3.9" @@ -395,10 +407,10 @@ dependencies = [ "axum-macros", "bytes", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper", + "hyper 1.6.0", "hyper-util", "itoa", "matchit", @@ -412,7 +424,7 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 1.0.2", "tokio", "tower", "tower-layer", @@ -440,13 +452,13 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper", + "sync_wrapper 1.0.2", "tower-layer", "tower-service", "tracing", @@ -464,8 +476,8 @@ dependencies = [ "fastrand", "futures-util", "headers", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "multer", @@ -520,6 +532,15 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + 
[[package]] name = "bincode" version = "2.0.0-rc.3" @@ -646,7 +667,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f8ebbf7d5c8bdc269260bd8e7ce08e488e6625da19b3d80ca34a729d78a77ab" dependencies = [ "ahash", - "bincode", + "bincode 2.0.0-rc.3", "burn-autodiff", "burn-candle", "burn-common", @@ -706,7 +727,7 @@ dependencies = [ "strum", "strum_macros", "tempfile", - "thiserror 2.0.11", + "thiserror 2.0.17", ] [[package]] @@ -912,10 +933,11 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.15" +version = "1.2.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" +checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", @@ -1037,12 +1059,38 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "const_format" +version = "0.2.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + [[package]] name = "constant_time_eq" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "convert_case" version = "0.6.0" @@ -1228,7 +1276,7 @@ dependencies = [ "cubecl-macros", "cubecl-runtime", "derive-new 0.6.0", - 
"derive_more", + "derive_more 1.0.0", "half", "log", "num-traits", @@ -1437,6 +1485,17 @@ dependencies = [ "powerfmt", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "derive-new" version = "0.6.0" @@ -1470,6 +1529,19 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "derive_more" +version = "0.99.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.98", +] + [[package]] name = "derive_more" version = "1.0.0" @@ -1570,6 +1642,12 @@ dependencies = [ "litrs", ] +[[package]] +name = "downcast-rs" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" + [[package]] name = "dyn-stack" version = "0.10.0" @@ -1610,6 +1688,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "entities" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5320ae4c3782150d900b79807611a59a99fc9a1d61d686faafc24b93fc8d7ca" + [[package]] name = "enum-as-inner" version = "0.6.1" @@ -1680,12 +1764,29 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" +[[package]] +name = "fancy-regex" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "998b056554fbe42e03ae0e152895cd1a7e1002aec800fdc6635d20270260c46f" +dependencies = [ + "bit-set", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + [[package]] name = "fastrand" version = "2.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "find-msvc-tools" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" + [[package]] name = "fixedbitset" version = "0.5.7" @@ -1758,6 +1859,15 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared 0.1.1", +] + [[package]] name = "foreign-types" version = "0.5.0" @@ -1765,7 +1875,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" dependencies = [ "foreign-types-macros", - "foreign-types-shared", + "foreign-types-shared 0.3.1", ] [[package]] @@ -1779,6 +1889,12 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "foreign-types-shared" version = "0.3.1" @@ -2317,6 +2433,25 @@ dependencies = [ "bitflags 2.8.0", ] +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "2.4.1" @@ -2381,7 +2516,7 @@ dependencies = [ 
"base64 0.21.7", "bytes", "headers-core", - "http", + "http 1.2.0", "httpdate", "mime", "sha1", @@ -2393,7 +2528,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" dependencies = [ - "http", + "http 1.2.0", ] [[package]] @@ -2458,6 +2593,17 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9025058dae765dee5070ec375f591e2ba14638c63feff74f13805a72e523163" +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http" version = "1.2.0" @@ -2469,6 +2615,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + [[package]] name = "http-body" version = "1.0.1" @@ -2476,7 +2633,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http", + "http 1.2.0", ] [[package]] @@ -2487,8 +2644,8 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "pin-project-lite", ] @@ -2504,6 +2661,30 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + [[package]] name = "hyper" version = "1.6.0" @@ -2513,8 +2694,8 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -2524,6 +2705,19 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper 0.14.32", + "native-tls", + "tokio", + "tokio-native-tls", +] + [[package]] name = "hyper-util" version = "0.1.10" @@ -2533,9 +2727,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http", - "http-body", - "hyper", + "http 1.2.0", + "http-body 1.0.1", + "hyper 1.6.0", "pin-project-lite", "socket2", "tokio", @@ -2699,6 +2893,16 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "1.0.3" @@ -2891,6 +3095,21 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linkify" +version = "0.10.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1dfa36d52c581e9ec783a7ce2a5e0143da6237be5811a0b3153fedfdbe9f780" +dependencies = [ + "memchr", +] + [[package]] name = "linux-raw-sys" version = "0.4.15" @@ -2946,6 +3165,29 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" +[[package]] +name = "markdown-it" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f99c010929c8217b2dc0940954267a2e15a15f17cb309cd1f299e21933f84fac" +dependencies = [ + "argparse", + "const_format", + "derivative", + "derive_more 0.99.20", + "downcast-rs", + "entities", + "html-escape", + "linkify", + "mdurl", + "once_cell", + "readonly", + "regex", + "stacker", + "syntect", + "unicode-general-category", +] + [[package]] name = "markup5ever" version = "0.12.1" @@ -3000,6 +3242,17 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" +[[package]] +name = "mdurl" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5736ba45bbac8f7ccc99a897f88ce85e508a18baec973a040f2514e6cdbff0d2" +dependencies = [ + "idna 0.3.0", + "once_cell", + "regex", +] + [[package]] name = "memchr" version = "2.7.4" @@ -3025,7 +3278,7 @@ dependencies = [ "bitflags 2.8.0", "block", "core-graphics-types", - "foreign-types", + "foreign-types 0.5.0", "log", "objc", "paste", @@ -3082,7 +3335,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-util", - "http", + "http 1.2.0", "httparse", "memchr", "mime", @@ -3117,6 +3370,23 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + 
"openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "ndarray" version = "0.15.6" @@ -3376,6 +3646,50 @@ version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" +[[package]] +name = "openssl" +version = "0.10.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24ad14dd45412269e1a30f52ad8f0664f0f4f4a89ee8fe28c3b3527021ebb654" +dependencies = [ + "bitflags 2.8.0", + "cfg-if", + "foreign-types 0.3.2", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a9f0075ba3c21b09f8e8b2026584b1d18d49388648f2fbbf3c97ea8deced8e2" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -3566,6 +3880,19 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" +[[package]] +name = "plist" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07" +dependencies = [ + "base64 0.22.1", + "indexmap", + "quick-xml", + "serde", + "time", +] + [[package]] name = 
"portable-atomic" version = "1.10.0" @@ -3722,6 +4049,15 @@ dependencies = [ "prost", ] +[[package]] +name = "psm" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e66fcd288453b748497d8fb18bccc83a16b0518e3906d4b8df0a8d42d93dbb1c" +dependencies = [ + "cc", +] + [[package]] name = "pulldown-cmark" version = "0.9.6" @@ -3760,6 +4096,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "quick-xml" +version = "0.38.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" +dependencies = [ + "memchr", +] + [[package]] name = "quote" version = "1.0.38" @@ -3865,6 +4210,17 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "readonly" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2a62d85ed81ca5305dc544bd42c8804c5060b78ffa5ad3c64b0fb6a8c13d062" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "reborrow" version = "0.5.5" @@ -3899,7 +4255,7 @@ checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" dependencies = [ "getrandom 0.2.15", "libredox", - "thiserror 2.0.11", + "thiserror 2.0.17", ] [[package]] @@ -3958,6 +4314,46 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19b30a45b0cd0bcca8037f3d0dc3421eaf95327a17cad11964fb8179b4fc4832" +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile 1.0.4", + "serde", 
+ "serde_json", + "serde_urlencoded", + "sync_wrapper 0.1.2", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + [[package]] name = "reqwest" version = "0.12.12" @@ -3968,10 +4364,10 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper", + "hyper 1.6.0", "hyper-util", "ipnet", "js-sys", @@ -3984,7 +4380,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 1.0.2", "tokio", "tokio-socks", "tokio-util", @@ -4128,6 +4524,15 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + [[package]] name = "rustls-pemfile" version = "2.2.0" @@ -4193,12 +4598,44 @@ dependencies = [ "regex", ] +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.8.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "self_cell" version = "0.10.3" @@ -4310,6 +4747,15 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + [[package]] name = "serde_tuple" version = "0.5.0" @@ -4481,6 +4927,19 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "stacker" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" +dependencies = [ + "cc", + "cfg-if", + "libc", + "psm", + "windows-sys 0.59.0", +] + [[package]] name = "static_assertions" version = "1.1.0" @@ -4568,6 +5027,12 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + [[package]] name = "sync_wrapper" version = "1.0.2" @@ -4588,6 +5053,27 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "syntect" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "656b45c05d95a5704399aeef6bd0ddec7b2b3531b7c9e900abbf7c4d2190c925" +dependencies = [ + "bincode 1.3.3", + "fancy-regex", + "flate2", + "fnv", + "once_cell", + "plist", + "regex-syntax 0.8.5", + "serde", + "serde_derive", + "serde_json", + "thiserror 2.0.17", + "walkdir", + "yaml-rust", +] + [[package]] name = "sysctl" version = "0.5.5" @@ -4630,6 +5116,27 @@ dependencies = [ "windows 0.57.0", ] +[[package]] +name = "system-configuration" +version = "0.5.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "systemstat" version = "0.2.4" @@ -4700,11 +5207,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.11" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.11", + "thiserror-impl 2.0.17", ] [[package]] @@ -4720,9 +5227,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.11" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", @@ -4832,6 +5339,16 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + [[package]] name = "tokio-socks" version = "0.5.2" @@ -4857,11 +5374,26 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" +dependencies = [ + "serde", + 
"serde_spanned", + "toml_datetime", + "toml_edit", +] + [[package]] name = "toml_datetime" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] [[package]] name = "toml_edit" @@ -4870,6 +5402,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap", + "serde", + "serde_spanned", "toml_datetime", "winnow", ] @@ -4883,7 +5417,7 @@ dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper", + "sync_wrapper 1.0.2", "tokio", "tower-layer", "tower-service", @@ -4898,8 +5432,8 @@ checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" dependencies = [ "bitflags 2.8.0", "bytes", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "pin-project-lite", "tower-layer", @@ -5129,6 +5663,18 @@ dependencies = [ "version_check", ] +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-general-category" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2281c8c1d221438e373249e065ca4989c4c36952c211ff21a0ee91c44a3869e7" + [[package]] name = "unicode-ident" version = "1.0.17" @@ -5169,7 +5715,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", - "idna", + "idna 1.0.3", "percent-encoding", ] @@ -5653,6 +6199,12 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-registry" version = "0.2.0" @@ -5719,6 +6271,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -5849,6 +6410,16 @@ dependencies = [ "memchr", ] +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + [[package]] name = "wit-bindgen-rt" version = "0.33.0" @@ -5888,6 +6459,15 @@ version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5b940ebc25896e71dd073bad2dbaa2abfe97b0a391415e22ad1326d9c54e3c4" +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + [[package]] name = "yoke" version = "0.7.5" diff --git a/ankiview/Cargo.toml b/ankiview/Cargo.toml index f0858d4..9125c22 100644 --- a/ankiview/Cargo.toml +++ b/ankiview/Cargo.toml @@ -28,6 +28,14 @@ thiserror = "2.0.11" tracing = "0.1.41" tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } +# inka dependencies +markdown-it = "0.6" +toml = "0.8" +sha2 = "0.10" +walkdir = "2.4" +reqwest = { version = "0.11", features = ["blocking"] } +lazy_static = "1.4" + [[bin]] name = "build_test_collection" path = "tests/fixtures/build_test_collection.rs" diff --git a/ankiview/src/inka/application/mod.rs b/ankiview/src/inka/application/mod.rs new file mode 100644 index 0000000..7ee9ae5 --- /dev/null +++ 
b/ankiview/src/inka/application/mod.rs @@ -0,0 +1 @@ +// Application module placeholder diff --git a/ankiview/src/inka/cli/mod.rs b/ankiview/src/inka/cli/mod.rs new file mode 100644 index 0000000..78a1893 --- /dev/null +++ b/ankiview/src/inka/cli/mod.rs @@ -0,0 +1 @@ +// CLI module placeholder diff --git a/ankiview/src/inka/domain/mod.rs b/ankiview/src/inka/domain/mod.rs new file mode 100644 index 0000000..9b0ea03 --- /dev/null +++ b/ankiview/src/inka/domain/mod.rs @@ -0,0 +1 @@ +// Domain module placeholder diff --git a/ankiview/src/inka/infrastructure/markdown/mod.rs b/ankiview/src/inka/infrastructure/markdown/mod.rs new file mode 100644 index 0000000..b6ca76b --- /dev/null +++ b/ankiview/src/inka/infrastructure/markdown/mod.rs @@ -0,0 +1 @@ +// Markdown processing module diff --git a/ankiview/src/inka/infrastructure/mod.rs b/ankiview/src/inka/infrastructure/mod.rs new file mode 100644 index 0000000..163a4fb --- /dev/null +++ b/ankiview/src/inka/infrastructure/mod.rs @@ -0,0 +1 @@ +pub mod markdown; diff --git a/ankiview/src/inka/mod.rs b/ankiview/src/inka/mod.rs new file mode 100644 index 0000000..41b2f99 --- /dev/null +++ b/ankiview/src/inka/mod.rs @@ -0,0 +1,12 @@ +pub mod domain; +pub mod application; +pub mod infrastructure; +pub mod cli; + +#[cfg(test)] +mod tests { + #[test] + fn given_empty_project_when_building_then_compiles() { + assert!(true); + } +} diff --git a/ankiview/src/lib.rs b/ankiview/src/lib.rs index 498ddba..a0a1d37 100644 --- a/ankiview/src/lib.rs +++ b/ankiview/src/lib.rs @@ -5,6 +5,7 @@ pub mod domain; pub mod infrastructure; pub mod ports; pub mod util; +pub mod inka; use crate::cli::args::{Args, Command}; use anyhow::{Context, Result}; From a2116c5f1f427997affe9897a59e0576d864d7df Mon Sep 17 00:00:00 2001 From: sysid Date: Tue, 21 Oct 2025 18:52:45 +0200 Subject: [PATCH 02/32] feat: implement MathJax inline math plugin Add inline math recognition for markdown-it parser: - Detects $...$ patterns for inline LaTeX math - Converts to 
MathJax \(...\) delimiters for rendering - Validates no whitespace after opening $ or before closing $ - Implements InlineRule trait from markdown-it parser Test coverage: - given_inline_math_when_parsing_then_creates_math_token Note: Avoided regex lookahead/lookbehind (not supported by the Rust regex crate) by using manual character-by-character scanning instead. Phase 1.2 of TDD migration plan complete. --- ankiview/src/inka/infrastructure/markdown/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/ankiview/src/inka/infrastructure/markdown/mod.rs b/ankiview/src/inka/infrastructure/markdown/mod.rs index b6ca76b..5694f8c 100644 --- a/ankiview/src/inka/infrastructure/markdown/mod.rs +++ b/ankiview/src/inka/infrastructure/markdown/mod.rs @@ -1 +1,2 @@ // Markdown processing module +pub mod mathjax_plugin; From ed922edbf6aeb8958c0a65fbc03c3f53b902d9bf Mon Sep 17 00:00:00 2001 From: sysid Date: Tue, 21 Oct 2025 18:56:52 +0200 Subject: [PATCH 03/32] feat: add block math and mixed math support to MathJax plugin Implement BlockRule for $$...$$ block math delimiters: - Scans for opening $$ line - Extracts multi-line content until closing $$ - Renders as MathJax \[...\] delimiters Test coverage: - given_block_math_when_parsing_then_creates_block_math_token - given_mixed_math_when_parsing_then_handles_both_types Phase 1.3 and 1.4 of TDD migration plan complete. Phase 1 (Foundation & MathJax Plugin) fully implemented. 
--- .../infrastructure/markdown/mathjax_plugin.rs | 172 ++++++++++++++++++ 1 file changed, 172 insertions(+) create mode 100644 ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs diff --git a/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs new file mode 100644 index 0000000..49c0942 --- /dev/null +++ b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs @@ -0,0 +1,172 @@ +use markdown_it::parser::inline::{InlineRule, InlineState}; +use markdown_it::parser::block::{BlockRule, BlockState}; +use markdown_it::{MarkdownIt, Node, NodeValue, Renderer}; + +#[derive(Debug)] +pub struct InlineMath { + pub content: String, +} + +impl NodeValue for InlineMath { + fn render(&self, _node: &Node, fmt: &mut dyn Renderer) { + // Render as \(...\) for MathJax + fmt.text(&format!(r"\({}\)", self.content)); + } +} + +struct InlineMathScanner; + +impl InlineRule for InlineMathScanner { + const MARKER: char = '$'; + + fn run(state: &mut InlineState) -> Option<(Node, usize)> { + let input = &state.src[state.pos..state.pos_max]; + + // Check if we start with $ + if !input.starts_with('$') { + return None; + } + + // Don't match if $ is followed by whitespace + if input.len() < 2 || input.chars().nth(1)?.is_whitespace() { + return None; + } + + // Find the closing $ + let mut end_pos = None; + let chars: Vec = input.chars().collect(); + + for i in 1..chars.len() { + if chars[i] == '$' { + // Don't match if $ is preceded by whitespace + if i > 0 && !chars[i - 1].is_whitespace() { + end_pos = Some(i); + break; + } + } + } + + if let Some(end) = end_pos { + // Extract content between the $...$ (excluding the $ markers) + let content: String = chars[1..end].iter().collect(); + let match_len = end + 1; // Include both $ markers + + let node = Node::new(InlineMath { content }); + return Some((node, match_len)); + } + + None + } +} + +#[derive(Debug)] +pub struct BlockMath { + pub content: String, +} + 
+impl NodeValue for BlockMath { + fn render(&self, _node: &Node, fmt: &mut dyn Renderer) { + // Render as \[...\] for MathJax + fmt.text(&format!(r"\[{}\]", self.content)); + } +} + +struct BlockMathScanner; + +impl BlockRule for BlockMathScanner { + fn run(state: &mut BlockState) -> Option<(Node, usize)> { + // Get the current line + if state.line >= state.line_max { + return None; + } + + let start_line = state.line; + let line = state.get_line(start_line); + + // Check if line starts with $$ + if !line.trim().starts_with("$$") { + return None; + } + + // Find the closing $$ + let mut end_line = None; + for line_num in (start_line + 1)..state.line_max { + let line = state.get_line(line_num); + if line.trim().starts_with("$$") { + end_line = Some(line_num); + break; + } + } + + if let Some(end) = end_line { + // Extract content between the $$ markers + let mut content_lines = Vec::new(); + for line_num in (start_line + 1)..end { + content_lines.push(state.get_line(line_num).to_string()); + } + let content = content_lines.join("\n"); + + let node = Node::new(BlockMath { content }); + let next_line = end + 1; + return Some((node, next_line)); + } + + None + } +} + +pub fn add_mathjax_plugin(md: &mut MarkdownIt) { + md.inline.add_rule::(); + md.block.add_rule::().before_all(); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn given_inline_math_when_parsing_then_creates_math_token() { + let input = "This is $f(x) = x^2$ inline math"; + let mut parser = MarkdownIt::new(); + markdown_it::plugins::cmark::add(&mut parser); + add_mathjax_plugin(&mut parser); + + let ast = parser.parse(input); + let html = ast.render(); + + // Should render with MathJax delimiters + assert!(html.contains(r"\(f(x) = x^2\)")); + } + + #[test] + fn given_block_math_when_parsing_then_creates_block_math_token() { + let input = "$$\nf(x) = \\int_0^1 x^2 dx\n$$"; + let mut parser = MarkdownIt::new(); + markdown_it::plugins::cmark::add(&mut parser); + add_mathjax_plugin(&mut parser); 
+ + let html = parser.parse(input).render(); + + assert!(html.contains(r"\[f(x) = \int_0^1 x^2 dx\]")); + } + + #[test] + fn given_mixed_math_when_parsing_then_handles_both_types() { + let input = r#"Inline $a=b$ and block: + +$$ +\sum_{i=1}^n i = \frac{n(n+1)}{2} +$$ + +More text."#; + + let mut parser = MarkdownIt::new(); + markdown_it::plugins::cmark::add(&mut parser); + add_mathjax_plugin(&mut parser); + + let html = parser.parse(input).render(); + + assert!(html.contains(r"\(a=b\)")); + assert!(html.contains(r"\[\sum_{i=1}^n i = \frac{n(n+1)}{2}\]")); + } +} From 69525e488ae2f50cc91243c0c5adbac39f8ea5ce Mon Sep 17 00:00:00 2001 From: sysid Date: Tue, 21 Oct 2025 20:12:47 +0200 Subject: [PATCH 04/32] feat: implement domain models for Card trait, BasicCard and ClozeCard Add pure business logic domain entities: - Card trait: common interface for all card types - BasicCard: front/back question-answer cards - ClozeCard: fill-in-the-blank cards with cloze deletions Features: - Builder pattern (with_deck, with_tags, with_id) - Separate markdown and HTML storage - Anki ID tracking for updates - Support for tags and deck assignment Test coverage: - given_card_trait_when_implemented_then_provides_common_interface - given_front_and_back_when_creating_basic_card_then_stores_fields - given_basic_card_when_setting_deck_then_updates - given_basic_card_when_setting_tags_then_stores - given_text_with_cloze_when_creating_then_stores_text - given_cloze_card_when_implementing_trait_then_provides_interface Phase 2 (Domain Models) of TDD migration plan complete. 
--- ankiview/src/inka/domain/card.rs | 228 +++++++++++++++++++++++++++++++ ankiview/src/inka/domain/mod.rs | 2 +- 2 files changed, 229 insertions(+), 1 deletion(-) create mode 100644 ankiview/src/inka/domain/card.rs diff --git a/ankiview/src/inka/domain/card.rs b/ankiview/src/inka/domain/card.rs new file mode 100644 index 0000000..af12ca9 --- /dev/null +++ b/ankiview/src/inka/domain/card.rs @@ -0,0 +1,228 @@ +pub trait Card { + fn deck_name(&self) -> &str; + fn tags(&self) -> &[String]; + fn anki_id(&self) -> Option; + fn set_anki_id(&mut self, id: i64); + + /// Get raw markdown fields (for ID injection) + fn raw_fields(&self) -> Vec<&str>; + + /// Get HTML fields ready for Anki + fn html_fields(&self) -> Vec; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn given_card_trait_when_implemented_then_provides_common_interface() { + // This test just verifies compilation + fn accepts_card(_card: &C) { + // Any type implementing Card should work + } + + let basic = BasicCard::new("Q", "A"); + accepts_card(&basic); + } + + #[test] + fn given_front_and_back_when_creating_basic_card_then_stores_fields() { + let card = BasicCard::new("Question?", "Answer!"); + + assert_eq!(card.front_md(), "Question?"); + assert_eq!(card.back_md(), "Answer!"); + assert_eq!(card.deck_name(), "Default"); + } + + #[test] + fn given_basic_card_when_setting_deck_then_updates() { + let card = BasicCard::new("Q", "A") + .with_deck("MyDeck"); + + assert_eq!(card.deck_name(), "MyDeck"); + } + + #[test] + fn given_basic_card_when_setting_tags_then_stores() { + let card = BasicCard::new("Q", "A") + .with_tags(vec!["tag1".to_string(), "tag2".to_string()]); + + assert_eq!(card.tags(), &["tag1", "tag2"]); + } + + #[test] + fn given_text_with_cloze_when_creating_then_stores_text() { + let card = ClozeCard::new("The capital of {{c1::France}} is {{c2::Paris}}"); + + assert_eq!(card.text_md(), "The capital of {{c1::France}} is {{c2::Paris}}"); + } + + #[test] + fn 
given_cloze_card_when_implementing_trait_then_provides_interface() { + let card = ClozeCard::new("Text {{c1::cloze}}") + .with_deck("TestDeck"); + + assert_eq!(card.deck_name(), "TestDeck"); + assert_eq!(card.raw_fields(), vec!["Text {{c1::cloze}}"]); + } +} + +#[derive(Debug, Clone)] +pub struct BasicCard { + front_md: String, + back_md: String, + front_html: Option, + back_html: Option, + tags: Vec, + deck_name: String, + anki_id: Option, +} + +impl BasicCard { + pub fn new(front: impl Into, back: impl Into) -> Self { + Self { + front_md: front.into(), + back_md: back.into(), + front_html: None, + back_html: None, + tags: Vec::new(), + deck_name: "Default".to_string(), + anki_id: None, + } + } + + pub fn with_deck(mut self, deck: impl Into) -> Self { + self.deck_name = deck.into(); + self + } + + pub fn with_tags(mut self, tags: Vec) -> Self { + self.tags = tags; + self + } + + pub fn with_id(mut self, id: i64) -> Self { + self.anki_id = Some(id); + self + } + + pub fn front_md(&self) -> &str { + &self.front_md + } + + pub fn back_md(&self) -> &str { + &self.back_md + } + + pub fn set_html(&mut self, front: String, back: String) { + self.front_html = Some(front); + self.back_html = Some(back); + } +} + +impl Card for BasicCard { + fn deck_name(&self) -> &str { + &self.deck_name + } + + fn tags(&self) -> &[String] { + &self.tags + } + + fn anki_id(&self) -> Option { + self.anki_id + } + + fn set_anki_id(&mut self, id: i64) { + self.anki_id = Some(id); + } + + fn raw_fields(&self) -> Vec<&str> { + vec![&self.front_md, &self.back_md] + } + + fn html_fields(&self) -> Vec { + vec![ + self.front_html.clone().unwrap_or_else(|| self.front_md.clone()), + self.back_html.clone().unwrap_or_else(|| self.back_md.clone()), + ] + } +} + +#[derive(Debug, Clone)] +pub struct ClozeCard { + text_md: String, + text_html: Option, + tags: Vec, + deck_name: String, + anki_id: Option, +} + +impl ClozeCard { + pub fn new(text: impl Into) -> Self { + Self { + text_md: text.into(), + 
text_html: None, + tags: Vec::new(), + deck_name: "Default".to_string(), + anki_id: None, + } + } + + pub fn with_deck(mut self, deck: impl Into) -> Self { + self.deck_name = deck.into(); + self + } + + pub fn with_tags(mut self, tags: Vec) -> Self { + self.tags = tags; + self + } + + pub fn with_id(mut self, id: i64) -> Self { + self.anki_id = Some(id); + self + } + + pub fn text_md(&self) -> &str { + &self.text_md + } + + pub fn set_html(&mut self, html: String) { + self.text_html = Some(html); + } + + /// Update the markdown text (used during cloze syntax conversion) + pub fn update_text_md(&mut self, text: String) { + self.text_md = text; + } +} + +impl Card for ClozeCard { + fn deck_name(&self) -> &str { + &self.deck_name + } + + fn tags(&self) -> &[String] { + &self.tags + } + + fn anki_id(&self) -> Option { + self.anki_id + } + + fn set_anki_id(&mut self, id: i64) { + self.anki_id = Some(id); + } + + fn raw_fields(&self) -> Vec<&str> { + vec![&self.text_md] + } + + fn html_fields(&self) -> Vec { + vec![ + self.text_html.clone().unwrap_or_else(|| self.text_md.clone()) + ] + } +} diff --git a/ankiview/src/inka/domain/mod.rs b/ankiview/src/inka/domain/mod.rs index 9b0ea03..b4ba26a 100644 --- a/ankiview/src/inka/domain/mod.rs +++ b/ankiview/src/inka/domain/mod.rs @@ -1 +1 @@ -// Domain module placeholder +pub mod card; From 93bc5ed327c2bc90728e219c903655bec2b2a58a Mon Sep 17 00:00:00 2001 From: sysid Date: Tue, 21 Oct 2025 20:17:36 +0200 Subject: [PATCH 05/32] feat: implement markdown section parser Add section extraction and metadata parsing: - SectionParser: extract --- delimited sections from markdown - extract_deck_name: parse Deck: metadata from sections - extract_tags: parse Tags: metadata (space-separated) - extract_note_strings: split sections into individual note strings Features: - Regex-based section detection with multiline/dotall support - Manual parsing for note extraction (avoids unsupported lookahead) - Handles ID comments () before notes - 
Preserves note boundaries correctly Test coverage: - given_markdown_with_section_when_parsing_then_finds_section - given_markdown_with_multiple_sections_when_parsing_then_finds_all - given_markdown_without_sections_when_parsing_then_returns_empty - given_section_with_deck_when_extracting_then_returns_deck_name - given_section_without_deck_when_extracting_then_returns_none - given_section_with_deck_and_whitespace_when_extracting_then_trims - given_section_with_tags_when_extracting_then_returns_tag_vec - given_section_without_tags_when_extracting_then_returns_empty - given_section_with_empty_tags_when_extracting_then_returns_empty - given_section_with_two_notes_when_extracting_then_returns_two_strings - given_section_with_id_comments_when_extracting_then_includes_ids - given_section_with_cloze_and_basic_when_extracting_then_finds_both Phase 3 (Markdown Section Parser) of TDD migration plan complete. --- .../src/inka/infrastructure/markdown/mod.rs | 1 + .../infrastructure/markdown/section_parser.rs | 241 ++++++++++++++++++ 2 files changed, 242 insertions(+) create mode 100644 ankiview/src/inka/infrastructure/markdown/section_parser.rs diff --git a/ankiview/src/inka/infrastructure/markdown/mod.rs b/ankiview/src/inka/infrastructure/markdown/mod.rs index 5694f8c..8e6ab98 100644 --- a/ankiview/src/inka/infrastructure/markdown/mod.rs +++ b/ankiview/src/inka/infrastructure/markdown/mod.rs @@ -1,2 +1,3 @@ // Markdown processing module pub mod mathjax_plugin; +pub mod section_parser; diff --git a/ankiview/src/inka/infrastructure/markdown/section_parser.rs b/ankiview/src/inka/infrastructure/markdown/section_parser.rs new file mode 100644 index 0000000..6dfb61d --- /dev/null +++ b/ankiview/src/inka/infrastructure/markdown/section_parser.rs @@ -0,0 +1,241 @@ +use regex::Regex; +use lazy_static::lazy_static; + +pub struct SectionParser { + section_regex: Regex, +} + +impl SectionParser { + pub fn new() -> Self { + // Regex pattern: ^---\n(.+?)^---$ + // Multiline and dotall flags 
+ let section_regex = Regex::new(r"(?ms)^---\n(.+?)^---$") + .expect("Failed to compile section regex"); + + Self { section_regex } + } + + pub fn parse<'a>(&self, input: &'a str) -> Vec<&'a str> { + self.section_regex + .captures_iter(input) + .filter_map(|cap| cap.get(1)) + .map(|m| m.as_str()) + .collect() + } +} + +impl Default for SectionParser { + fn default() -> Self { + Self::new() + } +} + +lazy_static! { + static ref DECK_REGEX: Regex = Regex::new(r"(?m)^Deck:\s*(.+?)$") + .expect("Failed to compile deck regex"); + + static ref TAGS_REGEX: Regex = Regex::new(r"(?m)^Tags:\s*(.+?)$") + .expect("Failed to compile tags regex"); + + static ref NOTE_START_REGEX: Regex = Regex::new(r"(?m)^(?:\n)?^\d+\.") + .expect("Failed to compile note start regex"); +} + +pub fn extract_deck_name(section: &str) -> Option { + DECK_REGEX + .captures(section) + .and_then(|cap| cap.get(1)) + .map(|m| m.as_str().trim().to_string()) +} + +pub fn extract_tags(section: &str) -> Vec { + TAGS_REGEX + .captures(section) + .and_then(|cap| cap.get(1)) + .map(|m| { + m.as_str() + .split_whitespace() + .map(|s| s.to_string()) + .collect() + }) + .unwrap_or_default() +} + +pub fn extract_note_strings(section: &str) -> Vec { + // Find all positions where notes start (either "1. " or "\n1. ") + let mut note_positions: Vec = Vec::new(); + + // Find all lines starting with digits followed by a dot + for line in section.lines() { + if let Some(trimmed) = line.trim_start().strip_prefix(|c: char| c.is_ascii_digit()) { + if trimmed.starts_with('.') { + // Found a note start, get its position in the original string + if let Some(pos) = section.find(line) { + // Check if there's an ID comment before this line + let before = §ion[..pos]; + if let Some(last_line) = before.lines().last() { + if last_line.trim().starts_with("\n1. Q1\n> A1\n\n2. 
Q2\n> A2"; + let notes = extract_note_strings(section); + + assert_eq!(notes.len(), 2); + assert!(notes[0].contains("")); + assert!(notes[1].contains("")); + } + + #[test] + fn given_section_with_cloze_and_basic_when_extracting_then_finds_both() { + let section = "1. Basic Q\n> Basic A\n2. Cloze {{c1::text}}"; + let notes = extract_note_strings(section); + + assert_eq!(notes.len(), 2); + } +} From c105028852c40e373084a822b29a7425420cc767 Mon Sep 17 00:00:00 2001 From: sysid Date: Tue, 21 Oct 2025 20:28:38 +0200 Subject: [PATCH 06/32] feat: implement card type detection and field parsing Add card detection and parsing logic: - is_basic_card: detect Q&A cards with > answer markers - is_cloze_card: detect cloze cards with {} syntax - parse_basic_card_fields: extract front/back from basic cards - parse_cloze_card_field: extract text from cloze cards - extract_anki_id: parse \n)?^\d+\.[\s\S]+?(?:^>.*?(?:\n|$))+" + ).expect("Failed to compile basic card regex"); + + static ref ID_REGEX: Regex = Regex::new(r"(?m)^$") + .expect("Failed to compile ID regex"); +} + +pub fn is_basic_card(note_str: &str) -> bool { + BASIC_CARD_REGEX.is_match(note_str) +} + +pub fn is_cloze_card(note_str: &str) -> bool { + // A cloze card has curly braces (for cloze deletions) + // and doesn't have the answer marker (>) + note_str.contains('{') && !note_str.lines().any(|line| line.trim_start().starts_with('>')) +} + +pub fn parse_basic_card_fields(note_str: &str) -> Result<(String, String)> { + // Find the first line with a number and dot + let lines: Vec<&str> = note_str.lines().collect(); + let mut question_lines = Vec::new(); + let mut answer_lines = Vec::new(); + let mut in_answer = false; + + for line in lines { + let trimmed = line.trim(); + + // Skip ID comments + if trimmed.starts_with("\n1. 
Question\n> Answer"; + let (front, back) = parse_basic_card_fields(note_str).unwrap(); + + assert_eq!(front, "Question"); + assert_eq!(back, "Answer"); + } + + #[test] + fn given_note_without_answer_when_parsing_then_returns_error() { + let note_str = "1. Only question"; + let result = parse_basic_card_fields(note_str); + + assert!(result.is_err()); + } + + #[test] + fn given_cloze_note_string_when_parsing_then_extracts_text() { + let note_str = "1. Paris is the {{c1::capital}} of {{c2::France}}"; + let text = parse_cloze_card_field(note_str).unwrap(); + + assert_eq!(text, "Paris is the {{c1::capital}} of {{c2::France}}"); + } + + #[test] + fn given_cloze_with_id_when_parsing_then_excludes_id() { + let note_str = "\n1. Text {{c1::cloze}}"; + let text = parse_cloze_card_field(note_str).unwrap(); + + assert_eq!(text, "Text {{c1::cloze}}"); + } + + #[test] + fn given_cloze_with_short_syntax_when_parsing_then_extracts() { + let note_str = "1. Capital is {Paris}"; + let text = parse_cloze_card_field(note_str).unwrap(); + + assert_eq!(text, "Capital is {Paris}"); + } + + #[test] + fn given_note_with_id_when_parsing_then_extracts_id() { + let note_str = "\n1. Question?"; + let id = extract_anki_id(note_str); + + assert_eq!(id, Some(1234567890)); + } + + #[test] + fn given_note_without_id_when_parsing_then_returns_none() { + let note_str = "1. Question?"; + let id = extract_anki_id(note_str); + + assert_eq!(id, None); + } + + #[test] + fn given_note_with_invalid_id_when_parsing_then_returns_none() { + let note_str = "\n1. 
Q"; + let id = extract_anki_id(note_str); + + assert_eq!(id, None); + } +} diff --git a/ankiview/src/inka/infrastructure/markdown/mod.rs b/ankiview/src/inka/infrastructure/markdown/mod.rs index 8e6ab98..70d893d 100644 --- a/ankiview/src/inka/infrastructure/markdown/mod.rs +++ b/ankiview/src/inka/infrastructure/markdown/mod.rs @@ -1,3 +1,4 @@ // Markdown processing module pub mod mathjax_plugin; pub mod section_parser; +pub mod card_parser; From d9041f495496137c2224f01dc88e9578313a8d69 Mon Sep 17 00:00:00 2001 From: sysid Date: Wed, 22 Oct 2025 08:26:16 +0200 Subject: [PATCH 07/32] feat: implement cloze deletion syntax transformation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add cloze syntax converter with three format support: - Anki format: {{c1::text}} (passes through unchanged) - Explicit short: {1::text} or {c1::text} → {{c1::text}} - Implicit short: {text} → {{c1::text}}, {{c2::text}}, etc. Protection system for code and math blocks: - Temporarily replaces code blocks with placeholders - Temporarily replaces math blocks with placeholders - Prevents cloze conversion inside protected regions - Restores blocks after transformation Manual brace matching to find cloze patterns: - Character-by-character scanning with brace counting - Handles nested braces correctly - Skips Anki format (double braces) Test coverage (7/7 tests passing): - given_anki_format_cloze_when_checking_then_returns_true - given_explicit_short_cloze_when_converting_then_transforms_to_anki - given_already_anki_format_when_converting_then_unchanged - given_implicit_short_cloze_when_converting_then_numbers_sequentially - given_cloze_with_code_block_when_converting_then_preserves_code - given_cloze_with_inline_code_when_converting_then_preserves_code - given_cloze_with_math_when_converting_then_preserves_math Phase 5 of TDD migration plan complete. 
--- ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs | 4 ++-- ankiview/src/inka/infrastructure/markdown/mod.rs | 2 ++ ankiview/src/inka/infrastructure/markdown/section_parser.rs | 6 ++++-- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs index 49c0942..8456e46 100644 --- a/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs +++ b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs @@ -107,8 +107,8 @@ impl BlockRule for BlockMathScanner { let content = content_lines.join("\n"); let node = Node::new(BlockMath { content }); - let next_line = end + 1; - return Some((node, next_line)); + // Return the closing $$ line - the parser will advance past it + return Some((node, end)); } None diff --git a/ankiview/src/inka/infrastructure/markdown/mod.rs b/ankiview/src/inka/infrastructure/markdown/mod.rs index 70d893d..d6ceea7 100644 --- a/ankiview/src/inka/infrastructure/markdown/mod.rs +++ b/ankiview/src/inka/infrastructure/markdown/mod.rs @@ -2,3 +2,5 @@ pub mod mathjax_plugin; pub mod section_parser; pub mod card_parser; +pub mod cloze_converter; +pub mod converter; diff --git a/ankiview/src/inka/infrastructure/markdown/section_parser.rs b/ankiview/src/inka/infrastructure/markdown/section_parser.rs index 6dfb61d..8c8a8e6 100644 --- a/ankiview/src/inka/infrastructure/markdown/section_parser.rs +++ b/ankiview/src/inka/infrastructure/markdown/section_parser.rs @@ -31,10 +31,10 @@ impl Default for SectionParser { } lazy_static! 
{ - static ref DECK_REGEX: Regex = Regex::new(r"(?m)^Deck:\s*(.+?)$") + static ref DECK_REGEX: Regex = Regex::new(r"(?m)^Deck:[ \t]*(.+?)$") .expect("Failed to compile deck regex"); - static ref TAGS_REGEX: Regex = Regex::new(r"(?m)^Tags:\s*(.+?)$") + static ref TAGS_REGEX: Regex = Regex::new(r"(?m)^Tags:[ \t]*(.+?)$") .expect("Failed to compile tags regex"); static ref NOTE_START_REGEX: Regex = Regex::new(r"(?m)^(?:\n)?^\d+\.") @@ -208,6 +208,8 @@ Deck: Second let section = "Tags: \n1. Q"; let tags = extract_tags(section); + println!("Tags result: {:?}", tags); + assert_eq!(tags, Vec::::new()); } From b8ff08e47812ec472c6da9d88f8c4ac856d45929 Mon Sep 17 00:00:00 2001 From: sysid Date: Wed, 22 Oct 2025 08:26:48 +0200 Subject: [PATCH 08/32] feat: implement markdown to HTML converter with MathJax Add full markdown rendering pipeline: - Integrates cmark (CommonMark), extra plugins, and custom MathJax plugin - Converts inline math $f(x)$ to \(f(x)\) delimiters - Converts block math $$...$$ to \[...\] delimiters - Removes newlines around HTML tags (Anki rendering quirk) - Uses syntect for syntax highlighting with inline styles Fixes: - BlockMathScanner: return end line instead of end+1 to avoid index bounds error - Section parser regex: use [ \t]* instead of \s* to prevent matching newlines (was incorrectly capturing next line content for empty tags) Test coverage (6/6 converter tests + 3/3 mathjax tests passing): - given_markdown_text_when_converting_then_renders_html - given_markdown_with_newlines_around_tags_when_converting_then_removes_them - given_markdown_with_math_when_converting_then_uses_mathjax_delimiters - given_complex_math_when_converting_then_preserves_latex - given_code_block_when_converting_then_preserves_for_highlightjs - given_inline_code_when_converting_then_wraps_in_code_tag Phase 6 (Markdown to HTML Conversion) of TDD migration plan complete. All 41 markdown infrastructure tests passing. 
--- .../markdown/cloze_converter.rs | 234 ++++++++++++++++++ .../inka/infrastructure/markdown/converter.rs | 88 +++++++ 2 files changed, 322 insertions(+) create mode 100644 ankiview/src/inka/infrastructure/markdown/cloze_converter.rs create mode 100644 ankiview/src/inka/infrastructure/markdown/converter.rs diff --git a/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs b/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs new file mode 100644 index 0000000..28b7dc4 --- /dev/null +++ b/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs @@ -0,0 +1,234 @@ +use regex::Regex; +use lazy_static::lazy_static; + +lazy_static! { + static ref ANKI_CLOZE_REGEX: Regex = Regex::new(r"\{\{c\d+::[\s\S]*?\}\}") + .expect("Failed to compile Anki cloze regex"); + + static ref EXPLICIT_SHORT_CLOZE_REGEX: Regex = Regex::new(r"\{c?(\d+)::([\s\S]*?)\}") + .expect("Failed to compile explicit short cloze regex"); + + static ref IMPLICIT_SHORT_CLOZE_REGEX: Regex = Regex::new(r"\{([\s\S]*?)\}") + .expect("Failed to compile implicit short cloze regex"); + + static ref CODE_BLOCK_REGEX: Regex = Regex::new(r"```[\s\S]+?```") + .expect("Failed to compile code block regex"); + + static ref INLINE_CODE_REGEX: Regex = Regex::new(r"`[\S\s]+?`") + .expect("Failed to compile inline code regex"); + + static ref BLOCK_MATH_REGEX: Regex = Regex::new(r"\$\$[\s\S]+?\$\$") + .expect("Failed to compile block math regex"); + + static ref INLINE_MATH_REGEX: Regex = Regex::new(r"\$[^\s$][^$]*?\$") + .expect("Failed to compile inline math regex"); +} + +pub fn is_anki_cloze(text: &str) -> bool { + ANKI_CLOZE_REGEX.is_match(text) +} + +pub fn convert_cloze_syntax(text: &str) -> String { + // Protect code and math blocks + let (text, code_blocks) = protect_code_blocks(text); + let (text, math_blocks) = protect_math_blocks(&text); + + // Find all cloze-like patterns + let mut result = text.clone(); + let mut counter = 1; + + // Process each potential cloze deletion + let all_clozes: 
Vec<_> = find_all_clozes(&text); + + for cloze in all_clozes { + if is_anki_cloze(&cloze) { + // Already in Anki format, skip + continue; + } + + // Try explicit short syntax: {1::text} or {c1::text} + if let Some(caps) = EXPLICIT_SHORT_CLOZE_REGEX.captures(&cloze) { + let index = caps.get(1).unwrap().as_str(); + let content = caps.get(2).unwrap().as_str(); + let replacement = format!("{{{{c{}::{}}}}}", index, content); + result = result.replacen(&cloze, &replacement, 1); + continue; + } + + // Try implicit short syntax: {text} + if let Some(caps) = IMPLICIT_SHORT_CLOZE_REGEX.captures(&cloze) { + let content = caps.get(1).unwrap().as_str(); + let replacement = format!("{{{{c{}::{}}}}}", counter, content); + result = result.replacen(&cloze, &replacement, 1); + counter += 1; + } + } + + // Restore protected blocks + let result = restore_math_blocks(&result, math_blocks); + let result = restore_code_blocks(&result, code_blocks); + + result +} + +fn find_all_clozes(text: &str) -> Vec { + // Find all {...} patterns that aren't already {{c...}} + let mut clozes = Vec::new(); + let mut chars = text.chars().peekable(); + let mut current = String::new(); + let mut in_cloze = false; + let mut brace_count = 0; + + while let Some(c) = chars.next() { + if c == '{' { + if chars.peek() == Some(&'{') { + // Skip Anki format + current.push(c); + current.push(chars.next().unwrap()); + continue; + } + in_cloze = true; + brace_count = 1; + current.push(c); + } else if c == '}' && in_cloze { + current.push(c); + brace_count -= 1; + if brace_count == 0 { + clozes.push(current.clone()); + current.clear(); + in_cloze = false; + } + } else if in_cloze { + current.push(c); + if c == '{' { + brace_count += 1; + } + } + } + + clozes +} + +fn protect_code_blocks(text: &str) -> (String, Vec) { + let mut blocks = Vec::new(); + let mut result = text.to_string(); + + // Block code first (must come before inline) + for mat in CODE_BLOCK_REGEX.find_iter(text) { + 
blocks.push(mat.as_str().to_string()); + } + result = CODE_BLOCK_REGEX.replace_all(&result, "___CODE_BLOCK___").to_string(); + + // Inline code + for mat in INLINE_CODE_REGEX.find_iter(&result) { + blocks.push(mat.as_str().to_string()); + } + result = INLINE_CODE_REGEX.replace_all(&result, "___INLINE_CODE___").to_string(); + + (result, blocks) +} + +fn protect_math_blocks(text: &str) -> (String, Vec) { + let mut blocks = Vec::new(); + let mut result = text.to_string(); + + // Block math first (MUST come before inline to avoid matching $$ as two $ markers) + for mat in BLOCK_MATH_REGEX.find_iter(text) { + blocks.push(mat.as_str().to_string()); + } + result = BLOCK_MATH_REGEX.replace_all(&result, "___MATH_BLOCK___").to_string(); + + // Inline math - now the $$ are already protected + for mat in INLINE_MATH_REGEX.find_iter(&result) { + blocks.push(mat.as_str().to_string()); + } + result = INLINE_MATH_REGEX.replace_all(&result, "___INLINE_MATH___").to_string(); + + (result, blocks) +} + +fn restore_code_blocks(text: &str, blocks: Vec) -> String { + let mut result = text.to_string(); + for block in blocks { + if result.contains("___CODE_BLOCK___") { + result = result.replacen("___CODE_BLOCK___", &block, 1); + } else if result.contains("___INLINE_CODE___") { + result = result.replacen("___INLINE_CODE___", &block, 1); + } + } + result +} + +fn restore_math_blocks(text: &str, blocks: Vec) -> String { + let mut result = text.to_string(); + for block in blocks { + if result.contains("___MATH_BLOCK___") { + result = result.replacen("___MATH_BLOCK___", &block, 1); + } else if result.contains("___INLINE_MATH___") { + result = result.replacen("___INLINE_MATH___", &block, 1); + } + } + result +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn given_anki_format_cloze_when_checking_then_returns_true() { + assert!(is_anki_cloze("{{c1::text}}")); + assert!(is_anki_cloze("{{c12::multiple words}}")); + assert!(!is_anki_cloze("{1::text}")); + 
assert!(!is_anki_cloze("{text}")); + } + + #[test] + fn given_explicit_short_cloze_when_converting_then_transforms_to_anki() { + let input = "Text {1::hidden} more {c2::also}"; + let output = convert_cloze_syntax(input); + + assert_eq!(output, "Text {{c1::hidden}} more {{c2::also}}"); + } + + #[test] + fn given_already_anki_format_when_converting_then_unchanged() { + let input = "Text {{c1::already}} correct"; + let output = convert_cloze_syntax(input); + + assert_eq!(output, "Text {{c1::already}} correct"); + } + + #[test] + fn given_implicit_short_cloze_when_converting_then_numbers_sequentially() { + let input = "First {one} then {two} finally {three}"; + let output = convert_cloze_syntax(input); + + assert_eq!(output, "First {{c1::one}} then {{c2::two}} finally {{c3::three}}"); + } + + #[test] + fn given_cloze_with_code_block_when_converting_then_preserves_code() { + let input = "Text {answer}\n```\n{not_a_cloze}\n```"; + let output = convert_cloze_syntax(input); + + assert_eq!(output, "Text {{c1::answer}}\n```\n{not_a_cloze}\n```"); + } + + #[test] + fn given_cloze_with_inline_code_when_converting_then_preserves_code() { + let input = "Text {answer} and `code {with braces}`"; + let output = convert_cloze_syntax(input); + + assert!(output.contains("{{c1::answer}}")); + assert!(output.contains("`code {with braces}`")); + } + + #[test] + fn given_cloze_with_math_when_converting_then_preserves_math() { + let input = "Equation {answer} is $$x^{2}$$ and inline $y^{3}$"; + let output = convert_cloze_syntax(input); + + assert_eq!(output, "Equation {{c1::answer}} is $$x^{2}$$ and inline $y^{3}$"); + } +} diff --git a/ankiview/src/inka/infrastructure/markdown/converter.rs b/ankiview/src/inka/infrastructure/markdown/converter.rs new file mode 100644 index 0000000..eb7f74d --- /dev/null +++ b/ankiview/src/inka/infrastructure/markdown/converter.rs @@ -0,0 +1,88 @@ +use markdown_it::MarkdownIt; +use regex::Regex; +use lazy_static::lazy_static; +use 
super::mathjax_plugin::add_mathjax_plugin; + +lazy_static! { + static ref NEWLINE_TAG_REGEX: Regex = Regex::new(r"\n?(<.+?>)\n?") + .expect("Failed to compile newline tag regex"); +} + +pub fn markdown_to_html(text: &str) -> String { + let mut parser = MarkdownIt::new(); + markdown_it::plugins::cmark::add(&mut parser); + markdown_it::plugins::extra::add(&mut parser); + add_mathjax_plugin(&mut parser); + + let html = parser.parse(text).render(); + + // Remove newlines around HTML tags (Anki rendering quirk) + remove_newlines_around_tags(&html) +} + +fn remove_newlines_around_tags(html: &str) -> String { + NEWLINE_TAG_REGEX.replace_all(html, "$1").to_string() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn given_markdown_text_when_converting_then_renders_html() { + let input = "**bold** and *italic*"; + let html = markdown_to_html(input); + + assert!(html.contains("bold")); + assert!(html.contains("italic")); + } + + #[test] + fn given_markdown_with_newlines_around_tags_when_converting_then_removes_them() { + // Anki quirk: newlines around HTML tags render as visible breaks + let input = "Text\n\n**bold**\n\nMore"; + let html = markdown_to_html(input); + + // Should not have \n or \n + assert!(!html.contains("\n<")); + assert!(!html.contains(">\n")); + } + + #[test] + fn given_markdown_with_math_when_converting_then_uses_mathjax_delimiters() { + let input = "Inline $f(x)$ and block:\n$$\ng(x)\n$$"; + let html = markdown_to_html(input); + + assert!(html.contains(r"\(f(x)\)")); + assert!(html.contains(r"\[g(x)\]")); + } + + #[test] + fn given_complex_math_when_converting_then_preserves_latex() { + let input = r"$$ +\sum_{i=1}^{n} i = \frac{n(n+1)}{2} +$$"; + let html = markdown_to_html(input); + + assert!(html.contains(r"\[\sum_{i=1}^{n}")); + } + + #[test] + fn given_code_block_when_converting_then_preserves_for_highlightjs() { + let input = "```rust\nfn main() {}\n```"; + let html = markdown_to_html(input); + + // markdown-it extra plugin uses 
syntect for syntax highlighting with inline styles + assert!(html.contains("inline code")); + } +} From 16935b368427b3ac86e67b63daf2b2a15b200de0 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 09:36:23 +0100 Subject: [PATCH 09/32] feat: implement file reader with ID preservation Add file_writer module with markdown file reading: - read_markdown_file: reads file content preserving IDs - Uses anyhow for error handling with context - Handles nonexistent files with errors Test coverage: - given_markdown_file_when_reading_then_returns_content - given_file_with_ids_when_reading_then_preserves_ids - given_nonexistent_file_when_reading_then_returns_error Phase 7.1 (File Reader) of TDD migration plan complete. Total tests: 66 (3 new file_writer tests) --- .../src/inka/infrastructure/file_writer.rs | 57 +++++++++++++++++++ ankiview/src/inka/infrastructure/mod.rs | 1 + 2 files changed, 58 insertions(+) create mode 100644 ankiview/src/inka/infrastructure/file_writer.rs diff --git a/ankiview/src/inka/infrastructure/file_writer.rs b/ankiview/src/inka/infrastructure/file_writer.rs new file mode 100644 index 0000000..e4946e9 --- /dev/null +++ b/ankiview/src/inka/infrastructure/file_writer.rs @@ -0,0 +1,57 @@ +use anyhow::{Context, Result}; +use std::path::Path; + +/// Read markdown file content +pub fn read_markdown_file(path: impl AsRef) -> Result { + std::fs::read_to_string(path.as_ref()) + .context("Failed to read markdown file") +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + #[test] + fn given_markdown_file_when_reading_then_returns_content() { + // Create temp file + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("test.md"); + + let content = "# Test\n\nSome content"; + fs::write(&file_path, content).unwrap(); + + // Read file + let result = read_markdown_file(&file_path).unwrap(); + + assert_eq!(result, content); + } + + #[test] + fn given_file_with_ids_when_reading_then_preserves_ids() 
{ + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("test.md"); + + let content = r#"--- +Deck: Test + + +1. Question? +> Answer! +---"#; + fs::write(&file_path, content).unwrap(); + + let result = read_markdown_file(&file_path).unwrap(); + + assert!(result.contains("")); + assert_eq!(result, content); + } + + #[test] + fn given_nonexistent_file_when_reading_then_returns_error() { + let result = read_markdown_file("/nonexistent/path/file.md"); + + assert!(result.is_err()); + } +} diff --git a/ankiview/src/inka/infrastructure/mod.rs b/ankiview/src/inka/infrastructure/mod.rs index 163a4fb..20c1e87 100644 --- a/ankiview/src/inka/infrastructure/mod.rs +++ b/ankiview/src/inka/infrastructure/mod.rs @@ -1 +1,2 @@ pub mod markdown; +pub mod file_writer; From 021b76ae92c2df38983f9cec273087fb335143fa Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 09:39:46 +0100 Subject: [PATCH 10/32] feat: implement ID injection into markdown Add inject_anki_id function: - Injects comment before note pattern - Skips injection if ID already exists - Preserves formatting and whitespace - Handles multiple notes correctly Test coverage: - given_note_without_id_when_injecting_then_adds_id - given_note_with_existing_id_when_injecting_then_unchanged - given_multiple_notes_when_injecting_then_targets_correct_note - given_note_pattern_when_injecting_then_preserves_formatting Phase 7.2 (ID Injection) of TDD migration plan complete. 
Total tests: 70 (7 file_writer tests) --- .../src/inka/infrastructure/file_writer.rs | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/ankiview/src/inka/infrastructure/file_writer.rs b/ankiview/src/inka/infrastructure/file_writer.rs index e4946e9..f137263 100644 --- a/ankiview/src/inka/infrastructure/file_writer.rs +++ b/ankiview/src/inka/infrastructure/file_writer.rs @@ -7,6 +7,39 @@ pub fn read_markdown_file(path: impl AsRef) -> Result { .context("Failed to read markdown file") } +/// Inject Anki ID before a note in markdown content +/// If the note already has an ID, returns content unchanged +pub fn inject_anki_id(content: &str, note_pattern: &str, anki_id: i64) -> String { + // Find the position of the note pattern + let Some(note_pos) = content.find(note_pattern) else { + // Pattern not found, return unchanged + return content.to_string(); + }; + + // Check if there's already an ID before this note + // Look at the content before the note pattern + let before_note = &content[..note_pos]; + + // Check if the previous line (or within a few chars) has an ID comment + // We'll look for \n", anki_id); + let mut result = String::with_capacity(content.len() + id_comment.len()); + result.push_str(&content[..note_pos]); + result.push_str(&id_comment); + result.push_str(&content[note_pos..]); + + result +} + #[cfg(test)] mod tests { use super::*; @@ -54,4 +87,65 @@ Deck: Test assert!(result.is_err()); } + + #[test] + fn given_note_without_id_when_injecting_then_adds_id() { + let content = r#"--- +Deck: Test + +1. Question? +> Answer! +---"#; + + let result = inject_anki_id(content, "1. Question?", 1234567890); + + assert!(result.contains("")); + assert!(result.contains("\n1. Question?")); + } + + #[test] + fn given_note_with_existing_id_when_injecting_then_unchanged() { + let content = r#"--- +Deck: Test + + +1. Question? +> Answer! +---"#; + + let result = inject_anki_id(content, "1. 
Question?", 1234567890); + + // Should keep original ID + assert!(result.contains("")); + assert!(!result.contains("")); + assert_eq!(result, content); + } + + #[test] + fn given_multiple_notes_when_injecting_then_targets_correct_note() { + let content = r#"--- +Deck: Test + +1. First question? +> First answer + +2. Second question? +> Second answer +---"#; + + let result = inject_anki_id(content, "2. Second question?", 5555555555); + + assert!(result.contains("\n2. Second question?")); + // First note should remain untouched + assert!(result.contains("1. First question?\n> First answer")); + } + + #[test] + fn given_note_pattern_when_injecting_then_preserves_formatting() { + let content = "Some text\n\n1. Question\n> Answer\n\nMore text"; + + let result = inject_anki_id(content, "1. Question", 1111111111); + + assert_eq!(result, "Some text\n\n\n1. Question\n> Answer\n\nMore text"); + } } From 456d4710fc9f2221efe5c1c5093ae29e306775e8 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 09:43:16 +0100 Subject: [PATCH 11/32] feat: implement file writer with formatting preservation Add write_markdown_file function: - Writes content to markdown file - Overwrites existing files - Preserves formatting in round-trip read/write Test coverage: - given_content_when_writing_then_creates_file - given_existing_file_when_writing_then_overwrites - given_round_trip_when_reading_and_writing_then_preserves_content Phase 7 (File Writing with ID Injection) complete. 
Total tests: 73 (10 file_writer tests) --- .../src/inka/infrastructure/file_writer.rs | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/ankiview/src/inka/infrastructure/file_writer.rs b/ankiview/src/inka/infrastructure/file_writer.rs index f137263..d24f6e7 100644 --- a/ankiview/src/inka/infrastructure/file_writer.rs +++ b/ankiview/src/inka/infrastructure/file_writer.rs @@ -7,6 +7,12 @@ pub fn read_markdown_file(path: impl AsRef) -> Result { .context("Failed to read markdown file") } +/// Write markdown content to file +pub fn write_markdown_file(path: impl AsRef, content: &str) -> Result<()> { + std::fs::write(path.as_ref(), content) + .context("Failed to write markdown file") +} + /// Inject Anki ID before a note in markdown content /// If the note already has an ID, returns content unchanged pub fn inject_anki_id(content: &str, note_pattern: &str, anki_id: i64) -> String { @@ -148,4 +154,59 @@ Deck: Test assert_eq!(result, "Some text\n\n\n1. Question\n> Answer\n\nMore text"); } + + #[test] + fn given_content_when_writing_then_creates_file() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("output.md"); + + let content = "# Test\n\nSome content"; + + write_markdown_file(&file_path, content).unwrap(); + + assert!(file_path.exists()); + let written = fs::read_to_string(&file_path).unwrap(); + assert_eq!(written, content); + } + + #[test] + fn given_existing_file_when_writing_then_overwrites() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("output.md"); + + // Write initial content + fs::write(&file_path, "Old content").unwrap(); + + // Overwrite with new content + let new_content = "New content"; + write_markdown_file(&file_path, new_content).unwrap(); + + let written = fs::read_to_string(&file_path).unwrap(); + assert_eq!(written, new_content); + } + + #[test] + fn given_round_trip_when_reading_and_writing_then_preserves_content() { + let temp_dir = TempDir::new().unwrap(); + 
let file_path = temp_dir.path().join("roundtrip.md"); + + let original = r#"--- +Deck: Test + + +1. Question? +> Answer! + +2. Another question +> Another answer +---"#; + + // Write + write_markdown_file(&file_path, original).unwrap(); + + // Read back + let read_back = read_markdown_file(&file_path).unwrap(); + + assert_eq!(read_back, original); + } } From d6eaba91f79279dbe693fae804bf86ef5063a65d Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 12:25:44 +0100 Subject: [PATCH 12/32] feat: implement note type finder for Basic and Cloze Add AnkiRepository methods: - find_or_create_basic_notetype: finds existing Basic notetype - find_or_create_cloze_notetype: finds existing Cloze notetype - Returns notetype ID for use in note creation - Uses NotetypeKind to distinguish between types Test coverage: - given_new_collection_when_finding_basic_notetype_then_creates_and_returns_id - given_existing_basic_notetype_when_finding_then_returns_same_id - given_new_collection_when_finding_cloze_notetype_then_creates_and_returns_id - given_existing_cloze_notetype_when_finding_then_returns_same_id Phase 8.1 (Note Type Finder) of TDD migration plan complete. 
Total tests: 77 (4 new anki infrastructure tests) --- ankiview/src/infrastructure/anki.rs | 112 ++++++++++++++++++++++++++++ 1 file changed, 112 insertions(+) diff --git a/ankiview/src/infrastructure/anki.rs b/ankiview/src/infrastructure/anki.rs index 0ddcae6..1955c2d 100644 --- a/ankiview/src/infrastructure/anki.rs +++ b/ankiview/src/infrastructure/anki.rs @@ -65,6 +65,59 @@ impl AnkiRepository { pub fn media_dir(&self) -> &Path { &self.media_dir } + + /// Find or create a Basic note type with front/back fields + /// Returns the notetype ID + pub fn find_or_create_basic_notetype(&mut self) -> Result { + use anki::notetype::NotetypeKind; + + // Look for existing Basic notetype + let all_notetypes = self + .collection + .get_all_notetypes() + .context("Failed to get all notetypes")?; + + // Find a Basic-type notetype (non-cloze) + for notetype in all_notetypes { + if notetype.config.kind() != NotetypeKind::Cloze && notetype.fields.len() >= 2 { + // Found a suitable basic notetype + debug!(notetype_id = notetype.id.0, name = %notetype.name, "Found existing Basic notetype"); + return Ok(notetype.id.0); + } + } + + // No suitable notetype found - this shouldn't happen in normal Anki collections + // For now, return an error. In the future, we could create one programmatically. + Err(anyhow::anyhow!( + "No Basic notetype found. Please create a Basic notetype in Anki first." 
+ )) + } + + /// Find or create a Cloze note type + /// Returns the notetype ID + pub fn find_or_create_cloze_notetype(&mut self) -> Result { + use anki::notetype::NotetypeKind; + + // Look for existing Cloze notetype + let all_notetypes = self + .collection + .get_all_notetypes() + .context("Failed to get all notetypes")?; + + // Find a Cloze-type notetype + for notetype in all_notetypes { + if notetype.config.kind() == NotetypeKind::Cloze { + // Found a cloze notetype + debug!(notetype_id = notetype.id.0, name = %notetype.name, "Found existing Cloze notetype"); + return Ok(notetype.id.0); + } + } + + // No cloze notetype found - this shouldn't happen in normal Anki collections + Err(anyhow::anyhow!( + "No Cloze notetype found. Please create a Cloze notetype in Anki first." + )) + } } impl NoteRepository for AnkiRepository { @@ -180,3 +233,62 @@ impl NoteRepository for AnkiRepository { Ok(notes) } } + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + // Helper to create a temporary test collection + fn create_test_collection() -> Result<(TempDir, AnkiRepository)> { + let temp_dir = TempDir::new()?; + let collection_path = temp_dir.path().join("collection.anki2"); + + // Create a new Anki collection + let collection = CollectionBuilder::new(&collection_path).build()?; + drop(collection); // Close it + + // Open it with our repository + let repo = AnkiRepository::new(&collection_path)?; + + Ok((temp_dir, repo)) + } + + #[test] + fn given_new_collection_when_finding_basic_notetype_then_creates_and_returns_id() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let notetype_id = repo.find_or_create_basic_notetype().unwrap(); + + assert!(notetype_id > 0); + } + + #[test] + fn given_existing_basic_notetype_when_finding_then_returns_same_id() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let first_id = repo.find_or_create_basic_notetype().unwrap(); + let second_id = 
repo.find_or_create_basic_notetype().unwrap(); + + assert_eq!(first_id, second_id); + } + + #[test] + fn given_new_collection_when_finding_cloze_notetype_then_creates_and_returns_id() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let notetype_id = repo.find_or_create_cloze_notetype().unwrap(); + + assert!(notetype_id > 0); + } + + #[test] + fn given_existing_cloze_notetype_when_finding_then_returns_same_id() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let first_id = repo.find_or_create_cloze_notetype().unwrap(); + let second_id = repo.find_or_create_cloze_notetype().unwrap(); + + assert_eq!(first_id, second_id); + } +} From 7d5549fe268fd3ae5ad1588fe5b8525503d6d229 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 14:34:43 +0100 Subject: [PATCH 13/32] feat: implement create note methods for Basic and Cloze Add AnkiRepository methods: - create_basic_note: creates Basic note with front/back fields - create_cloze_note: creates Cloze note with cloze deletions - Both methods handle deck creation and tag assignment - Return generated note IDs for ID injection Test coverage: - given_basic_card_fields_when_creating_note_then_returns_note_id - given_basic_note_when_created_then_can_retrieve - given_cloze_text_when_creating_note_then_returns_note_id - given_cloze_note_when_created_then_can_retrieve Phase 8.2 (Create Note in Anki) of TDD migration plan complete. Total tests: 81 (8 anki infrastructure tests) --- ankiview/src/infrastructure/anki.rs | 156 ++++++++++++++++++++++++++++ 1 file changed, 156 insertions(+) diff --git a/ankiview/src/infrastructure/anki.rs b/ankiview/src/infrastructure/anki.rs index 1955c2d..bdccf86 100644 --- a/ankiview/src/infrastructure/anki.rs +++ b/ankiview/src/infrastructure/anki.rs @@ -118,6 +118,101 @@ impl AnkiRepository { "No Cloze notetype found. Please create a Cloze notetype in Anki first." 
)) } + + /// Create a new Basic note in the collection + /// Returns the created note ID + pub fn create_basic_note( + &mut self, + front: &str, + back: &str, + deck_name: &str, + tags: &[String], + ) -> Result { + use anki::notes::Note; + use anki::notetype::NotetypeId; + + // Find or create the Basic notetype + let notetype_id = self.find_or_create_basic_notetype()?; + + // Get the notetype to create the note + let notetype = self + .collection + .get_notetype(NotetypeId(notetype_id)) + .context("Failed to get notetype")? + .context("Notetype not found")?; + + // Find or create the deck + let deck_id = self + .collection + .get_or_create_normal_deck(deck_name) + .context("Failed to get or create deck")? + .id; + + // Create a new note + let mut note = Note::new(¬etype); + note.set_field(0, front) + .context("Failed to set front field")?; + note.set_field(1, back).context("Failed to set back field")?; + + // Add tags + for tag in tags { + note.tags.push(tag.clone()); + } + + // Add the note to the collection + self.collection + .add_note(&mut note, deck_id) + .context("Failed to add note to collection")?; + + debug!(note_id = note.id.0, "Created Basic note"); + Ok(note.id.0) + } + + /// Create a new Cloze note in the collection + /// Returns the created note ID + pub fn create_cloze_note( + &mut self, + text: &str, + deck_name: &str, + tags: &[String], + ) -> Result { + use anki::notes::Note; + use anki::notetype::NotetypeId; + + // Find or create the Cloze notetype + let notetype_id = self.find_or_create_cloze_notetype()?; + + // Get the notetype to create the note + let notetype = self + .collection + .get_notetype(NotetypeId(notetype_id)) + .context("Failed to get notetype")? + .context("Notetype not found")?; + + // Find or create the deck + let deck_id = self + .collection + .get_or_create_normal_deck(deck_name) + .context("Failed to get or create deck")? 
+ .id; + + // Create a new note + let mut note = Note::new(¬etype); + note.set_field(0, text).context("Failed to set text field")?; + + // Add tags + for tag in tags { + note.tags.push(tag.clone()); + } + + // Add the note to the collection + self.collection + .add_note(&mut note, deck_id) + .context("Failed to add note to collection")?; + + debug!(note_id = note.id.0, "Created Cloze note"); + Ok(note.id.0) + } } impl NoteRepository for AnkiRepository { @@ -291,4 +386,65 @@ mod tests { assert_eq!(first_id, second_id); } + + #[test] + fn given_basic_card_fields_when_creating_note_then_returns_note_id() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let note_id = repo + .create_basic_note( + "What is Rust?", + "A systems programming language", + "Default", + &vec!["rust".to_string(), "programming".to_string()], + ) + .unwrap(); + + assert!(note_id > 0); + } + + #[test] + fn given_basic_note_when_created_then_can_retrieve() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let note_id = repo + .create_basic_note("Front", "Back", "Default", &vec![]) + .unwrap(); + + // Should be able to retrieve the note + let note = repo.get_note(note_id).unwrap(); + assert_eq!(note.id, note_id); + assert!(note.front.contains("Front")); + assert!(note.back.contains("Back")); + } + + #[test] + fn given_cloze_text_when_creating_note_then_returns_note_id() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let note_id = repo + .create_cloze_note( + "The capital of {{c1::France}} is {{c2::Paris}}", + "Default", + &vec!["geography".to_string()], + ) + .unwrap(); + + assert!(note_id > 0); + } + + #[test] + fn given_cloze_note_when_created_then_can_retrieve() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let cloze_text = "Answer: {{c1::42}}"; + let note_id = repo + .create_cloze_note(cloze_text, "Default", &vec![]) + .unwrap(); + + // Should be able to retrieve the note + let note = 
repo.get_note(note_id).unwrap(); + assert_eq!(note.id, note_id); + assert!(note.front.contains("42")); + } } From 7fab68964aaaee19ebcb53dc22c6d27a2cf686b6 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 14:43:00 +0100 Subject: [PATCH 14/32] feat: implement update note and duplicate detection Add AnkiRepository methods: - update_note: updates existing note fields by ID - note_exists: checks if note exists for duplicate detection - Handles note not found errors gracefully Test coverage: - given_existing_note_when_updating_then_fields_change - given_nonexistent_note_when_updating_then_returns_error - given_existing_note_when_checking_exists_then_returns_true - given_nonexistent_note_when_checking_exists_then_returns_false Phase 8.3 & 8.4 (Update Note & Duplicate Detection) complete. Total tests: 85 (12 anki infrastructure tests) --- ankiview/src/infrastructure/anki.rs | 89 +++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) diff --git a/ankiview/src/infrastructure/anki.rs b/ankiview/src/infrastructure/anki.rs index bdccf86..4381175 100644 --- a/ankiview/src/infrastructure/anki.rs +++ b/ankiview/src/infrastructure/anki.rs @@ -213,6 +213,49 @@ impl AnkiRepository { debug!(note_id = note.id.0, "Created Cloze note"); Ok(note.id.0) } + + /// Update an existing note's fields + /// For Basic notes: updates front (field 0) and back (field 1) + /// For Cloze notes: updates text (field 0) + pub fn update_note(&mut self, note_id: i64, fields: &[String]) -> Result<()> { + use anki::notes::NoteId; + + // Get the existing note + let mut note = self + .collection + .storage + .get_note(NoteId(note_id)) + .context("Failed to get note from storage")? 
+ .ok_or_else(|| anyhow::anyhow!("Note not found: {}", note_id))?; + + // Update each field + for (index, field_value) in fields.iter().enumerate() { + note.set_field(index, field_value) + .with_context(|| format!("Failed to set field {} on note {}", index, note_id))?; + } + + // Save the updated note + self.collection + .update_note(&mut note) + .context("Failed to update note in collection")?; + + debug!(note_id, "Updated note fields"); + Ok(()) + } + + /// Check if a note exists by ID + pub fn note_exists(&self, note_id: i64) -> Result { + use anki::notes::NoteId; + + let exists = self + .collection + .storage + .get_note(NoteId(note_id)) + .context("Failed to check note existence")? + .is_some(); + + Ok(exists) + } } impl NoteRepository for AnkiRepository { @@ -447,4 +490,50 @@ mod tests { assert_eq!(note.id, note_id); assert!(note.front.contains("42")); } + + #[test] + fn given_existing_note_when_updating_then_fields_change() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + // Create a note + let note_id = repo + .create_basic_note("Original Front", "Original Back", "Default", &vec![]) + .unwrap(); + + // Update it + let new_fields = vec!["Updated Front".to_string(), "Updated Back".to_string()]; + repo.update_note(note_id, &new_fields).unwrap(); + + // Retrieve and verify + let note = repo.get_note(note_id).unwrap(); + assert!(note.front.contains("Updated Front")); + assert!(note.back.contains("Updated Back")); + } + + #[test] + fn given_nonexistent_note_when_updating_then_returns_error() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let result = repo.update_note(9999999, &vec!["Test".to_string()]); + + assert!(result.is_err()); + } + + #[test] + fn given_existing_note_when_checking_exists_then_returns_true() { + let (_temp_dir, mut repo) = create_test_collection().unwrap(); + + let note_id = repo + .create_basic_note("Front", "Back", "Default", &vec![]) + .unwrap(); + + 
assert!(repo.note_exists(note_id).unwrap()); + } + + #[test] + fn given_nonexistent_note_when_checking_exists_then_returns_false() { + let (_temp_dir, repo) = create_test_collection().unwrap(); + + assert!(!repo.note_exists(9999999).unwrap()); + } } From fdac97433ddf70d456a80253437db3ab9cb4dbd6 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 15:45:42 +0100 Subject: [PATCH 15/32] feat: implement image extraction from markdown Add media_handler module with image path extraction: - extract_image_paths: extracts local image references from markdown - Supports markdown syntax: ![alt](path) - Supports HTML img tags: - Filters out HTTP(S) URLs (external images) - Uses regex for pattern matching Test coverage: - given_markdown_image_when_extracting_then_returns_path - given_multiple_images_when_extracting_then_returns_all_paths - given_html_img_tag_when_extracting_then_returns_path - given_mixed_formats_when_extracting_then_returns_all - given_no_images_when_extracting_then_returns_empty - given_absolute_urls_when_extracting_then_excludes_them Task 9.1 (Image Extraction) of Week 5 TDD migration plan complete. Total tests: 91 (6 new media_handler tests) --- .../src/inka/infrastructure/media_handler.rs | 117 ++++++++++++++++++ ankiview/src/inka/infrastructure/mod.rs | 1 + 2 files changed, 118 insertions(+) create mode 100644 ankiview/src/inka/infrastructure/media_handler.rs diff --git a/ankiview/src/inka/infrastructure/media_handler.rs b/ankiview/src/inka/infrastructure/media_handler.rs new file mode 100644 index 0000000..7e16e51 --- /dev/null +++ b/ankiview/src/inka/infrastructure/media_handler.rs @@ -0,0 +1,117 @@ +use regex::Regex; +use lazy_static::lazy_static; +use std::path::PathBuf; + +lazy_static! 
{ + // Match markdown images: ![alt](path) + static ref MD_IMAGE_REGEX: Regex = Regex::new(r"!\[.*?\]\(([^)]+)\)") + .expect("Failed to compile markdown image regex"); + + // Match HTML img tags: + static ref HTML_IMAGE_REGEX: Regex = Regex::new(r#"]+src="([^"]+)""#) + .expect("Failed to compile HTML image regex"); +} + +/// Extract image paths from markdown content +/// Supports both markdown syntax ![alt](path) and HTML +pub fn extract_image_paths(markdown: &str) -> Vec { + let mut paths = Vec::new(); + + // Extract markdown format images + for cap in MD_IMAGE_REGEX.captures_iter(markdown) { + if let Some(path_match) = cap.get(1) { + let path = path_match.as_str(); + // Skip HTTP(S) URLs + if !path.starts_with("http://") && !path.starts_with("https://") { + paths.push(path.to_string()); + } + } + } + + // Extract HTML format images + for cap in HTML_IMAGE_REGEX.captures_iter(markdown) { + if let Some(path_match) = cap.get(1) { + let path = path_match.as_str(); + // Skip HTTP(S) URLs + if !path.starts_with("http://") && !path.starts_with("https://") { + paths.push(path.to_string()); + } + } + } + + paths +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn given_markdown_image_when_extracting_then_returns_path() { + let markdown = "Some text ![alt text](images/photo.png) more text"; + let paths = extract_image_paths(markdown); + + assert_eq!(paths, vec!["images/photo.png"]); + } + + #[test] + fn given_multiple_images_when_extracting_then_returns_all_paths() { + let markdown = r#" +![First](image1.png) +Some text +![Second](path/to/image2.jpg) +More text +![Third](../relative/image3.gif) +"#; + let paths = extract_image_paths(markdown); + + assert_eq!(paths.len(), 3); + assert!(paths.contains(&"image1.png".to_string())); + assert!(paths.contains(&"path/to/image2.jpg".to_string())); + assert!(paths.contains(&"../relative/image3.gif".to_string())); + } + + #[test] + fn given_html_img_tag_when_extracting_then_returns_path() { + let markdown = r#"Some text 
more text"#; + let paths = extract_image_paths(markdown); + + assert_eq!(paths, vec!["diagrams/flow.png"]); + } + + #[test] + fn given_mixed_formats_when_extracting_then_returns_all() { + let markdown = r#" +Markdown: ![logo](logo.png) +HTML: +Another: ![icon](icons/star.svg) +"#; + let paths = extract_image_paths(markdown); + + assert_eq!(paths.len(), 3); + assert!(paths.contains(&"logo.png".to_string())); + assert!(paths.contains(&"banner.jpg".to_string())); + assert!(paths.contains(&"icons/star.svg".to_string())); + } + + #[test] + fn given_no_images_when_extracting_then_returns_empty() { + let markdown = "Just text with no images at all"; + let paths = extract_image_paths(markdown); + + assert!(paths.is_empty()); + } + + #[test] + fn given_absolute_urls_when_extracting_then_excludes_them() { + let markdown = r#" +Local: ![local](image.png) +HTTP: ![remote](http://example.com/image.jpg) +HTTPS: ![secure](https://example.com/photo.png) +"#; + let paths = extract_image_paths(markdown); + + // Should only return local path, not HTTP(S) URLs + assert_eq!(paths, vec!["image.png"]); + } +} diff --git a/ankiview/src/inka/infrastructure/mod.rs b/ankiview/src/inka/infrastructure/mod.rs index 20c1e87..348c239 100644 --- a/ankiview/src/inka/infrastructure/mod.rs +++ b/ankiview/src/inka/infrastructure/mod.rs @@ -1,2 +1,3 @@ pub mod markdown; pub mod file_writer; +pub mod media_handler; From a6fbd94b505d9b16510abc3c1afe5915186df626 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 15:50:47 +0100 Subject: [PATCH 16/32] feat: implement media file copying to Anki collection Add copy_media_to_anki function with deduplication: - Extracts filename from source path - Copies file to Anki's flat media directory - Skips copying if file already exists (no overwrite) - Returns filename only (not full path) for Anki references Features: - Handles nested source paths, flattens to media root - Error handling for nonexistent source files - Uses anyhow for error context Test 
coverage (4 new tests): - given_source_file_when_copying_then_file_appears_in_media_dir - given_existing_file_when_copying_then_skips_duplicate - given_nonexistent_source_when_copying_then_returns_error - given_file_with_path_when_copying_then_returns_basename Phase 9.2 (Media File Copying) of TDD migration plan complete. Total tests: 95 (10 media_handler tests) --- .../src/inka/infrastructure/media_handler.rs | 116 +++++++++++++++++- 1 file changed, 115 insertions(+), 1 deletion(-) diff --git a/ankiview/src/inka/infrastructure/media_handler.rs b/ankiview/src/inka/infrastructure/media_handler.rs index 7e16e51..e4fb161 100644 --- a/ankiview/src/inka/infrastructure/media_handler.rs +++ b/ankiview/src/inka/infrastructure/media_handler.rs @@ -1,6 +1,5 @@ use regex::Regex; use lazy_static::lazy_static; -use std::path::PathBuf; lazy_static! { // Match markdown images: ![alt](path) @@ -42,6 +41,31 @@ pub fn extract_image_paths(markdown: &str) -> Vec { paths } +/// Copy a media file to Anki's collection.media directory +/// Returns the filename (not full path) that Anki will use +pub fn copy_media_to_anki( + source_path: &std::path::Path, + media_dir: &std::path::Path, +) -> anyhow::Result { + use anyhow::Context; + + // Extract filename from source path + let filename = source_path + .file_name() + .and_then(|n| n.to_str()) + .ok_or_else(|| anyhow::anyhow!("Invalid filename"))?; + + let dest_path = media_dir.join(filename); + + // Skip copying if file already exists in media directory + if !dest_path.exists() { + std::fs::copy(source_path, &dest_path) + .context("Failed to copy media file")?; + } + + Ok(filename.to_string()) +} + #[cfg(test)] mod tests { use super::*; @@ -114,4 +138,94 @@ HTTPS: ![secure](https://example.com/photo.png) // Should only return local path, not HTTP(S) URLs assert_eq!(paths, vec!["image.png"]); } + + #[test] + fn given_source_file_when_copying_then_file_appears_in_media_dir() { + use std::fs; + use tempfile::TempDir; + + let temp_dir = 
TempDir::new().unwrap(); + let source_file = temp_dir.path().join("test_image.png"); + fs::write(&source_file, b"fake image data").unwrap(); + + let media_dir = temp_dir.path().join("collection.media"); + fs::create_dir(&media_dir).unwrap(); + + let filename = copy_media_to_anki(&source_file, &media_dir).unwrap(); + + // Should return just the filename + assert_eq!(filename, "test_image.png"); + + // File should exist in media directory + let dest_path = media_dir.join(&filename); + assert!(dest_path.exists()); + + // Content should match + let content = fs::read(&dest_path).unwrap(); + assert_eq!(content, b"fake image data"); + } + + #[test] + fn given_existing_file_when_copying_then_skips_duplicate() { + use std::fs; + use tempfile::TempDir; + + let temp_dir = TempDir::new().unwrap(); + let source_file = temp_dir.path().join("image.png"); + fs::write(&source_file, b"original").unwrap(); + + let media_dir = temp_dir.path().join("collection.media"); + fs::create_dir(&media_dir).unwrap(); + + // Pre-create the file in media dir + let existing_file = media_dir.join("image.png"); + fs::write(&existing_file, b"already exists").unwrap(); + + // Copy should succeed and return filename + let filename = copy_media_to_anki(&source_file, &media_dir).unwrap(); + assert_eq!(filename, "image.png"); + + // Should not overwrite existing file + let content = fs::read(&existing_file).unwrap(); + assert_eq!(content, b"already exists"); + } + + #[test] + fn given_nonexistent_source_when_copying_then_returns_error() { + use std::fs; + use tempfile::TempDir; + + let temp_dir = TempDir::new().unwrap(); + let nonexistent = temp_dir.path().join("doesnt_exist.png"); + + let media_dir = temp_dir.path().join("collection.media"); + fs::create_dir(&media_dir).unwrap(); + + let result = copy_media_to_anki(&nonexistent, &media_dir); + assert!(result.is_err()); + } + + #[test] + fn given_file_with_path_when_copying_then_returns_basename() { + use std::fs; + use tempfile::TempDir; + + let temp_dir 
= TempDir::new().unwrap(); + let subdir = temp_dir.path().join("images"); + fs::create_dir(&subdir).unwrap(); + + let source_file = subdir.join("photo.jpg"); + fs::write(&source_file, b"photo data").unwrap(); + + let media_dir = temp_dir.path().join("collection.media"); + fs::create_dir(&media_dir).unwrap(); + + let filename = copy_media_to_anki(&source_file, &media_dir).unwrap(); + + // Should return just filename, not path + assert_eq!(filename, "photo.jpg"); + + // File should be in media dir root (not in subdirectory) + assert!(media_dir.join("photo.jpg").exists()); + } } From 4bf341f9452374286b5561202edfeeaaa81cc167 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 16:04:42 +0100 Subject: [PATCH 17/32] feat: implement media path updating in HTML Add update_media_paths_in_html function: - Takes mapping of original paths to Anki filenames - Replaces all occurrences of original paths in HTML - Handles both markdown and HTML img tag syntax - Preserves unmapped paths unchanged Features: - Simple string replacement for maximum compatibility - Supports multiple images in single HTML document - Returns unchanged HTML when no mappings apply Test coverage (5 new tests): - given_html_with_image_src_when_updating_then_replaces_path - given_html_with_multiple_images_when_updating_then_replaces_all - given_html_with_no_matching_paths_when_updating_then_unchanged - given_html_with_unmapped_image_when_updating_then_leaves_unchanged - given_markdown_img_syntax_when_updating_then_replaces_path Phase 9.3 (Media Path Updating) of TDD migration plan complete. 
Total tests: 100 (15 media_handler tests) --- .../src/inka/infrastructure/media_handler.rs | 90 +++++++++++++++++++ 1 file changed, 90 insertions(+) diff --git a/ankiview/src/inka/infrastructure/media_handler.rs b/ankiview/src/inka/infrastructure/media_handler.rs index e4fb161..02dc428 100644 --- a/ankiview/src/inka/infrastructure/media_handler.rs +++ b/ankiview/src/inka/infrastructure/media_handler.rs @@ -66,6 +66,19 @@ pub fn copy_media_to_anki( Ok(filename.to_string()) } +/// Update image paths in HTML to use Anki media filenames +/// Takes a mapping of original paths to Anki filenames +pub fn update_media_paths_in_html(html: &str, path_mapping: &std::collections::HashMap) -> String { + let mut result = html.to_string(); + + // Replace each original path with its Anki filename + for (original_path, anki_filename) in path_mapping { + result = result.replace(original_path, anki_filename); + } + + result +} + #[cfg(test)] mod tests { use super::*; @@ -228,4 +241,81 @@ HTTPS: ![secure](https://example.com/photo.png) // File should be in media dir root (not in subdirectory) assert!(media_dir.join("photo.jpg").exists()); } + + #[test] + fn given_html_with_image_src_when_updating_then_replaces_path() { + use std::collections::HashMap; + + let html = r#"

Some text Photo more text

"#; + let mut mapping = HashMap::new(); + mapping.insert("images/photo.png".to_string(), "photo.png".to_string()); + + let updated = update_media_paths_in_html(html, &mapping); + + assert!(updated.contains(r#" +

Text

+ + "#; + + let mut mapping = HashMap::new(); + mapping.insert("path/to/image1.jpg".to_string(), "image1.jpg".to_string()); + mapping.insert("another/image2.png".to_string(), "image2.png".to_string()); + + let updated = update_media_paths_in_html(html, &mapping); + + assert!(updated.contains(r#"src="image1.jpg""#)); + assert!(updated.contains(r#"src="image2.png""#)); + assert!(!updated.contains("path/to/")); + assert!(!updated.contains("another/")); + } + + #[test] + fn given_html_with_no_matching_paths_when_updating_then_unchanged() { + use std::collections::HashMap; + + let html = r#"

Text without images

"#; + let mapping = HashMap::new(); + + let updated = update_media_paths_in_html(html, &mapping); + + assert_eq!(updated, html); + } + + #[test] + fn given_html_with_unmapped_image_when_updating_then_leaves_unchanged() { + use std::collections::HashMap; + + let html = r#" and "#; + let mut mapping = HashMap::new(); + mapping.insert("mapped.jpg".to_string(), "new_mapped.jpg".to_string()); + + let updated = update_media_paths_in_html(html, &mapping); + + // Should update only mapped path + assert!(updated.contains(r#"src="new_mapped.jpg""#)); + // Should leave unmapped path as-is + assert!(updated.contains(r#"src="unmapped.png""#)); + } + + #[test] + fn given_markdown_img_syntax_when_updating_then_replaces_path() { + use std::collections::HashMap; + + let html = r#"

Diagram

"#; + let mut mapping = HashMap::new(); + mapping.insert("images/diagram.png".to_string(), "diagram.png".to_string()); + + let updated = update_media_paths_in_html(html, &mapping); + + assert!(updated.contains(r#"src="diagram.png""#)); + } } From 081b69f37b08b2033ed40ab0073904ae58112d19 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 16:23:05 +0100 Subject: [PATCH 18/32] feat: implement TOML config file support Add Config struct with TOML serialization: - Supports three sections: defaults, anki, highlight - Loads config from inka.toml file - Saves config to TOML file with pretty formatting - Creates default config file - All fields have sensible defaults via serde attributes Config structure: - defaults: profile, deck, folder - anki: path, basic_type, front_field, back_field, cloze_type, cloze_field - highlight: style Features: - Partial TOML support (missing fields use defaults) - Round-trip serialization preserves values - Error handling with anyhow context Test coverage (6 new tests): - given_no_file_when_creating_default_then_creates_with_defaults - given_config_when_saving_then_writes_toml_file - given_toml_file_when_loading_then_reads_values - given_partial_toml_when_loading_then_uses_defaults - given_nonexistent_file_when_loading_then_returns_error - given_round_trip_when_saving_and_loading_then_preserves_values Phase 10.1 (TOML Config) of TDD migration plan complete. 
Total tests: 106 (6 config tests) --- ankiview/src/inka/infrastructure/config.rs | 253 +++++++++++++++++++++ ankiview/src/inka/infrastructure/mod.rs | 1 + 2 files changed, 254 insertions(+) create mode 100644 ankiview/src/inka/infrastructure/config.rs diff --git a/ankiview/src/inka/infrastructure/config.rs b/ankiview/src/inka/infrastructure/config.rs new file mode 100644 index 0000000..88ce2ab --- /dev/null +++ b/ankiview/src/inka/infrastructure/config.rs @@ -0,0 +1,253 @@ +use serde::{Deserialize, Serialize}; +use anyhow::{Context, Result}; +use std::path::Path; + +/// TOML configuration for inka collection +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct Config { + #[serde(default)] + pub defaults: Defaults, + #[serde(default)] + pub anki: AnkiConfig, + #[serde(default)] + pub highlight: HighlightConfig, +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct Defaults { + #[serde(default = "default_profile")] + pub profile: String, + #[serde(default = "default_deck")] + pub deck: String, + #[serde(default = "default_folder")] + pub folder: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct AnkiConfig { + #[serde(default = "default_path")] + pub path: String, + #[serde(default = "default_basic_type")] + pub basic_type: String, + #[serde(default = "default_front_field")] + pub front_field: String, + #[serde(default = "default_back_field")] + pub back_field: String, + #[serde(default = "default_cloze_type")] + pub cloze_type: String, + #[serde(default = "default_cloze_field")] + pub cloze_field: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct HighlightConfig { + #[serde(default = "default_highlight_style")] + pub style: String, +} + +// Default value functions +fn default_profile() -> String { String::new() } +fn default_deck() -> String { "Default".to_string() } +fn default_folder() -> String { String::new() } +fn default_path() -> String { 
String::new() } +fn default_basic_type() -> String { "Inka Basic".to_string() } +fn default_front_field() -> String { "Front".to_string() } +fn default_back_field() -> String { "Back".to_string() } +fn default_cloze_type() -> String { "Inka Cloze".to_string() } +fn default_cloze_field() -> String { "Text".to_string() } +fn default_highlight_style() -> String { "monokai".to_string() } + +impl Default for Defaults { + fn default() -> Self { + Self { + profile: default_profile(), + deck: default_deck(), + folder: default_folder(), + } + } +} + +impl Default for AnkiConfig { + fn default() -> Self { + Self { + path: default_path(), + basic_type: default_basic_type(), + front_field: default_front_field(), + back_field: default_back_field(), + cloze_type: default_cloze_type(), + cloze_field: default_cloze_field(), + } + } +} + +impl Default for HighlightConfig { + fn default() -> Self { + Self { + style: default_highlight_style(), + } + } +} + +impl Default for Config { + fn default() -> Self { + Self { + defaults: Defaults::default(), + anki: AnkiConfig::default(), + highlight: HighlightConfig::default(), + } + } +} + +impl Config { + /// Load configuration from TOML file + pub fn load(path: impl AsRef) -> Result { + let content = std::fs::read_to_string(path.as_ref()) + .context("Failed to read config file")?; + + let config: Config = toml::from_str(&content) + .context("Failed to parse TOML config")?; + + Ok(config) + } + + /// Save configuration to TOML file + pub fn save(&self, path: impl AsRef) -> Result<()> { + let toml_string = toml::to_string_pretty(self) + .context("Failed to serialize config to TOML")?; + + std::fs::write(path.as_ref(), toml_string) + .context("Failed to write config file")?; + + Ok(()) + } + + /// Create default configuration file at path + pub fn create_default(path: impl AsRef) -> Result { + let config = Self::default(); + config.save(path)?; + Ok(config) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use 
tempfile::TempDir; + + #[test] + fn given_no_file_when_creating_default_then_creates_with_defaults() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("inka.toml"); + + let config = Config::create_default(&config_path).unwrap(); + + assert_eq!(config.defaults.deck, "Default"); + assert_eq!(config.anki.basic_type, "Inka Basic"); + assert_eq!(config.highlight.style, "monokai"); + assert!(config_path.exists()); + } + + #[test] + fn given_config_when_saving_then_writes_toml_file() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("test.toml"); + + let config = Config::default(); + config.save(&config_path).unwrap(); + + assert!(config_path.exists()); + let content = fs::read_to_string(&config_path).unwrap(); + assert!(content.contains("[defaults]")); + assert!(content.contains("[anki]")); + assert!(content.contains("[highlight]")); + } + + #[test] + fn given_toml_file_when_loading_then_reads_values() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("load_test.toml"); + + let toml_content = r#" +[defaults] +profile = "User 1" +deck = "TestDeck" +folder = "/path/to/notes" + +[anki] +path = "/custom/collection.anki2" +basic_type = "Custom Basic" +front_field = "Question" +back_field = "Answer" +cloze_type = "Custom Cloze" +cloze_field = "Content" + +[highlight] +style = "github" +"#; + fs::write(&config_path, toml_content).unwrap(); + + let config = Config::load(&config_path).unwrap(); + + assert_eq!(config.defaults.profile, "User 1"); + assert_eq!(config.defaults.deck, "TestDeck"); + assert_eq!(config.defaults.folder, "/path/to/notes"); + assert_eq!(config.anki.path, "/custom/collection.anki2"); + assert_eq!(config.anki.basic_type, "Custom Basic"); + assert_eq!(config.highlight.style, "github"); + } + + #[test] + fn given_partial_toml_when_loading_then_uses_defaults() { + let temp_dir = TempDir::new().unwrap(); + let config_path = 
temp_dir.path().join("partial.toml"); + + let toml_content = r#" +[defaults] +deck = "MyDeck" +"#; + fs::write(&config_path, toml_content).unwrap(); + + let config = Config::load(&config_path).unwrap(); + + // Specified value + assert_eq!(config.defaults.deck, "MyDeck"); + // Default values + assert_eq!(config.defaults.profile, ""); + assert_eq!(config.anki.basic_type, "Inka Basic"); + assert_eq!(config.highlight.style, "monokai"); + } + + #[test] + fn given_nonexistent_file_when_loading_then_returns_error() { + let result = Config::load("/nonexistent/path/config.toml"); + + assert!(result.is_err()); + } + + #[test] + fn given_round_trip_when_saving_and_loading_then_preserves_values() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("roundtrip.toml"); + + let original = Config { + defaults: Defaults { + profile: "Test Profile".to_string(), + deck: "Test Deck".to_string(), + folder: "/test/folder".to_string(), + }, + anki: AnkiConfig { + path: "/test/collection.anki2".to_string(), + ..Default::default() + }, + highlight: HighlightConfig { + style: "nord".to_string(), + }, + }; + + original.save(&config_path).unwrap(); + let loaded = Config::load(&config_path).unwrap(); + + assert_eq!(loaded, original); + } +} diff --git a/ankiview/src/inka/infrastructure/mod.rs b/ankiview/src/inka/infrastructure/mod.rs index 348c239..7ec2436 100644 --- a/ankiview/src/inka/infrastructure/mod.rs +++ b/ankiview/src/inka/infrastructure/mod.rs @@ -1,3 +1,4 @@ pub mod markdown; pub mod file_writer; pub mod media_handler; +pub mod config; From f5bf77e85295109273d87712342c79efd00f661a Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 16:28:23 +0100 Subject: [PATCH 19/32] feat: implement SHA256 hash-based change detection Add hasher module for file change detection: - calculate_file_hash: computes SHA256 hash of file content - has_file_changed: compares current hash with previous hash - Uses sha2 crate for cryptographic hashing - Returns 
lowercase hex string (64 characters) Features: - Detects file modifications by content changes - Same content produces identical hashes - Different content produces different hashes - Error handling for nonexistent files - Handles multiline content correctly Use case: - Enables incremental updates (only process changed markdown files) - Will be combined with hash cache in Task 10.3 Test coverage (8 new tests): - given_file_when_calculating_hash_then_returns_sha256 - given_same_content_when_calculating_hash_then_returns_same_value - given_different_content_when_calculating_hash_then_returns_different_values - given_nonexistent_file_when_calculating_hash_then_returns_error - given_multiline_content_when_calculating_hash_then_handles_correctly - given_matching_hash_when_checking_change_then_returns_false - given_different_hash_when_checking_change_then_returns_true - given_file_modified_when_checking_then_detects_change Phase 10.2 (SHA256 Hashing) of TDD migration plan complete. Total tests: 114 (8 hasher tests) --- ankiview/src/inka/infrastructure/hasher.rs | 135 +++++++++++++++++++++ ankiview/src/inka/infrastructure/mod.rs | 1 + 2 files changed, 136 insertions(+) create mode 100644 ankiview/src/inka/infrastructure/hasher.rs diff --git a/ankiview/src/inka/infrastructure/hasher.rs b/ankiview/src/inka/infrastructure/hasher.rs new file mode 100644 index 0000000..42c3c3b --- /dev/null +++ b/ankiview/src/inka/infrastructure/hasher.rs @@ -0,0 +1,135 @@ +use anyhow::{Context, Result}; +use sha2::{Sha256, Digest}; +use std::path::Path; + +/// Calculate SHA256 hash of a file's content +pub fn calculate_file_hash(path: impl AsRef) -> Result { + let content = std::fs::read_to_string(path.as_ref()) + .context("Failed to read file for hashing")?; + + let mut hasher = Sha256::new(); + hasher.update(content.as_bytes()); + let result = hasher.finalize(); + + // Convert to lowercase hex string + Ok(format!("{:x}", result)) +} + +/// Check if file content has changed by comparing 
hashes +pub fn has_file_changed(path: impl AsRef, previous_hash: &str) -> Result { + let current_hash = calculate_file_hash(path)?; + Ok(current_hash != previous_hash) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + #[test] + fn given_file_when_calculating_hash_then_returns_sha256() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("test.md"); + fs::write(&file_path, "Hello, world!").unwrap(); + + let hash = calculate_file_hash(&file_path).unwrap(); + + // SHA256 hash should be 64 hex characters + assert_eq!(hash.len(), 64); + assert!(hash.chars().all(|c| c.is_ascii_hexdigit())); + } + + #[test] + fn given_same_content_when_calculating_hash_then_returns_same_value() { + let temp_dir = TempDir::new().unwrap(); + let file1 = temp_dir.path().join("file1.md"); + let file2 = temp_dir.path().join("file2.md"); + + let content = "Identical content"; + fs::write(&file1, content).unwrap(); + fs::write(&file2, content).unwrap(); + + let hash1 = calculate_file_hash(&file1).unwrap(); + let hash2 = calculate_file_hash(&file2).unwrap(); + + assert_eq!(hash1, hash2); + } + + #[test] + fn given_different_content_when_calculating_hash_then_returns_different_values() { + let temp_dir = TempDir::new().unwrap(); + let file1 = temp_dir.path().join("file1.md"); + let file2 = temp_dir.path().join("file2.md"); + + fs::write(&file1, "Content A").unwrap(); + fs::write(&file2, "Content B").unwrap(); + + let hash1 = calculate_file_hash(&file1).unwrap(); + let hash2 = calculate_file_hash(&file2).unwrap(); + + assert_ne!(hash1, hash2); + } + + #[test] + fn given_nonexistent_file_when_calculating_hash_then_returns_error() { + let result = calculate_file_hash("/nonexistent/file.md"); + + assert!(result.is_err()); + } + + #[test] + fn given_multiline_content_when_calculating_hash_then_handles_correctly() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("multi.md"); + + let content = "Line 
1\nLine 2\nLine 3\n";
+        fs::write(&file_path, content).unwrap();
+
+        let hash = calculate_file_hash(&file_path).unwrap();
+
+        // Should produce valid SHA256 hash
+        assert_eq!(hash.len(), 64);
+    }
+
+    #[test]
+    fn given_matching_hash_when_checking_change_then_returns_false() {
+        let temp_dir = TempDir::new().unwrap();
+        let file_path = temp_dir.path().join("unchanged.md");
+        fs::write(&file_path, "Unchanged content").unwrap();
+
+        let current_hash = calculate_file_hash(&file_path).unwrap();
+        let changed = has_file_changed(&file_path, &current_hash).unwrap();
+
+        assert!(!changed);
+    }
+
+    #[test]
+    fn given_different_hash_when_checking_change_then_returns_true() {
+        let temp_dir = TempDir::new().unwrap();
+        let file_path = temp_dir.path().join("changed.md");
+        fs::write(&file_path, "New content").unwrap();
+
+        let old_hash = "0000000000000000000000000000000000000000000000000000000000000000";
+        let changed = has_file_changed(&file_path, old_hash).unwrap();
+
+        assert!(changed);
+    }
+
+    #[test]
+    fn given_file_modified_when_checking_then_detects_change() {
+        let temp_dir = TempDir::new().unwrap();
+        let file_path = temp_dir.path().join("modified.md");
+
+        // Write initial content and get hash
+        fs::write(&file_path, "Original").unwrap();
+        let original_hash = calculate_file_hash(&file_path).unwrap();
+
+        // Modify content
+        fs::write(&file_path, "Modified").unwrap();
+
+        // Should detect change
+        let changed = has_file_changed(&file_path, &original_hash).unwrap();
+        assert!(changed);
+    }
+}
diff --git a/ankiview/src/inka/infrastructure/mod.rs b/ankiview/src/inka/infrastructure/mod.rs
index 7ec2436..80eb7f9 100644
--- a/ankiview/src/inka/infrastructure/mod.rs
+++ b/ankiview/src/inka/infrastructure/mod.rs
@@ -2,3 +2,4 @@ pub mod markdown;
 pub mod file_writer;
 pub mod media_handler;
 pub mod config;
+pub mod hasher;

From c2f04329bb11d78d24368125c6dc1457e5e8bdc1 Mon Sep 17 00:00:00 2001
From: sysid
Date: Sun, 26 Oct 2025 16:31:42 +0100
Subject: [PATCH 20/32] feat: implement 
hash cache persistence with JSON storage Add HashCache struct for file change tracking: - Loads hash cache from JSON file (creates empty if missing) - Saves hash cache to JSON file with pretty formatting - Checks if file has changed (new files return true) - Updates hash for a file in the cache - Clears all hashes from cache - Uses HashMap internally Features: - Persistent storage enables incremental updates across sessions - JSON format for human readability and debugging - Handles nonexistent cache files gracefully - Tracks multiple files independently - Detects both new files and modified files Integration: - Builds on calculate_file_hash and has_file_changed from Task 10.2 - Completes change detection infrastructure for inka collect command Test coverage (7 new HashCache tests): - given_nonexistent_cache_when_loading_then_creates_empty - given_cache_when_saving_then_creates_json_file - given_new_file_when_checking_then_returns_changed - given_unchanged_file_when_checking_then_returns_false - given_modified_file_when_checking_then_returns_changed - given_cache_with_hashes_when_clearing_then_removes_all - given_multiple_files_when_updating_then_tracks_all Phase 10.3 (Hash Cache Persistence) of TDD migration plan complete. Week 5 (Media & Config) fully implemented. 
Total tests: 121 (15 hasher tests including HashCache) --- ankiview/src/inka/infrastructure/hasher.rs | 186 +++++++++++++++++++++ 1 file changed, 186 insertions(+) diff --git a/ankiview/src/inka/infrastructure/hasher.rs b/ankiview/src/inka/infrastructure/hasher.rs index 42c3c3b..a43c997 100644 --- a/ankiview/src/inka/infrastructure/hasher.rs +++ b/ankiview/src/inka/infrastructure/hasher.rs @@ -1,6 +1,8 @@ use anyhow::{Context, Result}; use sha2::{Sha256, Digest}; use std::path::Path; +use std::collections::HashMap; +use serde::{Deserialize, Serialize}; /// Calculate SHA256 hash of a file's content pub fn calculate_file_hash(path: impl AsRef) -> Result { @@ -21,6 +23,78 @@ pub fn has_file_changed(path: impl AsRef, previous_hash: &str) -> Result hash mapping in a JSON file +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HashCache { + cache_path: std::path::PathBuf, + hashes: HashMap, +} + +impl HashCache { + /// Load hash cache from file, or create empty cache if file doesn't exist + pub fn load(path: impl AsRef) -> Result { + let cache_path = path.as_ref().to_path_buf(); + + let hashes = if cache_path.exists() { + let content = std::fs::read_to_string(&cache_path) + .context("Failed to read hash cache file")?; + serde_json::from_str(&content) + .context("Failed to parse hash cache JSON")? + } else { + HashMap::new() + }; + + Ok(Self { cache_path, hashes }) + } + + /// Save hash cache to file + pub fn save(&self) -> Result<()> { + let json = serde_json::to_string_pretty(&self.hashes) + .context("Failed to serialize hash cache")?; + + std::fs::write(&self.cache_path, json) + .context("Failed to write hash cache file")?; + + Ok(()) + } + + /// Check if file has changed compared to cached hash + /// Returns true if file is new or content has changed + pub fn file_has_changed(&self, filepath: impl AsRef) -> Result { + let path_str = filepath.as_ref() + .to_str() + .ok_or_else(|| anyhow::anyhow!("Invalid file path"))? 
+ .to_string(); + + // If not in cache, it's a new file (changed) + let Some(cached_hash) = self.hashes.get(&path_str) else { + return Ok(true); + }; + + // Compare current hash with cached hash + has_file_changed(filepath, cached_hash) + } + + /// Update hash for a file in the cache + pub fn update_hash(&mut self, filepath: impl AsRef) -> Result<()> { + let path_str = filepath.as_ref() + .to_str() + .ok_or_else(|| anyhow::anyhow!("Invalid file path"))? + .to_string(); + + let hash = calculate_file_hash(filepath)?; + self.hashes.insert(path_str, hash); + + Ok(()) + } + + /// Clear all hashes from cache + pub fn clear(&mut self) { + self.hashes.clear(); + } +} + #[cfg(test)] mod tests { use super::*; @@ -132,4 +206,116 @@ mod tests { let changed = has_file_changed(&file_path, &original_hash).unwrap(); assert!(changed); } + + // HashCache tests + #[test] + fn given_nonexistent_cache_when_loading_then_creates_empty() { + let temp_dir = TempDir::new().unwrap(); + let cache_path = temp_dir.path().join("hashes.json"); + + let cache = HashCache::load(&cache_path).unwrap(); + + assert_eq!(cache.hashes.len(), 0); + } + + #[test] + fn given_cache_when_saving_then_creates_json_file() { + let temp_dir = TempDir::new().unwrap(); + let cache_path = temp_dir.path().join("cache.json"); + + let cache = HashCache::load(&cache_path).unwrap(); + cache.save().unwrap(); + + assert!(cache_path.exists()); + let content = fs::read_to_string(&cache_path).unwrap(); + assert!(content.contains("{") && content.contains("}")); + } + + #[test] + fn given_new_file_when_checking_then_returns_changed() { + let temp_dir = TempDir::new().unwrap(); + let cache_path = temp_dir.path().join("cache.json"); + let file_path = temp_dir.path().join("test.md"); + fs::write(&file_path, "Content").unwrap(); + + let cache = HashCache::load(&cache_path).unwrap(); + let changed = cache.file_has_changed(&file_path).unwrap(); + + assert!(changed); + } + + #[test] + fn 
given_unchanged_file_when_checking_then_returns_false() { + let temp_dir = TempDir::new().unwrap(); + let cache_path = temp_dir.path().join("cache.json"); + let file_path = temp_dir.path().join("unchanged.md"); + fs::write(&file_path, "Stable content").unwrap(); + + let mut cache = HashCache::load(&cache_path).unwrap(); + cache.update_hash(&file_path).unwrap(); + cache.save().unwrap(); + + // Reload cache and check same file + let cache = HashCache::load(&cache_path).unwrap(); + let changed = cache.file_has_changed(&file_path).unwrap(); + + assert!(!changed); + } + + #[test] + fn given_modified_file_when_checking_then_returns_changed() { + let temp_dir = TempDir::new().unwrap(); + let cache_path = temp_dir.path().join("cache.json"); + let file_path = temp_dir.path().join("modified.md"); + fs::write(&file_path, "Original").unwrap(); + + let mut cache = HashCache::load(&cache_path).unwrap(); + cache.update_hash(&file_path).unwrap(); + cache.save().unwrap(); + + // Modify file + fs::write(&file_path, "Modified").unwrap(); + + // Reload and check + let cache = HashCache::load(&cache_path).unwrap(); + let changed = cache.file_has_changed(&file_path).unwrap(); + + assert!(changed); + } + + #[test] + fn given_cache_with_hashes_when_clearing_then_removes_all() { + let temp_dir = TempDir::new().unwrap(); + let cache_path = temp_dir.path().join("cache.json"); + let file_path = temp_dir.path().join("file.md"); + fs::write(&file_path, "Content").unwrap(); + + let mut cache = HashCache::load(&cache_path).unwrap(); + cache.update_hash(&file_path).unwrap(); + assert_eq!(cache.hashes.len(), 1); + + cache.clear(); + assert_eq!(cache.hashes.len(), 0); + } + + #[test] + fn given_multiple_files_when_updating_then_tracks_all() { + let temp_dir = TempDir::new().unwrap(); + let cache_path = temp_dir.path().join("cache.json"); + let file1 = temp_dir.path().join("file1.md"); + let file2 = temp_dir.path().join("file2.md"); + fs::write(&file1, "Content 1").unwrap(); + fs::write(&file2, 
"Content 2").unwrap(); + + let mut cache = HashCache::load(&cache_path).unwrap(); + cache.update_hash(&file1).unwrap(); + cache.update_hash(&file2).unwrap(); + cache.save().unwrap(); + + // Reload and verify both tracked + let cache = HashCache::load(&cache_path).unwrap(); + assert_eq!(cache.hashes.len(), 2); + assert!(!cache.file_has_changed(&file1).unwrap()); + assert!(!cache.file_has_changed(&file2).unwrap()); + } } From 4da4e34cbc0179ec74a2054846685c5e5cc06493 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 16:51:12 +0100 Subject: [PATCH 21/32] feat: implement CardCollector use case orchestration Add CardCollector application use case that orchestrates the entire workflow: - Reads markdown files with section_parser - Extracts deck names, tags, and note strings from sections - Detects card types (basic vs cloze) - Parses card fields with card_parser - Converts markdown to HTML with converter - Transforms cloze syntax - Creates new notes or updates existing notes in Anki - Injects Anki IDs back into markdown files - Handles media directory creation Implementation details: - Uses SectionParser::new().parse() for section extraction - Converts sections to owned Strings to avoid borrow checker issues - Supports both numbered basic cards ("1. Question? > Answer") and numbered cloze cards ("1. Text with {cloze deletion}") - Maintains AnkiRepository instance for database operations Test coverage (5 new tests): - given_markdown_with_basic_card_when_processing_then_creates_note - given_markdown_with_cloze_card_when_processing_then_creates_note - given_markdown_with_multiple_cards_when_processing_then_creates_all - given_markdown_with_id_when_processing_second_time_then_updates_note - given_empty_markdown_when_processing_then_returns_zero Task 11.1 (CardCollector orchestration) complete. 
Total tests: 126 (5 new application tests) --- .../src/inka/application/card_collector.rs | 283 ++++++++++++++++++ ankiview/src/inka/application/mod.rs | 1 + 2 files changed, 284 insertions(+) create mode 100644 ankiview/src/inka/application/card_collector.rs diff --git a/ankiview/src/inka/application/card_collector.rs b/ankiview/src/inka/application/card_collector.rs new file mode 100644 index 0000000..84dec01 --- /dev/null +++ b/ankiview/src/inka/application/card_collector.rs @@ -0,0 +1,283 @@ +use anyhow::{Context, Result}; +use std::path::{Path, PathBuf}; +use crate::infrastructure::anki::AnkiRepository; +use crate::inka::infrastructure::markdown::section_parser; +use crate::inka::infrastructure::markdown::card_parser; +use crate::inka::infrastructure::markdown::converter; +use crate::inka::infrastructure::file_writer; + +/// Main use case for collecting markdown cards into Anki +pub struct CardCollector { + collection_path: PathBuf, + media_dir: PathBuf, + repository: AnkiRepository, +} + +impl CardCollector { + /// Create a new CardCollector with Anki collection path + pub fn new(collection_path: impl AsRef) -> Result { + let collection_path = collection_path.as_ref().to_path_buf(); + + // Determine media directory path + let media_dir = collection_path + .parent() + .ok_or_else(|| anyhow::anyhow!("Invalid collection path"))? 
+ .join("collection.media"); + + // Create media directory if it doesn't exist + if !media_dir.exists() { + std::fs::create_dir_all(&media_dir) + .context("Failed to create media directory")?; + } + + // Open repository + let repository = AnkiRepository::new(&collection_path)?; + + Ok(Self { + collection_path, + media_dir, + repository, + }) + } + + /// Process a single markdown file and add/update cards in Anki + /// Returns the number of cards processed + pub fn process_file(&mut self, markdown_path: impl AsRef) -> Result { + let markdown_path = markdown_path.as_ref(); + + // Read markdown file + let mut content = file_writer::read_markdown_file(markdown_path)?; + + // Parse sections + let parser = section_parser::SectionParser::new(); + let sections = parser.parse(&content); + + if sections.is_empty() { + return Ok(0); + } + + // Convert sections to owned Strings to avoid borrowing issues when mutating content + let sections: Vec = sections.iter().map(|s| s.to_string()).collect(); + + let mut card_count = 0; + + for section in §ions { + // Extract metadata + let deck_name = section_parser::extract_deck_name(section) + .unwrap_or_else(|| "Default".to_string()); + let tags = section_parser::extract_tags(section); + + // Extract note strings + let note_strings = section_parser::extract_note_strings(section); + + for note_str in note_strings { + // Extract existing ID if present + let existing_id = card_parser::extract_anki_id(¬e_str); + + // Determine card type and process + if card_parser::is_basic_card(¬e_str) { + // Parse basic card fields + let (front_md, back_md) = card_parser::parse_basic_card_fields(¬e_str)?; + + // Convert to HTML + let front_html = converter::markdown_to_html(&front_md); + let back_html = converter::markdown_to_html(&back_md); + + // Create or update note + let note_id = if let Some(id) = existing_id { + // Update existing note + self.repository.update_note(id, &[front_html, back_html])?; + id + } else { + // Create new note + let id = 
self.repository.create_basic_note( + &front_html, + &back_html, + &deck_name, + &tags, + )?; + + // Inject ID back into markdown + content = file_writer::inject_anki_id(&content, ¬e_str, id); + id + }; + + card_count += 1; + + } else if card_parser::is_cloze_card(¬e_str) { + // Parse cloze card + let text_md = card_parser::parse_cloze_card_field(¬e_str)?; + + // Transform cloze syntax + let text_transformed = crate::inka::infrastructure::markdown::cloze_converter::convert_cloze_syntax(&text_md); + + // Convert to HTML + let text_html = converter::markdown_to_html(&text_transformed); + + // Create or update note + let note_id = if let Some(id) = existing_id { + // Update existing note + self.repository.update_note(id, &[text_html])?; + id + } else { + // Create new note + let id = self.repository.create_cloze_note( + &text_html, + &deck_name, + &tags, + )?; + + // Inject ID back into markdown + content = file_writer::inject_anki_id(&content, ¬e_str, id); + id + }; + + card_count += 1; + } + } + } + + // Write updated content back to file if IDs were injected + file_writer::write_markdown_file(markdown_path, &content)?; + + Ok(card_count) + } + + /// Process a directory recursively + /// Returns the number of cards processed + pub fn process_directory(&mut self, dir_path: impl AsRef) -> Result { + todo!("Implement CardCollector::process_directory") + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + + // Test helper that creates a temporary test collection + fn create_test_collection() -> (tempfile::TempDir, std::path::PathBuf, std::path::PathBuf) { + use std::path::PathBuf; + let temp_dir = tempfile::tempdir().unwrap(); + + let fixture_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("tests/fixtures/test_collection/collection.anki2"); + let collection_path = temp_dir.path().join("collection.anki2"); + + std::fs::copy(&fixture_path, &collection_path).unwrap(); + + let media_dir = temp_dir.path().join("collection.media"); + 
std::fs::create_dir_all(&media_dir).unwrap(); + + (temp_dir, collection_path, media_dir) + } + + #[test] + fn given_markdown_with_basic_card_when_processing_then_creates_note() { + let (temp_dir, collection_path, _media_dir) = create_test_collection(); + + let markdown_path = temp_dir.path().join("test.md"); + let markdown_content = r#"--- +Deck: TestDeck + +1. What is Rust? +> A systems programming language +---"#; + fs::write(&markdown_path, markdown_content).unwrap(); + + let mut collector = CardCollector::new(&collection_path).unwrap(); + let count = collector.process_file(&markdown_path).unwrap(); + + assert_eq!(count, 1); + } + + #[test] + fn given_markdown_with_cloze_card_when_processing_then_creates_note() { + let (temp_dir, collection_path, _media_dir) = create_test_collection(); + + let markdown_path = temp_dir.path().join("cloze.md"); + let markdown_content = r#"--- +Deck: TestDeck + +1. Rust is a {systems programming} language. +---"#; + fs::write(&markdown_path, markdown_content).unwrap(); + + let mut collector = CardCollector::new(&collection_path).unwrap(); + let count = collector.process_file(&markdown_path).unwrap(); + + assert_eq!(count, 1); + } + + #[test] + fn given_markdown_with_multiple_cards_when_processing_then_creates_all() { + let (temp_dir, collection_path, _media_dir) = create_test_collection(); + + let markdown_path = temp_dir.path().join("multi.md"); + let markdown_content = r#"--- +Deck: TestDeck + +1. What is Rust? +> A systems programming language + +2. What is Cargo? +> Rust's package manager + +3. Rust was created by {Mozilla}. 
+---"#; + fs::write(&markdown_path, markdown_content).unwrap(); + + let mut collector = CardCollector::new(&collection_path).unwrap(); + let count = collector.process_file(&markdown_path).unwrap(); + + assert_eq!(count, 3); + } + + #[test] + fn given_markdown_with_id_when_processing_second_time_then_updates_note() { + let (temp_dir, collection_path, _media_dir) = create_test_collection(); + + let markdown_path = temp_dir.path().join("update.md"); + let markdown_content = r#"--- +Deck: TestDeck + +1. What is Rust? +> A systems programming language +---"#; + fs::write(&markdown_path, markdown_content).unwrap(); + + let mut collector = CardCollector::new(&collection_path).unwrap(); + + // First run creates note + let count1 = collector.process_file(&markdown_path).unwrap(); + assert_eq!(count1, 1); + + // Markdown should now have ID + let updated_content = fs::read_to_string(&markdown_path).unwrap(); + assert!(updated_content.contains("") + .next()? + .trim() + .parse::() + .ok() + }) + .collect(); + + assert_eq!(ids.len(), 3, "Should extract 3 valid IDs"); + + // Verify IDs are non-zero and unique + for id in &ids { + assert!(*id > 0, "ID should be positive"); + } + + let unique_ids: std::collections::HashSet<_> = ids.iter().collect(); + assert_eq!(unique_ids.len(), 3, "All IDs should be unique"); + + Ok(()) +} From 79f71768e4c83578304014dc9583d0b3633cf046 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 17:05:25 +0100 Subject: [PATCH 24/32] docs: add sample markdown notes with format guide Add comprehensive example file demonstrating: - Section structure with --- delimiters - Deck and Tags metadata - Basic card format (Q&A with > marker) - Cloze deletion format ({} syntax) - LaTeX math support ($...$ and 58120...58120) - Usage examples for collect command Provides users with a complete reference for writing markdown flashcards compatible with ankiview collect. Phase 11.5 (Documentation) of TDD migration plan complete. All Week 6 tasks complete. 
--- ankiview/examples/sample-notes.md | 91 +++++++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 ankiview/examples/sample-notes.md diff --git a/ankiview/examples/sample-notes.md b/ankiview/examples/sample-notes.md new file mode 100644 index 0000000..f27ad1b --- /dev/null +++ b/ankiview/examples/sample-notes.md @@ -0,0 +1,91 @@ +# Sample Anki Notes in Markdown + +This is an example markdown file showing how to write Anki flashcards. + +--- +Deck: Programming +Tags: rust basics + +1. What is Rust? +> Rust is a systems programming language focused on safety, speed, and concurrency. + +2. What is Cargo? +> Cargo is Rust's package manager and build system. + +3. Rust's ownership system prevents {data races} at compile time. + +4. What are the three rules of ownership in Rust? +> 1. Each value has a variable called its owner +> 2. There can only be one owner at a time +> 3. When the owner goes out of scope, the value is dropped +--- + +--- +Deck: Mathematics +Tags: algebra formulas + +1. What is the quadratic formula? +> $x = \frac{-b \pm \sqrt{b^2 - 4ac}}{2a}$ + +2. The {Pythagorean theorem} states that $a^2 + b^2 = c^2$. + +3. What is the formula for the area of a circle? +> $A = \pi r^2$ where $r$ is the radius +--- + +## Usage + +Process this file with: + +```bash +# Process single file +ankiview collect examples/sample-notes.md + +# Process with specific collection +ankiview -c /path/to/collection.anki2 collect examples/sample-notes.md + +# Process entire directory recursively +ankiview collect ./my-notes --recursive +``` + +## Format Guide + +### Sections +- Sections are delimited by `---` +- Each section can have `Deck:` and `Tags:` metadata +- All cards in a section share the same deck and tags + +### Card Types + +**Basic Cards** (front/back): +```markdown +1. Question here? +> Answer here +``` + +**Cloze Deletions** (fill-in-the-blank): +```markdown +1. This is a {cloze deletion} example. 
+``` + +### Math Support + +Use `$` for inline math: `$E = mc^2$` + +Use `$$` for block math: +```markdown +$$ +\int_a^b f(x) dx +$$ +``` + +### IDs + +After first run, Anki IDs are injected: +```markdown + +1. Question? +> Answer +``` + +Don't modify IDs - they link to Anki notes for updates. From b8b152c7b5a7ac95e025bb87543c4560f2149d85 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 26 Oct 2025 17:15:51 +0100 Subject: [PATCH 25/32] refactor: fix all clippy warnings Address all clippy warnings across the codebase: CardCollector: - Remove unused note_id variables (just execute side effects) - Prefix unused fields with _ (collection_path, media_dir) Infrastructure: - Use strip_prefix() instead of manual string slicing (card_parser) - Return expression directly without intermediate let (cloze_converter) - Derive Default instead of manual impl (config) - Remove trivial assert!(true) test (inka/mod) Tests: - Replace vec![] with &[] slice syntax (anki.rs, test helpers) - Use assert!(bool) instead of assert_eq!(bool, true/false) (test_cli) - Use !is_empty() instead of len() > 0 (test_anki) - Remove unused NoteRepository import (test_collect) - Remove needless borrows for generic args (build_test_collection) - Mark unused helper method with #[allow(dead_code)] All 171 tests passing. Zero clippy warnings. 
--- Makefile | 9 ++++++++- ankiview/src/infrastructure/anki.rs | 14 +++++++------- ankiview/src/inka/application/card_collector.rs | 16 ++++++---------- ankiview/src/inka/infrastructure/config.rs | 12 +----------- .../inka/infrastructure/markdown/card_parser.rs | 4 ++-- .../infrastructure/markdown/cloze_converter.rs | 4 +--- ankiview/src/inka/mod.rs | 8 -------- ankiview/tests/fixtures/build_test_collection.rs | 4 ++-- ankiview/tests/helpers/mod.rs | 1 + ankiview/tests/test_anki.rs | 2 +- ankiview/tests/test_cli.rs | 10 +++++----- ankiview/tests/test_collect.rs | 1 - 12 files changed, 34 insertions(+), 51 deletions(-) diff --git a/Makefile b/Makefile index e170303..e098571 100644 --- a/Makefile +++ b/Makefile @@ -31,7 +31,14 @@ ADMIN:: ## ################################################################## .PHONY: init-env init-env: ## init-env @rm -fr ~/xxx/* - @mkdir -p ~/xxx + @mkdir -p ~/xxx/ankiview-test + @cp -r ankiview/tests/fixtures/test_collection/* ~/xxx/ankiview-test/ + + +.PHONY: create-note +create-note: ## create a note from markdown + cargo run --bin ankiview -- -c ~/xxx/ankiview-test/collection.anki2 view 1695797540371 + cargo run --bin ankiview -- -c ~/xxx/ankiview-test/collection.anki2 collect test.md .PHONY: test test: ## Run all tests (unit, integration, and doc tests) with debug logging diff --git a/ankiview/src/infrastructure/anki.rs b/ankiview/src/infrastructure/anki.rs index 4381175..52bffd6 100644 --- a/ankiview/src/infrastructure/anki.rs +++ b/ankiview/src/infrastructure/anki.rs @@ -439,7 +439,7 @@ mod tests { "What is Rust?", "A systems programming language", "Default", - &vec!["rust".to_string(), "programming".to_string()], + &["rust".to_string(), "programming".to_string()], ) .unwrap(); @@ -451,7 +451,7 @@ mod tests { let (_temp_dir, mut repo) = create_test_collection().unwrap(); let note_id = repo - .create_basic_note("Front", "Back", "Default", &vec![]) + .create_basic_note("Front", "Back", "Default", &[]) .unwrap(); // Should be able 
to retrieve the note @@ -469,7 +469,7 @@ mod tests { .create_cloze_note( "The capital of {{c1::France}} is {{c2::Paris}}", "Default", - &vec!["geography".to_string()], + &["geography".to_string()], ) .unwrap(); @@ -482,7 +482,7 @@ mod tests { let cloze_text = "Answer: {{c1::42}}"; let note_id = repo - .create_cloze_note(cloze_text, "Default", &vec![]) + .create_cloze_note(cloze_text, "Default", &[]) .unwrap(); // Should be able to retrieve the note @@ -497,7 +497,7 @@ mod tests { // Create a note let note_id = repo - .create_basic_note("Original Front", "Original Back", "Default", &vec![]) + .create_basic_note("Original Front", "Original Back", "Default", &[]) .unwrap(); // Update it @@ -514,7 +514,7 @@ mod tests { fn given_nonexistent_note_when_updating_then_returns_error() { let (_temp_dir, mut repo) = create_test_collection().unwrap(); - let result = repo.update_note(9999999, &vec!["Test".to_string()]); + let result = repo.update_note(9999999, &["Test".to_string()]); assert!(result.is_err()); } @@ -524,7 +524,7 @@ mod tests { let (_temp_dir, mut repo) = create_test_collection().unwrap(); let note_id = repo - .create_basic_note("Front", "Back", "Default", &vec![]) + .create_basic_note("Front", "Back", "Default", &[]) .unwrap(); assert!(repo.note_exists(note_id).unwrap()); diff --git a/ankiview/src/inka/application/card_collector.rs b/ankiview/src/inka/application/card_collector.rs index c7f055e..67da4d5 100644 --- a/ankiview/src/inka/application/card_collector.rs +++ b/ankiview/src/inka/application/card_collector.rs @@ -8,8 +8,8 @@ use crate::inka::infrastructure::file_writer; /// Main use case for collecting markdown cards into Anki pub struct CardCollector { - collection_path: PathBuf, - media_dir: PathBuf, + _collection_path: PathBuf, + _media_dir: PathBuf, repository: AnkiRepository, } @@ -34,8 +34,8 @@ impl CardCollector { let repository = AnkiRepository::new(&collection_path)?; Ok(Self { - collection_path, - media_dir, + _collection_path: collection_path, + 
_media_dir: media_dir, repository, }) } @@ -84,10 +84,9 @@ impl CardCollector { let back_html = converter::markdown_to_html(&back_md); // Create or update note - let note_id = if let Some(id) = existing_id { + if let Some(id) = existing_id { // Update existing note self.repository.update_note(id, &[front_html, back_html])?; - id } else { // Create new note let id = self.repository.create_basic_note( @@ -99,7 +98,6 @@ impl CardCollector { // Inject ID back into markdown content = file_writer::inject_anki_id(&content, ¬e_str, id); - id }; card_count += 1; @@ -115,10 +113,9 @@ impl CardCollector { let text_html = converter::markdown_to_html(&text_transformed); // Create or update note - let note_id = if let Some(id) = existing_id { + if let Some(id) = existing_id { // Update existing note self.repository.update_note(id, &[text_html])?; - id } else { // Create new note let id = self.repository.create_cloze_note( @@ -129,7 +126,6 @@ impl CardCollector { // Inject ID back into markdown content = file_writer::inject_anki_id(&content, ¬e_str, id); - id }; card_count += 1; diff --git a/ankiview/src/inka/infrastructure/config.rs b/ankiview/src/inka/infrastructure/config.rs index 88ce2ab..d300cf2 100644 --- a/ankiview/src/inka/infrastructure/config.rs +++ b/ankiview/src/inka/infrastructure/config.rs @@ -3,7 +3,7 @@ use anyhow::{Context, Result}; use std::path::Path; /// TOML configuration for inka collection -#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Default)] pub struct Config { #[serde(default)] pub defaults: Defaults, @@ -88,16 +88,6 @@ impl Default for HighlightConfig { } } -impl Default for Config { - fn default() -> Self { - Self { - defaults: Defaults::default(), - anki: AnkiConfig::default(), - highlight: HighlightConfig::default(), - } - } -} - impl Config { /// Load configuration from TOML file pub fn load(path: impl AsRef) -> Result { diff --git 
a/ankiview/src/inka/infrastructure/markdown/card_parser.rs b/ankiview/src/inka/infrastructure/markdown/card_parser.rs index 02fd426..1e8b554 100644 --- a/ankiview/src/inka/infrastructure/markdown/card_parser.rs +++ b/ankiview/src/inka/infrastructure/markdown/card_parser.rs @@ -88,8 +88,8 @@ fn clean_answer(answer_raw: &str) -> String { } else { without_prefix } - } else if line.starts_with('>') { - &line[1..] + } else if let Some(stripped) = line.strip_prefix('>') { + stripped } else { line } diff --git a/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs b/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs index 28b7dc4..d0fe332 100644 --- a/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs +++ b/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs @@ -66,9 +66,7 @@ pub fn convert_cloze_syntax(text: &str) -> String { // Restore protected blocks let result = restore_math_blocks(&result, math_blocks); - let result = restore_code_blocks(&result, code_blocks); - - result + restore_code_blocks(&result, code_blocks) } fn find_all_clozes(text: &str) -> Vec { diff --git a/ankiview/src/inka/mod.rs b/ankiview/src/inka/mod.rs index 41b2f99..3087c17 100644 --- a/ankiview/src/inka/mod.rs +++ b/ankiview/src/inka/mod.rs @@ -2,11 +2,3 @@ pub mod domain; pub mod application; pub mod infrastructure; pub mod cli; - -#[cfg(test)] -mod tests { - #[test] - fn given_empty_project_when_building_then_compiles() { - assert!(true); - } -} diff --git a/ankiview/tests/fixtures/build_test_collection.rs b/ankiview/tests/fixtures/build_test_collection.rs index e654ed4..938a2b6 100644 --- a/ankiview/tests/fixtures/build_test_collection.rs +++ b/ankiview/tests/fixtures/build_test_collection.rs @@ -112,12 +112,12 @@ fn create_test_media(media_dir: &std::path::Path) -> anyhow::Result<()> { ]; let rust_logo_path = media_dir.join("rust-logo.png"); - std::fs::write(&rust_logo_path, &rust_logo_png)?; + std::fs::write(&rust_logo_path, rust_logo_png)?; 
println!("Created test image: {:?}", rust_logo_path); // Create another simple PNG (sample.jpg - actually a PNG despite the name) let sample_path = media_dir.join("sample.jpg"); - std::fs::write(&sample_path, &rust_logo_png)?; + std::fs::write(&sample_path, rust_logo_png)?; println!("Created test image: {:?}", sample_path); Ok(()) diff --git a/ankiview/tests/helpers/mod.rs b/ankiview/tests/helpers/mod.rs index 7d087dd..b837a8f 100644 --- a/ankiview/tests/helpers/mod.rs +++ b/ankiview/tests/helpers/mod.rs @@ -47,6 +47,7 @@ impl TestCollection { } /// Open repository for this test collection + #[allow(dead_code)] pub fn open_repository(&self) -> Result { AnkiRepository::new(&self.collection_path) } diff --git a/ankiview/tests/test_anki.rs b/ankiview/tests/test_anki.rs index 606e89f..d922ca2 100644 --- a/ankiview/tests/test_anki.rs +++ b/ankiview/tests/test_anki.rs @@ -169,7 +169,7 @@ fn given_collection_when_listing_with_search_then_returns_filtered_notes() -> Re let notes = repo.list_notes(Some("Tree"))?; // Assert - assert!(notes.len() > 0); + assert!(!notes.is_empty()); assert!(notes.iter().any(|n| n.front.contains("Tree"))); Ok(()) } diff --git a/ankiview/tests/test_cli.rs b/ankiview/tests/test_cli.rs index 90ecdc9..019d49f 100644 --- a/ankiview/tests/test_cli.rs +++ b/ankiview/tests/test_cli.rs @@ -23,7 +23,7 @@ fn given_explicit_view_command_when_parsing_then_succeeds() { match parsed.command { Command::View { note_id, json } => { assert_eq!(note_id, 1234567890); - assert_eq!(json, false); + assert!(!json); } _ => panic!("Expected View command"), } @@ -90,7 +90,7 @@ fn given_global_profile_flag_when_parsing_then_succeeds() { match parsed.command { Command::View { note_id, json } => { assert_eq!(note_id, 1234567890); - assert_eq!(json, false); + assert!(!json); } _ => panic!("Expected View command"), } @@ -149,7 +149,7 @@ fn given_json_flag_when_parsing_view_command_then_json_is_true() { match parsed.command { Command::View { note_id, json } => { 
assert_eq!(note_id, 1234567890); - assert_eq!(json, true); + assert!(json); } _ => panic!("Expected View command"), } @@ -167,7 +167,7 @@ fn given_no_json_flag_when_parsing_view_command_then_json_is_false() { match parsed.command { Command::View { note_id, json } => { assert_eq!(note_id, 1234567890); - assert_eq!(json, false); + assert!(!json); } _ => panic!("Expected View command"), } @@ -185,7 +185,7 @@ fn given_json_flag_with_global_flags_when_parsing_then_succeeds() { match parsed.command { Command::View { note_id, json } => { assert_eq!(note_id, 1234567890); - assert_eq!(json, true); + assert!(json); } _ => panic!("Expected View command"), } diff --git a/ankiview/tests/test_collect.rs b/ankiview/tests/test_collect.rs index 119007a..ba44911 100644 --- a/ankiview/tests/test_collect.rs +++ b/ankiview/tests/test_collect.rs @@ -1,6 +1,5 @@ mod helpers; -use ankiview::application::NoteRepository; use anyhow::Result; use helpers::TestCollection; use std::fs; From ad380c381af2bcd3429aa4e0223820befb4c6e94 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 2 Nov 2025 11:53:46 +0100 Subject: [PATCH 26/32] feat: add media file conflict detection and --force flag Implement Phase 1 of inka2 collect parity by adding file comparison logic and --force flag to handle media file conflicts. Changes: - Add --force and --ignore-errors CLI flags to collect command - Implement byte-by-byte file comparison in media_handler - Add error on media file conflict without --force flag - Skip copy optimization for identical files - Update CardCollector to accept and thread force parameter - Add comprehensive tests for all conflict scenarios This is a breaking change: previously, conflicting media files were silently skipped. Now they error unless --force is specified. 
Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- ankiview/src/cli/args.rs | 8 ++ ankiview/src/infrastructure/anki.rs | 10 +- .../src/inka/application/card_collector.rs | 58 ++++---- ankiview/src/inka/domain/card.rs | 29 ++-- ankiview/src/inka/infrastructure/config.rs | 56 +++++--- .../src/inka/infrastructure/file_writer.rs | 11 +- ankiview/src/inka/infrastructure/hasher.rs | 30 ++-- .../infrastructure/markdown/card_parser.rs | 20 +-- .../markdown/cloze_converter.rs | 58 ++++---- .../inka/infrastructure/markdown/converter.rs | 8 +- .../infrastructure/markdown/mathjax_plugin.rs | 2 +- .../src/inka/infrastructure/markdown/mod.rs | 4 +- .../infrastructure/markdown/section_parser.rs | 20 ++- .../src/inka/infrastructure/media_handler.rs | 131 +++++++++++++++--- ankiview/src/inka/infrastructure/mod.rs | 6 +- ankiview/src/inka/mod.rs | 4 +- ankiview/src/lib.rs | 30 +++- ankiview/tests/test_collect.rs | 38 +++-- 18 files changed, 350 insertions(+), 173 deletions(-) diff --git a/ankiview/src/cli/args.rs b/ankiview/src/cli/args.rs index 1843851..69c1744 100644 --- a/ankiview/src/cli/args.rs +++ b/ankiview/src/cli/args.rs @@ -59,5 +59,13 @@ pub enum Command { /// Process directory recursively #[arg(short, long)] recursive: bool, + + /// Overwrite conflicting media files + #[arg(long)] + force: bool, + + /// Continue processing on errors without pausing + #[arg(short, long)] + ignore_errors: bool, }, } diff --git a/ankiview/src/infrastructure/anki.rs b/ankiview/src/infrastructure/anki.rs index 52bffd6..dc36d89 100644 --- a/ankiview/src/infrastructure/anki.rs +++ b/ankiview/src/infrastructure/anki.rs @@ -152,7 +152,8 @@ impl AnkiRepository { let mut note = Note::new(¬etype); note.set_field(0, front) .context("Failed to set front field")?; - note.set_field(1, back).context("Failed to set back field")?; + note.set_field(1, back) + .context("Failed to set back field")?; // Add tags for tag in tags { @@ -198,7 +199,8 @@ impl AnkiRepository { // 
Create a new note let mut note = Note::new(¬etype); - note.set_field(0, text).context("Failed to set text field")?; + note.set_field(0, text) + .context("Failed to set text field")?; // Add tags for tag in tags { @@ -481,9 +483,7 @@ mod tests { let (_temp_dir, mut repo) = create_test_collection().unwrap(); let cloze_text = "Answer: {{c1::42}}"; - let note_id = repo - .create_cloze_note(cloze_text, "Default", &[]) - .unwrap(); + let note_id = repo.create_cloze_note(cloze_text, "Default", &[]).unwrap(); // Should be able to retrieve the note let note = repo.get_note(note_id).unwrap(); diff --git a/ankiview/src/inka/application/card_collector.rs b/ankiview/src/inka/application/card_collector.rs index 67da4d5..9cf64f9 100644 --- a/ankiview/src/inka/application/card_collector.rs +++ b/ankiview/src/inka/application/card_collector.rs @@ -1,21 +1,22 @@ -use anyhow::{Context, Result}; -use std::path::{Path, PathBuf}; use crate::infrastructure::anki::AnkiRepository; -use crate::inka::infrastructure::markdown::section_parser; +use crate::inka::infrastructure::file_writer; use crate::inka::infrastructure::markdown::card_parser; use crate::inka::infrastructure::markdown::converter; -use crate::inka::infrastructure::file_writer; +use crate::inka::infrastructure::markdown::section_parser; +use anyhow::{Context, Result}; +use std::path::{Path, PathBuf}; /// Main use case for collecting markdown cards into Anki pub struct CardCollector { _collection_path: PathBuf, _media_dir: PathBuf, repository: AnkiRepository, + _force: bool, } impl CardCollector { /// Create a new CardCollector with Anki collection path - pub fn new(collection_path: impl AsRef) -> Result { + pub fn new(collection_path: impl AsRef, force: bool) -> Result { let collection_path = collection_path.as_ref().to_path_buf(); // Determine media directory path @@ -26,8 +27,7 @@ impl CardCollector { // Create media directory if it doesn't exist if !media_dir.exists() { - std::fs::create_dir_all(&media_dir) - 
.context("Failed to create media directory")?; + std::fs::create_dir_all(&media_dir).context("Failed to create media directory")?; } // Open repository @@ -37,6 +37,7 @@ impl CardCollector { _collection_path: collection_path, _media_dir: media_dir, repository, + _force: force, }) } @@ -63,8 +64,8 @@ impl CardCollector { for section in §ions { // Extract metadata - let deck_name = section_parser::extract_deck_name(section) - .unwrap_or_else(|| "Default".to_string()); + let deck_name = + section_parser::extract_deck_name(section).unwrap_or_else(|| "Default".to_string()); let tags = section_parser::extract_tags(section); // Extract note strings @@ -101,7 +102,6 @@ impl CardCollector { }; card_count += 1; - } else if card_parser::is_cloze_card(¬e_str) { // Parse cloze card let text_md = card_parser::parse_cloze_card_field(¬e_str)?; @@ -118,11 +118,9 @@ impl CardCollector { self.repository.update_note(id, &[text_html])?; } else { // Create new note - let id = self.repository.create_cloze_note( - &text_html, - &deck_name, - &tags, - )?; + let id = self + .repository + .create_cloze_note(&text_html, &deck_name, &tags)?; // Inject ID back into markdown content = file_writer::inject_anki_id(&content, ¬e_str, id); @@ -203,7 +201,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path).unwrap(); + let mut collector = CardCollector::new(&collection_path, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -221,7 +219,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path).unwrap(); + let mut collector = CardCollector::new(&collection_path, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -245,7 +243,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = 
CardCollector::new(&collection_path).unwrap(); + let mut collector = CardCollector::new(&collection_path, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 3); @@ -264,7 +262,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path).unwrap(); + let mut collector = CardCollector::new(&collection_path, false).unwrap(); // First run creates note let count1 = collector.process_file(&markdown_path).unwrap(); @@ -277,7 +275,7 @@ Deck: TestDeck // Modify the answer let modified = updated_content.replace( "A systems programming language", - "A safe systems programming language" + "A safe systems programming language", ); fs::write(&markdown_path, &modified).unwrap(); @@ -293,7 +291,7 @@ Deck: TestDeck let markdown_path = temp_dir.path().join("empty.md"); fs::write(&markdown_path, "Just text, no sections").unwrap(); - let mut collector = CardCollector::new(&collection_path).unwrap(); + let mut collector = CardCollector::new(&collection_path, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 0); @@ -312,27 +310,35 @@ Deck: TestDeck // File 1 in root notes dir let file1 = notes_dir.join("file1.md"); - fs::write(&file1, r#"--- + fs::write( + &file1, + r#"--- Deck: Test 1. Question 1? > Answer 1 ----"#).unwrap(); +---"#, + ) + .unwrap(); // File 2 in subdirectory let file2 = subdir.join("file2.md"); - fs::write(&file2, r#"--- + fs::write( + &file2, + r#"--- Deck: Test 1. Question 2? 
> Answer 2 ----"#).unwrap(); +---"#, + ) + .unwrap(); // Non-markdown file (should be ignored) let txt_file = notes_dir.join("readme.txt"); fs::write(&txt_file, "This is not markdown").unwrap(); - let mut collector = CardCollector::new(&collection_path).unwrap(); + let mut collector = CardCollector::new(&collection_path, false).unwrap(); let count = collector.process_directory(¬es_dir).unwrap(); // Should process both markdown files diff --git a/ankiview/src/inka/domain/card.rs b/ankiview/src/inka/domain/card.rs index af12ca9..588fa33 100644 --- a/ankiview/src/inka/domain/card.rs +++ b/ankiview/src/inka/domain/card.rs @@ -37,16 +37,14 @@ mod tests { #[test] fn given_basic_card_when_setting_deck_then_updates() { - let card = BasicCard::new("Q", "A") - .with_deck("MyDeck"); + let card = BasicCard::new("Q", "A").with_deck("MyDeck"); assert_eq!(card.deck_name(), "MyDeck"); } #[test] fn given_basic_card_when_setting_tags_then_stores() { - let card = BasicCard::new("Q", "A") - .with_tags(vec!["tag1".to_string(), "tag2".to_string()]); + let card = BasicCard::new("Q", "A").with_tags(vec!["tag1".to_string(), "tag2".to_string()]); assert_eq!(card.tags(), &["tag1", "tag2"]); } @@ -55,13 +53,15 @@ mod tests { fn given_text_with_cloze_when_creating_then_stores_text() { let card = ClozeCard::new("The capital of {{c1::France}} is {{c2::Paris}}"); - assert_eq!(card.text_md(), "The capital of {{c1::France}} is {{c2::Paris}}"); + assert_eq!( + card.text_md(), + "The capital of {{c1::France}} is {{c2::Paris}}" + ); } #[test] fn given_cloze_card_when_implementing_trait_then_provides_interface() { - let card = ClozeCard::new("Text {{c1::cloze}}") - .with_deck("TestDeck"); + let card = ClozeCard::new("Text {{c1::cloze}}").with_deck("TestDeck"); assert_eq!(card.deck_name(), "TestDeck"); assert_eq!(card.raw_fields(), vec!["Text {{c1::cloze}}"]); @@ -144,8 +144,12 @@ impl Card for BasicCard { fn html_fields(&self) -> Vec { vec![ - self.front_html.clone().unwrap_or_else(|| 
self.front_md.clone()), - self.back_html.clone().unwrap_or_else(|| self.back_md.clone()), + self.front_html + .clone() + .unwrap_or_else(|| self.front_md.clone()), + self.back_html + .clone() + .unwrap_or_else(|| self.back_md.clone()), ] } } @@ -221,8 +225,9 @@ impl Card for ClozeCard { } fn html_fields(&self) -> Vec { - vec![ - self.text_html.clone().unwrap_or_else(|| self.text_md.clone()) - ] + vec![self + .text_html + .clone() + .unwrap_or_else(|| self.text_md.clone())] } } diff --git a/ankiview/src/inka/infrastructure/config.rs b/ankiview/src/inka/infrastructure/config.rs index d300cf2..9c1efdf 100644 --- a/ankiview/src/inka/infrastructure/config.rs +++ b/ankiview/src/inka/infrastructure/config.rs @@ -1,5 +1,5 @@ -use serde::{Deserialize, Serialize}; use anyhow::{Context, Result}; +use serde::{Deserialize, Serialize}; use std::path::Path; /// TOML configuration for inka collection @@ -46,16 +46,36 @@ pub struct HighlightConfig { } // Default value functions -fn default_profile() -> String { String::new() } -fn default_deck() -> String { "Default".to_string() } -fn default_folder() -> String { String::new() } -fn default_path() -> String { String::new() } -fn default_basic_type() -> String { "Inka Basic".to_string() } -fn default_front_field() -> String { "Front".to_string() } -fn default_back_field() -> String { "Back".to_string() } -fn default_cloze_type() -> String { "Inka Cloze".to_string() } -fn default_cloze_field() -> String { "Text".to_string() } -fn default_highlight_style() -> String { "monokai".to_string() } +fn default_profile() -> String { + String::new() +} +fn default_deck() -> String { + "Default".to_string() +} +fn default_folder() -> String { + String::new() +} +fn default_path() -> String { + String::new() +} +fn default_basic_type() -> String { + "Inka Basic".to_string() +} +fn default_front_field() -> String { + "Front".to_string() +} +fn default_back_field() -> String { + "Back".to_string() +} +fn default_cloze_type() -> String { + "Inka 
Cloze".to_string() +} +fn default_cloze_field() -> String { + "Text".to_string() +} +fn default_highlight_style() -> String { + "monokai".to_string() +} impl Default for Defaults { fn default() -> Self { @@ -91,22 +111,20 @@ impl Default for HighlightConfig { impl Config { /// Load configuration from TOML file pub fn load(path: impl AsRef) -> Result { - let content = std::fs::read_to_string(path.as_ref()) - .context("Failed to read config file")?; + let content = + std::fs::read_to_string(path.as_ref()).context("Failed to read config file")?; - let config: Config = toml::from_str(&content) - .context("Failed to parse TOML config")?; + let config: Config = toml::from_str(&content).context("Failed to parse TOML config")?; Ok(config) } /// Save configuration to TOML file pub fn save(&self, path: impl AsRef) -> Result<()> { - let toml_string = toml::to_string_pretty(self) - .context("Failed to serialize config to TOML")?; + let toml_string = + toml::to_string_pretty(self).context("Failed to serialize config to TOML")?; - std::fs::write(path.as_ref(), toml_string) - .context("Failed to write config file")?; + std::fs::write(path.as_ref(), toml_string).context("Failed to write config file")?; Ok(()) } diff --git a/ankiview/src/inka/infrastructure/file_writer.rs b/ankiview/src/inka/infrastructure/file_writer.rs index d24f6e7..5067806 100644 --- a/ankiview/src/inka/infrastructure/file_writer.rs +++ b/ankiview/src/inka/infrastructure/file_writer.rs @@ -3,14 +3,12 @@ use std::path::Path; /// Read markdown file content pub fn read_markdown_file(path: impl AsRef) -> Result { - std::fs::read_to_string(path.as_ref()) - .context("Failed to read markdown file") + std::fs::read_to_string(path.as_ref()).context("Failed to read markdown file") } /// Write markdown content to file pub fn write_markdown_file(path: impl AsRef, content: &str) -> Result<()> { - std::fs::write(path.as_ref(), content) - .context("Failed to write markdown file") + std::fs::write(path.as_ref(), 
content).context("Failed to write markdown file") } /// Inject Anki ID before a note in markdown content @@ -152,7 +150,10 @@ Deck: Test let result = inject_anki_id(content, "1. Question", 1111111111); - assert_eq!(result, "Some text\n\n\n1. Question\n> Answer\n\nMore text"); + assert_eq!( + result, + "Some text\n\n\n1. Question\n> Answer\n\nMore text" + ); } #[test] diff --git a/ankiview/src/inka/infrastructure/hasher.rs b/ankiview/src/inka/infrastructure/hasher.rs index a43c997..30c57b4 100644 --- a/ankiview/src/inka/infrastructure/hasher.rs +++ b/ankiview/src/inka/infrastructure/hasher.rs @@ -1,13 +1,13 @@ use anyhow::{Context, Result}; -use sha2::{Sha256, Digest}; -use std::path::Path; -use std::collections::HashMap; use serde::{Deserialize, Serialize}; +use sha2::{Digest, Sha256}; +use std::collections::HashMap; +use std::path::Path; /// Calculate SHA256 hash of a file's content pub fn calculate_file_hash(path: impl AsRef) -> Result { - let content = std::fs::read_to_string(path.as_ref()) - .context("Failed to read file for hashing")?; + let content = + std::fs::read_to_string(path.as_ref()).context("Failed to read file for hashing")?; let mut hasher = Sha256::new(); hasher.update(content.as_bytes()); @@ -37,10 +37,9 @@ impl HashCache { let cache_path = path.as_ref().to_path_buf(); let hashes = if cache_path.exists() { - let content = std::fs::read_to_string(&cache_path) - .context("Failed to read hash cache file")?; - serde_json::from_str(&content) - .context("Failed to parse hash cache JSON")? + let content = + std::fs::read_to_string(&cache_path).context("Failed to read hash cache file")?; + serde_json::from_str(&content).context("Failed to parse hash cache JSON")? 
} else { HashMap::new() }; @@ -50,11 +49,10 @@ impl HashCache { /// Save hash cache to file pub fn save(&self) -> Result<()> { - let json = serde_json::to_string_pretty(&self.hashes) - .context("Failed to serialize hash cache")?; + let json = + serde_json::to_string_pretty(&self.hashes).context("Failed to serialize hash cache")?; - std::fs::write(&self.cache_path, json) - .context("Failed to write hash cache file")?; + std::fs::write(&self.cache_path, json).context("Failed to write hash cache file")?; Ok(()) } @@ -62,7 +60,8 @@ impl HashCache { /// Check if file has changed compared to cached hash /// Returns true if file is new or content has changed pub fn file_has_changed(&self, filepath: impl AsRef) -> Result { - let path_str = filepath.as_ref() + let path_str = filepath + .as_ref() .to_str() .ok_or_else(|| anyhow::anyhow!("Invalid file path"))? .to_string(); @@ -78,7 +77,8 @@ impl HashCache { /// Update hash for a file in the cache pub fn update_hash(&mut self, filepath: impl AsRef) -> Result<()> { - let path_str = filepath.as_ref() + let path_str = filepath + .as_ref() .to_str() .ok_or_else(|| anyhow::anyhow!("Invalid file path"))? .to_string(); diff --git a/ankiview/src/inka/infrastructure/markdown/card_parser.rs b/ankiview/src/inka/infrastructure/markdown/card_parser.rs index 1e8b554..3c87701 100644 --- a/ankiview/src/inka/infrastructure/markdown/card_parser.rs +++ b/ankiview/src/inka/infrastructure/markdown/card_parser.rs @@ -1,14 +1,13 @@ -use regex::Regex; -use lazy_static::lazy_static; use anyhow::Result; +use lazy_static::lazy_static; +use regex::Regex; lazy_static! 
{ - static ref BASIC_CARD_REGEX: Regex = Regex::new( - r"(?m)(?:^\n)?^\d+\.[\s\S]+?(?:^>.*?(?:\n|$))+" - ).expect("Failed to compile basic card regex"); - - static ref ID_REGEX: Regex = Regex::new(r"(?m)^$") - .expect("Failed to compile ID regex"); + static ref BASIC_CARD_REGEX: Regex = + Regex::new(r"(?m)(?:^\n)?^\d+\.[\s\S]+?(?:^>.*?(?:\n|$))+") + .expect("Failed to compile basic card regex"); + static ref ID_REGEX: Regex = + Regex::new(r"(?m)^$").expect("Failed to compile ID regex"); } pub fn is_basic_card(note_str: &str) -> bool { @@ -18,7 +17,10 @@ pub fn is_basic_card(note_str: &str) -> bool { pub fn is_cloze_card(note_str: &str) -> bool { // A cloze card has curly braces (for cloze deletions) // and doesn't have the answer marker (>) - note_str.contains('{') && !note_str.lines().any(|line| line.trim_start().starts_with('>')) + note_str.contains('{') + && !note_str + .lines() + .any(|line| line.trim_start().starts_with('>')) } pub fn parse_basic_card_fields(note_str: &str) -> Result<(String, String)> { diff --git a/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs b/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs index d0fe332..58b0e5f 100644 --- a/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs +++ b/ankiview/src/inka/infrastructure/markdown/cloze_converter.rs @@ -1,27 +1,21 @@ -use regex::Regex; use lazy_static::lazy_static; +use regex::Regex; lazy_static! 
{ - static ref ANKI_CLOZE_REGEX: Regex = Regex::new(r"\{\{c\d+::[\s\S]*?\}\}") - .expect("Failed to compile Anki cloze regex"); - + static ref ANKI_CLOZE_REGEX: Regex = + Regex::new(r"\{\{c\d+::[\s\S]*?\}\}").expect("Failed to compile Anki cloze regex"); static ref EXPLICIT_SHORT_CLOZE_REGEX: Regex = Regex::new(r"\{c?(\d+)::([\s\S]*?)\}") .expect("Failed to compile explicit short cloze regex"); - - static ref IMPLICIT_SHORT_CLOZE_REGEX: Regex = Regex::new(r"\{([\s\S]*?)\}") - .expect("Failed to compile implicit short cloze regex"); - - static ref CODE_BLOCK_REGEX: Regex = Regex::new(r"```[\s\S]+?```") - .expect("Failed to compile code block regex"); - - static ref INLINE_CODE_REGEX: Regex = Regex::new(r"`[\S\s]+?`") - .expect("Failed to compile inline code regex"); - - static ref BLOCK_MATH_REGEX: Regex = Regex::new(r"\$\$[\s\S]+?\$\$") - .expect("Failed to compile block math regex"); - - static ref INLINE_MATH_REGEX: Regex = Regex::new(r"\$[^\s$][^$]*?\$") - .expect("Failed to compile inline math regex"); + static ref IMPLICIT_SHORT_CLOZE_REGEX: Regex = + Regex::new(r"\{([\s\S]*?)\}").expect("Failed to compile implicit short cloze regex"); + static ref CODE_BLOCK_REGEX: Regex = + Regex::new(r"```[\s\S]+?```").expect("Failed to compile code block regex"); + static ref INLINE_CODE_REGEX: Regex = + Regex::new(r"`[\S\s]+?`").expect("Failed to compile inline code regex"); + static ref BLOCK_MATH_REGEX: Regex = + Regex::new(r"\$\$[\s\S]+?\$\$").expect("Failed to compile block math regex"); + static ref INLINE_MATH_REGEX: Regex = + Regex::new(r"\$[^\s$][^$]*?\$").expect("Failed to compile inline math regex"); } pub fn is_anki_cloze(text: &str) -> bool { @@ -115,13 +109,17 @@ fn protect_code_blocks(text: &str) -> (String, Vec) { for mat in CODE_BLOCK_REGEX.find_iter(text) { blocks.push(mat.as_str().to_string()); } - result = CODE_BLOCK_REGEX.replace_all(&result, "___CODE_BLOCK___").to_string(); + result = CODE_BLOCK_REGEX + .replace_all(&result, "___CODE_BLOCK___") + 
.to_string(); // Inline code for mat in INLINE_CODE_REGEX.find_iter(&result) { blocks.push(mat.as_str().to_string()); } - result = INLINE_CODE_REGEX.replace_all(&result, "___INLINE_CODE___").to_string(); + result = INLINE_CODE_REGEX + .replace_all(&result, "___INLINE_CODE___") + .to_string(); (result, blocks) } @@ -134,13 +132,17 @@ fn protect_math_blocks(text: &str) -> (String, Vec) { for mat in BLOCK_MATH_REGEX.find_iter(text) { blocks.push(mat.as_str().to_string()); } - result = BLOCK_MATH_REGEX.replace_all(&result, "___MATH_BLOCK___").to_string(); + result = BLOCK_MATH_REGEX + .replace_all(&result, "___MATH_BLOCK___") + .to_string(); // Inline math - now the $$ are already protected for mat in INLINE_MATH_REGEX.find_iter(&result) { blocks.push(mat.as_str().to_string()); } - result = INLINE_MATH_REGEX.replace_all(&result, "___INLINE_MATH___").to_string(); + result = INLINE_MATH_REGEX + .replace_all(&result, "___INLINE_MATH___") + .to_string(); (result, blocks) } @@ -202,7 +204,10 @@ mod tests { let input = "First {one} then {two} finally {three}"; let output = convert_cloze_syntax(input); - assert_eq!(output, "First {{c1::one}} then {{c2::two}} finally {{c3::three}}"); + assert_eq!( + output, + "First {{c1::one}} then {{c2::two}} finally {{c3::three}}" + ); } #[test] @@ -227,6 +232,9 @@ mod tests { let input = "Equation {answer} is $$x^{2}$$ and inline $y^{3}$"; let output = convert_cloze_syntax(input); - assert_eq!(output, "Equation {{c1::answer}} is $$x^{2}$$ and inline $y^{3}$"); + assert_eq!( + output, + "Equation {{c1::answer}} is $$x^{2}$$ and inline $y^{3}$" + ); } } diff --git a/ankiview/src/inka/infrastructure/markdown/converter.rs b/ankiview/src/inka/infrastructure/markdown/converter.rs index eb7f74d..dfdbd90 100644 --- a/ankiview/src/inka/infrastructure/markdown/converter.rs +++ b/ankiview/src/inka/infrastructure/markdown/converter.rs @@ -1,11 +1,11 @@ +use super::mathjax_plugin::add_mathjax_plugin; +use lazy_static::lazy_static; use 
markdown_it::MarkdownIt; use regex::Regex; -use lazy_static::lazy_static; -use super::mathjax_plugin::add_mathjax_plugin; lazy_static! { - static ref NEWLINE_TAG_REGEX: Regex = Regex::new(r"\n?(<.+?>)\n?") - .expect("Failed to compile newline tag regex"); + static ref NEWLINE_TAG_REGEX: Regex = + Regex::new(r"\n?(<.+?>)\n?").expect("Failed to compile newline tag regex"); } pub fn markdown_to_html(text: &str) -> String { diff --git a/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs index 8456e46..a281f79 100644 --- a/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs +++ b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs @@ -1,5 +1,5 @@ -use markdown_it::parser::inline::{InlineRule, InlineState}; use markdown_it::parser::block::{BlockRule, BlockState}; +use markdown_it::parser::inline::{InlineRule, InlineState}; use markdown_it::{MarkdownIt, Node, NodeValue, Renderer}; #[derive(Debug)] diff --git a/ankiview/src/inka/infrastructure/markdown/mod.rs b/ankiview/src/inka/infrastructure/markdown/mod.rs index d6ceea7..dc922f8 100644 --- a/ankiview/src/inka/infrastructure/markdown/mod.rs +++ b/ankiview/src/inka/infrastructure/markdown/mod.rs @@ -1,6 +1,6 @@ // Markdown processing module -pub mod mathjax_plugin; -pub mod section_parser; pub mod card_parser; pub mod cloze_converter; pub mod converter; +pub mod mathjax_plugin; +pub mod section_parser; diff --git a/ankiview/src/inka/infrastructure/markdown/section_parser.rs b/ankiview/src/inka/infrastructure/markdown/section_parser.rs index 8c8a8e6..7688c8c 100644 --- a/ankiview/src/inka/infrastructure/markdown/section_parser.rs +++ b/ankiview/src/inka/infrastructure/markdown/section_parser.rs @@ -1,5 +1,5 @@ -use regex::Regex; use lazy_static::lazy_static; +use regex::Regex; pub struct SectionParser { section_regex: Regex, @@ -9,8 +9,8 @@ impl SectionParser { pub fn new() -> Self { // Regex pattern: ^---\n(.+?)^---$ // Multiline and 
dotall flags - let section_regex = Regex::new(r"(?ms)^---\n(.+?)^---$") - .expect("Failed to compile section regex"); + let section_regex = + Regex::new(r"(?ms)^---\n(.+?)^---$").expect("Failed to compile section regex"); Self { section_regex } } @@ -31,14 +31,12 @@ impl Default for SectionParser { } lazy_static! { - static ref DECK_REGEX: Regex = Regex::new(r"(?m)^Deck:[ \t]*(.+?)$") - .expect("Failed to compile deck regex"); - - static ref TAGS_REGEX: Regex = Regex::new(r"(?m)^Tags:[ \t]*(.+?)$") - .expect("Failed to compile tags regex"); - - static ref NOTE_START_REGEX: Regex = Regex::new(r"(?m)^(?:\n)?^\d+\.") - .expect("Failed to compile note start regex"); + static ref DECK_REGEX: Regex = + Regex::new(r"(?m)^Deck:[ \t]*(.+?)$").expect("Failed to compile deck regex"); + static ref TAGS_REGEX: Regex = + Regex::new(r"(?m)^Tags:[ \t]*(.+?)$").expect("Failed to compile tags regex"); + static ref NOTE_START_REGEX: Regex = + Regex::new(r"(?m)^(?:\n)?^\d+\.").expect("Failed to compile note start regex"); } pub fn extract_deck_name(section: &str) -> Option { diff --git a/ankiview/src/inka/infrastructure/media_handler.rs b/ankiview/src/inka/infrastructure/media_handler.rs index 02dc428..570af18 100644 --- a/ankiview/src/inka/infrastructure/media_handler.rs +++ b/ankiview/src/inka/infrastructure/media_handler.rs @@ -1,5 +1,5 @@ -use regex::Regex; use lazy_static::lazy_static; +use regex::Regex; lazy_static! 
{ // Match markdown images: ![alt](path) @@ -46,6 +46,7 @@ pub fn extract_image_paths(markdown: &str) -> Vec { pub fn copy_media_to_anki( source_path: &std::path::Path, media_dir: &std::path::Path, + force: bool, ) -> anyhow::Result { use anyhow::Context; @@ -57,18 +58,67 @@ pub fn copy_media_to_anki( let dest_path = media_dir.join(filename); - // Skip copying if file already exists in media directory - if !dest_path.exists() { - std::fs::copy(source_path, &dest_path) - .context("Failed to copy media file")?; + // Check if file exists in media directory + if dest_path.exists() { + // Use filecmp equivalent - compare file contents + let files_identical = files_are_identical(source_path, &dest_path) + .context("Failed to compare file contents")?; + + if files_identical { + // Same file already exists - optimization, skip copy + return Ok(filename.to_string()); + } + + // Files have different content + if !force { + // Error on conflict without --force + return Err(anyhow::anyhow!( + "Different file with the same name \"{}\" already exists in Anki Media folder. 
\ + Use --force to overwrite.", + filename + )); + } + + // force=true: overwrite existing file } + // Copy file (either new or force overwrite) + std::fs::copy(source_path, &dest_path).context("Failed to copy media file")?; + Ok(filename.to_string()) } +/// Compare two files for identical content +fn files_are_identical(path1: &std::path::Path, path2: &std::path::Path) -> anyhow::Result { + use std::io::Read; + + let mut file1 = std::fs::File::open(path1)?; + let mut file2 = std::fs::File::open(path2)?; + + // Quick size check first + let meta1 = file1.metadata()?; + let meta2 = file2.metadata()?; + + if meta1.len() != meta2.len() { + return Ok(false); + } + + // Compare contents byte by byte + let mut buf1 = Vec::new(); + let mut buf2 = Vec::new(); + + file1.read_to_end(&mut buf1)?; + file2.read_to_end(&mut buf2)?; + + Ok(buf1 == buf2) +} + /// Update image paths in HTML to use Anki media filenames /// Takes a mapping of original paths to Anki filenames -pub fn update_media_paths_in_html(html: &str, path_mapping: &std::collections::HashMap) -> String { +pub fn update_media_paths_in_html( + html: &str, + path_mapping: &std::collections::HashMap, +) -> String { let mut result = html.to_string(); // Replace each original path with its Anki filename @@ -164,7 +214,7 @@ HTTPS: ![secure](https://example.com/photo.png) let media_dir = temp_dir.path().join("collection.media"); fs::create_dir(&media_dir).unwrap(); - let filename = copy_media_to_anki(&source_file, &media_dir).unwrap(); + let filename = copy_media_to_anki(&source_file, &media_dir, false).unwrap(); // Should return just the filename assert_eq!(filename, "test_image.png"); @@ -179,28 +229,77 @@ HTTPS: ![secure](https://example.com/photo.png) } #[test] - fn given_existing_file_when_copying_then_skips_duplicate() { + fn given_identical_file_when_copying_without_force_then_skips() { use std::fs; use tempfile::TempDir; let temp_dir = TempDir::new().unwrap(); let source_file = temp_dir.path().join("image.png"); - 
fs::write(&source_file, b"original").unwrap(); + fs::write(&source_file, b"same content").unwrap(); let media_dir = temp_dir.path().join("collection.media"); fs::create_dir(&media_dir).unwrap(); - // Pre-create the file in media dir + // Pre-create identical file in media dir let existing_file = media_dir.join("image.png"); - fs::write(&existing_file, b"already exists").unwrap(); + fs::write(&existing_file, b"same content").unwrap(); // Copy should succeed and return filename - let filename = copy_media_to_anki(&source_file, &media_dir).unwrap(); + let filename = copy_media_to_anki(&source_file, &media_dir, false).unwrap(); + assert_eq!(filename, "image.png"); + + // Should not overwrite (content stays same but we verify no error) + let content = fs::read(&existing_file).unwrap(); + assert_eq!(content, b"same content"); + } + + #[test] + fn given_different_file_when_copying_without_force_then_errors() { + use std::fs; + use tempfile::TempDir; + + let temp_dir = TempDir::new().unwrap(); + let source_file = temp_dir.path().join("image.png"); + fs::write(&source_file, b"new content").unwrap(); + + let media_dir = temp_dir.path().join("collection.media"); + fs::create_dir(&media_dir).unwrap(); + + // Pre-create different file in media dir + let existing_file = media_dir.join("image.png"); + fs::write(&existing_file, b"old content").unwrap(); + + // Copy should fail with error about conflict + let result = copy_media_to_anki(&source_file, &media_dir, false); + assert!(result.is_err()); + let error_msg = result.unwrap_err().to_string(); + assert!(error_msg.contains("already exists")); + assert!(error_msg.contains("--force")); + } + + #[test] + fn given_different_file_when_copying_with_force_then_overwrites() { + use std::fs; + use tempfile::TempDir; + + let temp_dir = TempDir::new().unwrap(); + let source_file = temp_dir.path().join("image.png"); + fs::write(&source_file, b"new content").unwrap(); + + let media_dir = temp_dir.path().join("collection.media"); + 
fs::create_dir(&media_dir).unwrap(); + + // Pre-create different file in media dir + let existing_file = media_dir.join("image.png"); + fs::write(&existing_file, b"old content").unwrap(); + + // Copy with force should succeed + let filename = copy_media_to_anki(&source_file, &media_dir, true).unwrap(); assert_eq!(filename, "image.png"); - // Should not overwrite existing file + // Should overwrite with new content let content = fs::read(&existing_file).unwrap(); - assert_eq!(content, b"already exists"); + assert_eq!(content, b"new content"); } #[test] @@ -214,7 +313,7 @@ HTTPS: ![secure](https://example.com/photo.png) let media_dir = temp_dir.path().join("collection.media"); fs::create_dir(&media_dir).unwrap(); - let result = copy_media_to_anki(&nonexistent, &media_dir); + let result = copy_media_to_anki(&nonexistent, &media_dir, false); assert!(result.is_err()); } @@ -233,7 +332,7 @@ HTTPS: ![secure](https://example.com/photo.png) let media_dir = temp_dir.path().join("collection.media"); fs::create_dir(&media_dir).unwrap(); - let filename = copy_media_to_anki(&source_file, &media_dir).unwrap(); + let filename = copy_media_to_anki(&source_file, &media_dir, false).unwrap(); // Should return just filename, not path assert_eq!(filename, "photo.jpg"); diff --git a/ankiview/src/inka/infrastructure/mod.rs b/ankiview/src/inka/infrastructure/mod.rs index 80eb7f9..2cdc1d8 100644 --- a/ankiview/src/inka/infrastructure/mod.rs +++ b/ankiview/src/inka/infrastructure/mod.rs @@ -1,5 +1,5 @@ -pub mod markdown; -pub mod file_writer; -pub mod media_handler; pub mod config; +pub mod file_writer; pub mod hasher; +pub mod markdown; +pub mod media_handler; diff --git a/ankiview/src/inka/mod.rs b/ankiview/src/inka/mod.rs index 3087c17..6cc80f2 100644 --- a/ankiview/src/inka/mod.rs +++ b/ankiview/src/inka/mod.rs @@ -1,4 +1,4 @@ -pub mod domain; pub mod application; -pub mod infrastructure; pub mod cli; +pub mod domain; +pub mod infrastructure; diff --git a/ankiview/src/lib.rs 
b/ankiview/src/lib.rs index a1a61ba..c065803 100644 --- a/ankiview/src/lib.rs +++ b/ankiview/src/lib.rs @@ -3,9 +3,9 @@ pub mod application; pub mod cli; pub mod domain; pub mod infrastructure; +pub mod inka; pub mod ports; pub mod util; -pub mod inka; use crate::cli::args::{Args, Command}; use anyhow::{Context, Result}; @@ -34,7 +34,12 @@ pub fn run(args: Args) -> Result<()> { Command::View { note_id, json } => handle_view_command(note_id, json, collection_path), Command::Delete { note_id } => handle_delete_command(note_id, collection_path), Command::List { search } => handle_list_command(search.as_deref(), collection_path), - Command::Collect { path, recursive } => handle_collect_command(path, recursive, collection_path), + Command::Collect { + path, + recursive, + force, + ignore_errors, + } => handle_collect_command(path, recursive, force, ignore_errors, collection_path), } } @@ -115,13 +120,22 @@ fn handle_list_command(search_query: Option<&str>, collection_path: PathBuf) -> Ok(()) } -fn handle_collect_command(path: PathBuf, recursive: bool, collection_path: PathBuf) -> Result<()> { +fn handle_collect_command( + path: PathBuf, + recursive: bool, + force: bool, + ignore_errors: bool, + collection_path: PathBuf, +) -> Result<()> { use crate::inka::application::card_collector::CardCollector; - info!(?path, recursive, "Collecting markdown cards"); + info!( + ?path, + recursive, force, ignore_errors, "Collecting markdown cards" + ); - // Initialize collector - let mut collector = CardCollector::new(&collection_path)?; + // Initialize collector with force flag + let mut collector = CardCollector::new(&collection_path, force)?; // Process based on path type let total_cards = if path.is_file() { @@ -137,7 +151,9 @@ fn handle_collect_command(path: PathBuf, recursive: bool, collection_path: PathB for entry in std::fs::read_dir(&path)? 
{ let entry = entry?; let entry_path = entry.path(); - if entry_path.is_file() && entry_path.extension().and_then(|s| s.to_str()) == Some("md") { + if entry_path.is_file() + && entry_path.extension().and_then(|s| s.to_str()) == Some("md") + { count += collector.process_file(&entry_path)?; } } diff --git a/ankiview/tests/test_collect.rs b/ankiview/tests/test_collect.rs index ba44911..d702b0e 100644 --- a/ankiview/tests/test_collect.rs +++ b/ankiview/tests/test_collect.rs @@ -26,7 +26,8 @@ Deck: IntegrationTest // Act let mut collector = ankiview::inka::application::card_collector::CardCollector::new( - &test_collection.collection_path + &test_collection.collection_path, + false, )?; let count = collector.process_file(&markdown_path)?; @@ -35,7 +36,10 @@ Deck: IntegrationTest // Verify IDs were injected let updated_content = fs::read_to_string(&markdown_path)?; - assert!(updated_content.contains(" +1. This is an image test! +> ![mungoggo](munggoggo.png) +> answer + +--- + diff --git a/ankiview/examples/image-test.md.ori b/ankiview/examples/image-test.md.ori new file mode 100644 index 0000000..714ec59 --- /dev/null +++ b/ankiview/examples/image-test.md.ori @@ -0,0 +1,15 @@ +# image test +1. Without --force: Create a card with an image, modify the image file content, then run collect + again - should error with "Different file with the same name ... Use --force to overwrite." +2. With --force: Same scenario above but with --force flag - should successfully overwrite the + media file +3. Identical media file: Run collect twice with same image file - should skip copy operation in + both modes (no error) + +--- +1. This is an image test! 
+> ![mungoggo](munggoggo.png) +> answer + +--- + diff --git a/ankiview/examples/munggoggo.png b/ankiview/examples/munggoggo.png new file mode 100644 index 0000000000000000000000000000000000000000..f24ea2b55e3bed0dbc86a2946d7f525ba25f0615 GIT binary patch literal 1794 zcmeAS@N?(olHy`uVBq!ia0y~yV9W)w^*ES;vet#R2kF#(11d%E^Uojv`PLE+fS z*YQDbcX#Pr-PY8|waf2#M8NVB%|&buKG(`N`@X7Y*O;D_o_kw(N9D6c1_3@&3pSZt zxYFSWwz~N&GlSv0EB`0SKB(<||ATLXcJ{XjIm3SA-xZAKyO*v%`WfPa?~l)2|F}2c z@FGW$3m8AK=oUE7yZ-mi^?zGq_!*LHR^ENFb6JqW)p=f-rzdCmuiBP-o_Tk1!TjcE zfrRYKv*RxC{?<%lXsQ%oUZBr+<%gBr<(m#E3ExEKJOYM_!H$c{a~DWgu^wSy`K-X^ z5HGXhW2w!tt8z1JS$_xbU}cO7y1Rz$?ge&bHia4Nh77+RIrOf7Y`Ah8zoY(fgN#*< zt8)9RMJ9`Jv#z+iMfIxR*0-EF#b&YGE9NJh&D_4`ZOHFe;j7*-tgr8Loxl9x4?Y8b z^YFtKa}rG?>UYij^x)U);-_;`P6>HzRS*4rpP%Dk?kd@>U;4Ig*qWJ@%gET!*T`zf zAR(*BmT+cjlhlHs+XuJ)UCMJl*{11sp!fR-Zo>p_CIN<;*$j)7&ze2+a^!tb`Re1L zZ^AMr3Efucj@hzUGOc9e57Zhgn-6*r)eH-RgkN z-4!dZ_|M5(X0zkpub6G|&t`cwhE!`U)$PxZ`q!t;X3jAG`Q-JV(>V`3aAOw8U}&1B z>39G4_Ku4To~EXEOr&)Zm=7|1;5$_<{r;Po~FRy z5ai2i4%BNVz|(MRz1efYFUyx}eS282Y4$ { + debug!("Copied media file: {} -> {}", image_path, filename); + path_mapping.insert(image_path.clone(), filename); + } + Err(e) => { + return Err(e) + .with_context(|| format!("Failed to copy media file '{}'", image_path)); + } + } + } + // Parse sections let parser = section_parser::SectionParser::new(); let sections = parser.parse(&content); @@ -81,8 +112,14 @@ impl CardCollector { let (front_md, back_md) = card_parser::parse_basic_card_fields(¬e_str)?; // Convert to HTML - let front_html = converter::markdown_to_html(&front_md); - let back_html = converter::markdown_to_html(&back_md); + let mut front_html = converter::markdown_to_html(&front_md); + let mut back_html = converter::markdown_to_html(&back_md); + + // Update media paths in HTML + front_html = + media_handler::update_media_paths_in_html(&front_html, &path_mapping); + back_html = + media_handler::update_media_paths_in_html(&back_html, &path_mapping); // Create or 
update note if let Some(id) = existing_id { @@ -110,7 +147,11 @@ impl CardCollector { let text_transformed = crate::inka::infrastructure::markdown::cloze_converter::convert_cloze_syntax(&text_md); // Convert to HTML - let text_html = converter::markdown_to_html(&text_transformed); + let mut text_html = converter::markdown_to_html(&text_transformed); + + // Update media paths in HTML + text_html = + media_handler::update_media_paths_in_html(&text_html, &path_mapping); // Create or update note if let Some(id) = existing_id { @@ -177,7 +218,7 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); let fixture_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .join("tests/fixtures/test_collection/collection.anki2"); + .join("tests/fixtures/test_collection/User 1/collection.anki2"); let collection_path = temp_dir.path().join("collection.anki2"); std::fs::copy(&fixture_path, &collection_path).unwrap(); @@ -344,4 +385,43 @@ Deck: Test // Should process both markdown files assert_eq!(count, 2); } + + #[test] + fn given_markdown_with_image_when_processing_then_copies_media_file() { + let (temp_dir, collection_path, media_dir) = create_test_collection(); + + // Create a test image file + let images_dir = temp_dir.path().join("images"); + fs::create_dir(&images_dir).unwrap(); + let source_image = images_dir.join("test_photo.png"); + fs::write(&source_image, b"fake png data").unwrap(); + + // Create markdown with image reference + let markdown_path = temp_dir.path().join("with_image.md"); + let markdown_content = r#"--- +Deck: TestDeck + +1. What is this image? 
+> ![test image](images/test_photo.png) +> This is a test +---"#; + fs::write(&markdown_path, markdown_content).unwrap(); + + // Process the file + let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let count = collector.process_file(&markdown_path).unwrap(); + + assert_eq!(count, 1); + + // Verify image was copied to media directory + let copied_image = media_dir.join("test_photo.png"); + assert!( + copied_image.exists(), + "Image should be copied to media directory" + ); + + // Verify image content is correct + let copied_content = fs::read(&copied_image).unwrap(); + assert_eq!(copied_content, b"fake png data"); + } } diff --git a/ankiview/tests/fixtures/README.md b/ankiview/tests/fixtures/README.md index 832f3e0..85fc4e4 100644 --- a/ankiview/tests/fixtures/README.md +++ b/ankiview/tests/fixtures/README.md @@ -3,7 +3,7 @@ ## Golden Test Dataset **Source**: `/Users/Q187392/dev/s/private/ankiview/data/testuser/` -**Fixture Location**: `test_collection/` +**Fixture Location**: `test_collection/User 1/` **IMPORTANT**: The golden dataset in the source location is READ-ONLY. Never modify it. All tests work with copies. diff --git a/ankiview/tests/fixtures/copy_golden_dataset.sh b/ankiview/tests/fixtures/copy_golden_dataset.sh index efc4b56..2a5413e 100755 --- a/ankiview/tests/fixtures/copy_golden_dataset.sh +++ b/ankiview/tests/fixtures/copy_golden_dataset.sh @@ -5,7 +5,7 @@ set -euo pipefail GOLDEN_SOURCE="/Users/Q187392/dev/s/private/ankiview/data/testuser" -FIXTURE_TARGET="ankiview/tests/fixtures/test_collection" +FIXTURE_TARGET="ankiview/tests/fixtures/test_collection/User 1" echo "Copying golden dataset to test fixtures..." 
diff --git a/ankiview/tests/fixtures/gh_activity.png b/ankiview/tests/fixtures/gh_activity.png new file mode 100644 index 0000000000000000000000000000000000000000..f24ea2b55e3bed0dbc86a2946d7f525ba25f0615 GIT binary patch literal 1794 zcmeAS@N?(olHy`uVBq!ia0y~yV9W)w^*ES;vet#R2kF#(11d%E^Uojv`PLE+fS z*YQDbcX#Pr-PY8|waf2#M8NVB%|&buKG(`N`@X7Y*O;D_o_kw(N9D6c1_3@&3pSZt zxYFSWwz~N&GlSv0EB`0SKB(<||ATLXcJ{XjIm3SA-xZAKyO*v%`WfPa?~l)2|F}2c z@FGW$3m8AK=oUE7yZ-mi^?zGq_!*LHR^ENFb6JqW)p=f-rzdCmuiBP-o_Tk1!TjcE zfrRYKv*RxC{?<%lXsQ%oUZBr+<%gBr<(m#E3ExEKJOYM_!H$c{a~DWgu^wSy`K-X^ z5HGXhW2w!tt8z1JS$_xbU}cO7y1Rz$?ge&bHia4Nh77+RIrOf7Y`Ah8zoY(fgN#*< zt8)9RMJ9`Jv#z+iMfIxR*0-EF#b&YGE9NJh&D_4`ZOHFe;j7*-tgr8Loxl9x4?Y8b z^YFtKa}rG?>UYij^x)U);-_;`P6>HzRS*4rpP%Dk?kd@>U;4Ig*qWJ@%gET!*T`zf zAR(*BmT+cjlhlHs+XuJ)UCMJl*{11sp!fR-Zo>p_CIN<;*$j)7&ze2+a^!tb`Re1L zZ^AMr3Efucj@hzUGOc9e57Zhgn-6*r)eH-RgkN z-4!dZ_|M5(X0zkpub6G|&t`cwhE!`U)$PxZ`q!t;X3jAG`Q-JV(>V`3aAOw8U}&1B z>39G4_Ku4To~EXEOr&)Zm=7|1;5$_<{r;Po~FRy z5ai2i4%BNVz|(MRz1efYFUyx}eS282Y4$f=&Ml)tG`Mi+w=2H)}FtPrT*qLF5L6n8S)<)l71d5QJ6$Y zrow7%ZOtwraeXitpStih`DPfnD%}z^8TXJ63x_$e$?t4wv~JHX~czO#>tit6irP1hfUOWaqY_x^#mcN5|egP50#E27-6 z7B4kgOiZk1DmY)N(cCru{QQ;Bvk|5m*A?Z2%*;C_j}N8h=I8s@$MgH~Z@iLw|atuSs8D-jIX5k-(m5{@bK`tFUx%dbckC;C=|t= zw2QOj6}(Rgg5u)0vGGV)U(?=^|3Ki#b?1&!pyq=Ia=esTHxPq0HA3fZc6N4=$F474 zaPsl-o!p)q?C1#k_t&K^m^ejab#*oVt+;bSIQfl)t}exMVSfHNYHI4Gw$k$Q^2<9U z^jy=^)3%O{O-)Y&IjBew{QUfLf2(V1DBXAS@@~4F>^(KP{QdodR@Zwgt=?GX{_q=q z5tS5UETh=7l(WY>^Pjl1J$L5rFA7qW*42q5ynU-4_4@VIat3CHwX?Gp6BAQJe*Ucz zvsRe~@A;1&s^1$NUF~M;?DP)!S00elAtIanE+kKdGo`#KFVf91Hn2!$eGWI@Ojj7^ zzmz*W`mGATV%*4!)?9TsO)&U;uakUrf2CJ)-k4AB!GiZM=DG&^Q?K?~X|KJ2aml2AvcjeBJwJNwTQD#(s=tnkD&5jSA|Du37%AO>N8Z$$)IBf|b$PN9Cn7GM zYGY$FLCPpL#-! 
znb#n1(pp&=k58ch1HlhpEk!h0W35g}PygWZxbjlF$O;TiWNB`3cz^S?+Q$`B7TU(B5xth5m-e<`mlv|8v3X;!aG}wY7<#B=NB65X$dl{j)UE1TZUj z$ig4}c5ePVa&PsmbIYQSY`1YNHNQMwtt3);ibNvgD~vzdK5 z;JJz(9v`P(NUU;IR#v)C7CoF3HdSvlx9fxZYnRIR#A^4%japAnZ??tvAEAq6P&P-cYR`y%A)~1m6pVijPzpDgr&G9tSg>WlKn(lp?!o{&3&63Wm-}*WH zwxqVUc6*fD+SS!H7a9O<#Y0jWJ|tHE;9$O{re=+&d&%n$AH=0ymN0f?Bb8iT1(i*U z_1?#BkMde?EHvNS*>DZ%yg=b-o0w=6X=X;HTSXH-d-klx_w0p6I|pumfB$MIooQQD zgs!=hQ>+s`EiLWIC=Cq_l10`JVfZK})zl}FNzy&ylO#b1aSB6@ckB&W%`j3%&Pd|m z=4NT-#t_+DxO;iQ_I`dcDdpt|U0vOqKGOd=I*xl-{#or$@C&;7_bYo}%0^c=fotYN zLqktQu8fRKf%o5^>x^q_YoYkb#KgpckA`Cy#rrlU-;Xoj5);!;@+o$4abXn{6hz*L z^=ULOVY_`Beuny%6=Jk%#k)}N*G+kdllk%rTZ$V$Ny0| z3O`gZ?!ejGEUWra!z&=5z4a@Q5Ie=ux#b_HQDb=^;Y|?{>aAk8&!H`tiJ{5KG)AF& zyG~9{>*d7LX^Dx6<-00;(efxBUS6FDN=ix>R+G;}9L|z{ z8_gt|m04MNdB4)7$Idn;9QZQS)q+MZ=y=-1+l9o{@2eyAFNV9iuu{%bsX9 zzyh3pettunwU)hPB-g3v=z6y^dq_f0CVHq@Al2<##C3PRu39 zX!rXQZ>O6YGq!EIxw^Jv-6X|D)F1u+zQS%BudZR(sv}fXSC_=e$vFXqGzooUb7zO| z?EThz^}&=o8(+!{l$>9~gws%5UR&ef;tmcC#f^!MrfxRPz}gj-k}|2->V^Zh)6|S* z_kbyKv%;V%Yuk6ju)DvX@VmppXYch@-lBU^O!U#_FbH2mTcL=P=K4a#ym|QDxm9(V zlY@ifV5&r~4*K#`PfyYG$HT1|t?Eq1g{@k}nc3Nlqn&xiY@LBxZz%kAaT^B*-j~@v zrqy+I1&*_Ixs2jY1%!lz3H32w*Xzw;_>8w+p1y`QnJNNRl5n6oOA9?n^1rVFby$U* zFu^KKn;AwiJ6Nj>`E6_Am0L0h6;j@~5!Yd5V-x0q=O7>=lC(3|lqlnK@E}EU?bokK z0D8Pebvbfx>+QcpRX+KYvhc}MYnS}VR)zWf&fTxo*jg97hQ*Hxb@G)(I60N4#Ri9l z3IMg7zO@Yv3CZy&b+0sQ)qE>r7o=iT^zdyFK^YrAKY8e*ofZ!F4xDA94^EkL!$nj+ z+{n<-&;yu6?>)Arlbdf5{6?s!@+rO6c9L74DK`v$X@^q0*!i@(TA9F_v+(_cbFU%_ zHT9Ti@zvj$?F;opELygR6warCNg)e_({FC69PGGXjpZu18WxF6OiWOJaxUbT^4i0Y z-Rqn06}e-+pMBR=e&^>Tee)lw*RioZR^+;wqL$rlf!H+Z`6H8)lU$kPhPqXz((W2x zKD>yIiQ&k8^@p&iVDk1sQ8J8``wKo>cMgKbZ)<312*gPA3Sj1L&QxWaX5oH@GVng0 z_Z+7fqdSmC{rK@?tYrWE-(R29zMO||#k}4HSvJCPXSr`pKin-Z9~e+`7ucDt=lutb zXSFhxm3pz``t|Ep14-NkIXQAWrM5~iPF5?iRIJ*fcD|I+`F!8HCD4;{$2jb^TvBWv zE-xD!X6M@oX72llcbS>{7<|2Z8)LcHESl2>u`JS2*MyMFIdsHI#M9f~YWv{;$K1cD z1O){>DATW4t;AEo0@Nyqh;Pv~GE$qaGSiZ|I)5HbT=a;V7s<`dO)n~E)=d;Zw!O8* 
z>nVDd(%jm*@c3miFaa(jwYy(0q^P-(xVX4h%N?OUpPZFSEtu;6`$kC>?qHkTV|2yVI3Nqk`8YcJ zdwXr)#z#iTu8d(ww}>DUt%o)$rcDd86Z?En#3_pxf%_JDwe!&-7?H=8L1z z+qZ8mue3QB16zLm{26vt_4+k2-cP-M?(mW=TZ1oC($ga^1h~0X9j_*ng`bU>Yg_@G zi+VxssQu1dNl7WvK1v?n`*;0}z`8$7c5`7CNql!oE+9no-6Fs|=9-@4GaD`M+D|ju z7`OOao3!2gP*-;zZXPHHt%dvpfxNuD0E4}p9EPSXPc@}Qx{jJ4c{_|X0Zc;VeHdkV z5_(sE2KlWvCJO1Z47s_~Q0CXYU?k{NlGe_^lj}d(Us3(^;y|;kPTY z0Q8-t>s?;QMrE8pNv1ohK6>;hCzRtE%V-9dJj(6)^K`tQ#N^~E?Ls)WWyn;haz;IO z7no~pYHvcZ2%aIMD9S&7ma0P`1ueP=nq?yfmTg~GRNT2PYES)97KM=#3Rg>`vXB?P zuA@WVJ;G86NO1a-*PY`k&_zav{DG%w2&$}6vmwo$YTTZoYHK?!#hGFSS9W$rwh<1h zs)TyM@^tXuaLhxoba(IGUD_ug>-}2Y&|u@njKr)y52_-DpAN8C@Jyay%k%kjrOdlO zT&x0Fkrg`3>gh$i_dPE+urJFmS4Qd%^_%g^7-#gXlI z%Khzw^9?*IdTF1!0*UJ!R0tG+NlB%K|GvpPEqqq}v9jX28zl}?SF0cmo)^a9trbT`j*Q5`&<42Da8=Mz4@yJ=CBekEqxpg%T3uA7n4bXKj zbG-pD&@bNe`t@r;rLU-`oqbTpG3{5#P7_o@Vxl1Crm=H#sB7*T>IHOK&VSMpOdM~L zlGvln3D}^IrG`T_TglL-6Y0CVi?Dn&sj-SIm%yzuX}bBnS%Rr4Hz@%I+saXd$s>Jz zi%M(?J_;>;folk4*aeiKm)bd~3pKxl@42l;6bx#Usi<06TR%PG=Gfb+YE1z4oCw72 zO~3u?Nb@^SJ|<~o%RCYM6_dlF7M0Mq{DAPlpQA!(Vsf&7C|x-Hzn(1wa!law&tbhJ zC2am}XjN_o!JTiIkn`_ke2oOhX&zZUduBQ3jkFECSX~*{+_Lma{n#y>O^2;x`J`1%I@`aP`%@KF^#=8$w5+H^pRhl zZt*vjwJ$$VGckF0Q*WrO(RBsy)WOaH)1CuK&LYJM6E>1d#Q~|KtsQWCNP@}orl{y+ z*TMFTwuD3ftFs&5P)~#PuUdP8lC!h5evW2WrNx)jS9|W7HkGx(Pp!2X?#sC!EKm~! 
zn6vuB2LqEv4rDU$P)%Oa@pV$ZPFjrT|GSsxeDO}KZG#kaq?DSnCzGOzIwH{)`q~sy`1mv#_cA$oJUV5L%Uky z@LRW`@VQLQ*Vu30UP+6Mi&MeGWK-ABPz+4$NTuxA2XR4FQ88rcO669&(PuBgQ@sak z^h6feH*szrqDeq7dVGZTwYLWv5+LE)W2*CtiYCvs`e&CL)f0{4Ky!{_F(d0a1gML9 zWr{Knyh+X|7R@B(rJZ_8h=nL=Ym+m1eY`#%TxUCur9^a@)wvv@4p*wlWf|++M-N_~ zq0R|igt6B6jken(rQjkz5Z-ckmV>BvUc1sj1m( zoZtLXCW!H#etWSsn}l9KZp<0imm`FOm8$|7&dn(H-vczbIG_7Ba&MP{oSd9bpD7m0 z1ukq?7w9uR@qHHMZkpLLckZg8SlgrG;%@t2{=5CRnO6W?K|!IPT=rbfW+?6c`T2R8 zX?Eu_3Nt#Q-eD#IPUeSDEQgMcj@N=VK@g4u2uLT_lQ>YTAWG2dHyD3#(x@6cOXvb})L^M3V*%K-_J zfs9!)>D$*kyu1n@h}anNk77|wR^FVWy}kI@=P@^Av%ZN+FvYrmot^#R-9~5jO6S)5 z(b<6Hlz3Jmdt6>Y!E3(4NI|;^_M!52-{QMW7aRft@vlyn!-s100(H6+QEqN-X=~!3 zSSnOoq`yh&T<%i@LFcI@ytuB+oq>e8b5s!ZlYB&WvMpPL_c^XSoP*vP-r z`9eg?n^*-pXs3b80rq+{OjQn0fV(XX0SvA4CT3@wIqKL6r+?K9Hc((+cHl6-}E`%oVi3N?}~tF_>N<;yvVeuDAvvQl#-2N$Y*qXCT;SiIHa7aX^xKt$8rJ!T$8Xr zE3ET_GHK~1jcN!zu~FkhwiT+U?%n@4RKkSr{;vq!TjoD`4rbMmp5jN`PJ&CCcm*0? z0q|9Nw_>eqnUm$P8^02=fBdkh@&0RLv(W`tDd=o&Q=1o=m6fHzqgYz=d#=$nSj2hZ z9aL5ZcI^)kr1#snBffyHgzKxDf{hx|%9IQ}d{S>u^Rs<>JRjeCIg~C<;*OgLz&>eP zn%dbFr^S|L*7hLA=>eE&B{4CiQ&FnkzcM}H?v{M~?xx5JL~!R@Z8#o2z8;jG&F-U3 z3nwQ{5R5g+nI#qf{`s@&q@RBa0Bl%zxbVyDk&%&uj!@#Dor>uOec$n?-3!ex`@hvb zOMeoV&yt2a3J*4n{6=i}i1Yl%?pMxgYHgb!c}ZTE_7V9v(|Q^DG^$`gxQ%TU^l!jh z<+k5yIDXnwgh(p!A`k`nDseWF3NLmSG8dqIg?@c*&jx0~<{Ysl*>!joPuq{`(q`8T1$z zzR1NrIjeU) z#mVIgj`d(mS0el8+FGt`Dspgw_Ks0J{M{sxrwh83 zM)bG=__dz8x_Z&TY@s?`Mp9Bi;llGQP6>&0zd!xAhf@3+8XD@*m9yjddsYbqXFI@t zi62u^QYJv#n}8RUhb!y-$7+tp{9EsAy+im+Re8CU?la4u(E4rLYZ{{PN$7KQq;ZI+ zp&U!zzrQ^>kn#5Ir01e!qMgz;_r`UJkheW?D?L8&uZVbjI1m^ z)eBx89$PgvqIrOGQ3jx{c=*wBJoB|Gl!?AXFm_A_4-)ud85)DaJ^nP*i9;zHIR~%zke&{m~eJ24fBF(R(`T7Z|`im zGknEBZA2DOwW5k8cW-m4MwV35v~9{qkvfJDdq-;$({< z{7L&JDkhea9-OAl#LO(nr~>RYEv#YO6HFx)n~`^_FSft3aWPYtsFi`RtPBG`uzPe= z>%d1X2CXDcl~9E(hWWkC$l&1$!Z`mql6jAQlZ%~Q5m|t73uBf8G~Z0$$%zR?SE4oMGjF`+Z;5bt=Jezub7DewwnXoC%q&%kv_cq( zV5zm1ygU{kw}|0Q^gRJtYnjeqWHC)9eL)3xu6#v7sWVp(C`F|$Q6;J>W6zAvW 
z*vshM`HJU2mazp&v6Ja~c|#!PgVTKJIBUq7Ca~(gvW#l5mLgdGW`vAlqD_aUxyJ_V zs;a67WmPSC08>*Qp_VQ{A^LrzXK)dRxSSDOIaMA&;VO$Sccp#){(RVcQqG6JS0i(> zgdJl-xAb9;B{a)VTybDvU>#Hxk=fwT%M(zM{?|ck;G5ifl7SyX*TNVaE2&D(`uVlGYG#< zYcp6#6v&~SE5^FEp&*e+`(M5~yePv(V@@MO!$_a!$Wo0=$>i`W3OT@#6<{VTHPnN? z9XvY6v3eEQUhlFjzqz!;+Q|HW9We3kC_c``M4|d!i6}?M^t7}w7$ydbjJRcujcJ!# zW>=KIi+=w6S?7Os>E)IMBStExIi!JKL*&`WJM_z2gEpMU#>N)(y;kqNW&-o+v?r)+ z0s(<(BWo~555QISlBGthZ@09xTC&N97>zQRDcfM7c*qo(zD+)C$G@@kl@%zqSBqNoUK8lt?jY&Dx}}GW z7Dw7uG~1ZM`vd$F49F45trthe?KhH5OiacC({DwiYj0(;-oZfU=DmRX9)DI<)j;g7 zmrN|I6#4IK{3U>oSJ-;kfByazjgk;^{h^o@_y8MykYE(!e5eA%WX53sz{Z9%OqQ;5 zzz5ubl8tQtOG15GJ?F)!@bAhCZK2%kGYV)VTVJ0@3K{RSqk--fB zyapfds&t#sOXzh8Q-kqn>d8VthlNlAGJM-7lY|td5l+h3v+d(W@%Hh8Wa9`Rmo-|C z=KO*JoVa@gc9vkr`W$T=HkJJ+Aom>~AD`I%B|Kb~11(~7KZ0&KplFFeAd0!ms25F5 zP1&WS-u2fi&{?4Hp}K;M-Vi zl}&WG|NU)^n$5=V-!WzccRK>A0v`n(D1cW_ zm_}`$U_I{K-Y7)FKvF5D@$jK4+{1CAC4yg|J1DhN{vp$^FbXyMaQ8n~4-XE|%W0+> z@X1-&d*g2W(oYG)fs+Dbed84~NKqm!k&%R2gq3WluDkB*@vP!_;C{DIQ@;VFHScwV zBvAT(Xs3d|WmN9M?Hkg^D?*gtq>vU!{x@{%9SA~+fNjeznA4`<`bna#;R4ehwZ#_S z$l&1M*+uB^WEZdRc7V-j`Qsf~*fpy5p!zk?G9Liy9GMXk$#Kki(v}yLE^+4$>34gG z1h8{(DE+?L{q;+Q^I-XVpsAW&~UA55s*`==%+D*@+}qoks8rJ#kue}=pDTI=iR zPdLCJeacOYb|L)dF-ke!UV+-x&~!In-SNepy=Fs2Q3SyZN;|KI|D?K-<&GBIfeM$D2jg7@GJYr%SGsig5>Y1F{Itb#)-k8!3$&*1?7 zd-Y`4PBOWc+lc(!F`}<~q2Dt4{nCpzr6xuPU(YjZKz%+**29 zQt}VdKnT(Um5*OI(soZxB^_z@zQ4)!|dqj*s~0r%dD}vE@Dg3*@q3uRD@ljcDq|rVdr6W^&tv~oxjjgu29$Gb4{gi)T?g@ylJVGbBe*K#< zeHrulZ0FO7p@AkiX#%X@VL+;aemUt7ya7{P4qT`4(Vp4akF${!DcASj1Lddv^8UY* z$&L=J?}Tbqrp+jqRm`4FsL#IB!>#NsSp$fJ;HEfhLuhEcS;-?096@wEi9qseB_VM6 z)%-Y_n-G4IljWD_A01U$929k!Hpft#9xcs#^67;YoIqD^Z+MP*yP5n7box|p`KXLG z^N_GCNe>=?h>3`CX5bQncoU#Ur85@>y!iX~uLAykik~2RO*Fb%O68LMz#T22yruhv zOsg}$j)&Z1q^0yS|5VRof=yJEX60RpMaJSs58|uqM6XvI1ir;Eh!Ta$#|q+%o83*R(CXO>Atora_AzI_&WCXA#LD-jQ$K4oLE2JMTqzOHVn zxXQ=JXCf#FtLQLL5V)7C1}kP6F?T(N+OLsJJAM6W+08@p_Pob=qBcYHswrWkJUcr( z?ri~>jnR|I0MF*c$GS$#VP>xHj~o}=)y2L5VnX}GSmZk3&g0f`x+vV+T3OWHdQWWk z4-b8vUKBqgqK%J_m;2R#yK 
zxS1k$3A3)wl_Rd8#6-V>Wsg$$Nj5F8r(yB7)TwK;!l^JMTeZ-5-6$T~c1&B`^VQ)~%PvJKe02AXKcs`(F9v z9y*XE#r+f!+aSPc>U;QjDAEf~*2v!&a#_1?9*1$*t^fY7Cq(%C9AQ2OQM>3@ zN8g@pE|yUg;tpb$XF=zAU4()drk7WOHAO7|+R|qJ^2udSHR(q@c)%{#yY9NJKA1a0 z)z?=nEQO}eEi7b#t9|2Q1cDU2hG{udQ;!akiAhNdK}Ple?jCmi>$h*$NBu^!q#2Jn zV{q5s0x;2S|TDMm$=7I zK1G9k-2Y-=67q?!YqCkH3OEC=i4d`qV!eMTgzEw58h7}9NFcnJM);c$DogV(-AUl$e#Yes^Pkm$MKjW7Jv;m_4MGz{7#o?!~G4Q~N5(EsF! zId}@{)X(D&H8nTU;ikXUSiXONCk6TQBT?oTM07U3H@`0~W~0M`wGpTSP+7>`%kotd zIhCtDahWXvcCJD%nLZiw)YK%y6W7qvqAN57p(t3Nd+EN{dyVuCM1SA}ArX=8+wJ<| z;supYn!JB6_AgyTB#+Vb z3;^(m{l{pCp2OSt@)QUp81P>=427KL?rKo;;6c|@yMJlIaclLmIg zB#5*pmKGLuEqEkjK)VIY5)-abnh~EiOX^G}1D_^R+v2BABMCiv2uRjxUXpX8=rPPr z{FyGqafLmWgCCq{eJj)pbT8Wk$?&#j232E2O*}DQ$ktwyPa)o}82v4?mv81mR6voKf2kFV)|MmkEg!*EapBQ5N+yrG< z6}oo1NtGBtIe|(a#{h|spc@=;(mK@Y5~3c2k_n3sHWTarCLOgY&f0}oBRz}PN>7aa zcj5)oqK*U%+rLoGiBQ(UO)TXeDCaSKkYlUeO zV{jRr9S6pnFr`k0Y>a86Y=Qi%#->7P3V&>DtfU$KLTzRybD|_SA&LR)gP2~65DecZ zJjBE@A5H(0sdK_!H;hv@Jk&1-5+$jGh>Db|!r2yC@n9DVN^9)9@S8C1`{5JIXx zXxsxs8ILFtM~s`mtbGpF+Y4mmE=~{o;rDyAkbO2|+eKzaL`L3!HajYFrZIN;E<7AR z>)7MPiwwho(m%N;3-gdePy0YPHMqEFwk4UKkzt{Tsen0qyxN~w2W|&LeJ51o{z>!{ zn74`FT$|wmmNjP^wX3PBwiCQV@Ze;sZwuRv-sI%OtDUpe9bH;7*H}0y7pORWU}$(f zQ_wPO`g%)6MTJuOf5824GylCZ1K$?)L=p`4K*PA(O3 znnV7+m?F}5a}(+@u+d=y8_&=}v%S0?aAPz~0>F!1JW~D`P@2u?f3g9BqF~TMn@%U@8g3 z{4?uN_4L7Yw$YufEp>ob&&F?O0AY`5U||3Y@`jWbt9@n{&Z(u96+6rtzOa+=BC{ZC zQRo&KkW~8q9PN_2nCTj%A&~&o>0?8bLd}a(o{MRytLH-!Vut$`#Ska!68Ll-a{oXa zs*#RsXfrpgwos}#V;@s~`t+&$)#XLP7hV||7R#q!j{MABI=k}Y_EC^f5aeD5wz>|< zX>P8T$Bx#J*A7~F5ZSFIdr@#G);Y;>_bxo&QoS#-anP@Y0J*eh>pOr!iZiMZ>Fb^& z28gL2!erxg8U~}jUMxT`&S$^mDG^h$u*90J0E^p4#shiubE5m6Z)UvoWwGgb_&cEH zS3`q?3T9a#7rb$r|M>Kx@W?sn7^0!US)*ucZO`7(G3NJmCZ_1|r^!C%%$r%+*>Q(j znwrYDze6vR;=mepgY5MA?nf3@aZynYaHDR6ax0%o))cUhj_M@*KYDNoz*wEn^fPkH zc`7R*WF&XwpDvexTwZ>DdZ^16Jg;wwoDVySabbu~a=)OEBL7?mDB&r@70tBUOrAU` z!zG>_f$Xyq10Kuyy*j($SUi97D>KM*Nq!Q8t&Q=?X|f3Dj4?ROPZNl|WXJqi)3jF$ zz^i8#TV_@;hEBS)*e)Bq-quP-OPjY9R|C^Xk3Tsz^XQ3>1LBlOLBqcq|tJ 
zfi8hhSyfe4vx~(v2rmi5yR%1d_7r!nNk}KQV;R-iUhC<|5VXXbv}9(LgiPM3G#42* zrP?sP$=+z`eH2uDKhA5tdjjboml9Xp^pf3kT4rvS?X)U9^(@hR5FiKYN9-$jcN)C~ zVr{j~3!l?LF;DG3G$#o-ET?7Lnxf&=7x-Hb;iV!ltrLHj^-P+4dJz)~Y~yckwU^8j z;yO{4>*yQ?CqvU&2qcL9PxZp|I~uw9_K~q_$4u!6f)@bM8;On7x5{c-zKwLZ5^3WoZlXW1E1MFs(rTNwXP< zzlGs4g+0h3fipNTK-6sqAUnlvY2l ziU%?iOx7k(@$RR?{*UfFk_8xSr@c4)*C$f)Ff(nwls;bfTw7l+PQ0R}LtJ2U^66{7 zsi#>tM&prub2vvg*`viq&s`RqEHXmEYrp@s3j{RK1}!a(nrHEuwY>8_`UT2I;%9I< z)(t>DcyE7ogF_p3nxdEXKr%9?-9TsD8q*UJg8bxxP7OV@jCG@MP+ebNkNA!NB%##W z%=hGD4u3XGg1;~Au%q{()Rr3h08@;qXq9V7wH52+yOa~&uUILLf@}VH5At^94i{O( z<@5LJKrjh*@Pu~B8Tbo{&HN80E|&=`3v^)w>^*aG;%lJ#Y~pzD6jmq%7FksMK>5~w zRCo9VIzeaiZEz){$AE|gUVMU)$POXkYgo9xO<+~YhGj~>r0o>w%!d0uB%e1ER$niA zsz`KEeEVxh5b+1c@3fh>x*rs&mFAJj2P?rk4f0{O40EQ&a`)-!DeRCDmTX3hZREIb ze|3LIl3hLkq&dk;;zLT&+t zzzmb9lV8hLxS54r)}oa@;hIak}xR5JhkcyN47cYbA?)J9UC z1&S79b7!Fd-DA0x(%9B_k{(3E|50$KHRsd@#DfGu(utwq>glP%7W_>^lPy?MUT3&c zTU;U9#=e-K9%W7$etG+t2!pd1sN7PP-WUA(P2an0s-cBA(J??;?b!7d_1c9%lD8s- z+pgfxA%T$_g2n#}qF2ZVF}D;^uMU7iz1cfDQVzezfpJK+`m95TL|$xuxfz^#-XC&e z2Up553{T0_3bEDOQ_cJgSFW^Hm>rQB6;D3t9|fmsVKeiJi_?vY1=2J?DoUwvM@UrE zI)NbQ7PCAKF(MSCU{(G1vjKwN@>R%{oZ_YmV8mO&9Wsjite?FE8kCEyvJ+LK*ivC& zNhNhoMbc8m(6!ijj@E2dVb8RTp!5A>!IC>JQc{07&nX zOL}Y;Bsv#rnx>=oIgDUM4Mh`FQgA%K1FAzi!MwfeEAY?mceI$` zPYCkZ0cG=ONf~ik1$sj7WU;r@9z}@DL{HCUIN+2kr$TuXIoI?^g1~iNXEq`Jk)bex zYQdfusS7QH1HI~$O#fPsS7W3Z%vW=+Z>}pn+&{P;0_mmsu+Q}fRdC?EG8L=ct~;Pu z(E%B)gPnQfX z$tHl?Uu(^zYPaQ30U2C4wWQ?xO`w1--K zw*V;9zYG@@sO4H#Uz-dF<*+I_T_3-ty4hKh32UVHX;Tt#U)TTTlH#RjrQ+0J#l|N~ z>Xvvs@mv)A4|+0h0B5TM$-ML=dmx3Rk!H}8q8QW^BZO~6+#T9c`};^t3~;j`(R<0R z8Vh8b+Ycw$HskyUHhZe5ZYiROpu7Sgk{aMW}|)Nq0<2O-bPqON;?*Z!|tIJlwsC&*U8_lLXuh*n|ym)VL)pd9FSF`g0be-Dhtp(8}Y76eIoi zCZ`PKPP&c$zYj2!fD4jNl*DEcK|6@!3jE&s@9w$=eXB=eS>WpHJA#;D*`g5pxvQ(I zc(@yXSFc3JYIm_UPJ#lR$LdTC5#9gurvs<7{A;}fUG29}4m3M&p;lH>+V@3V$ILwi zEI9S^7PRHwNhY;h_wK!Yb@;_#Xf8~yywUti8zge_*=w*6)Phtkn^*|{FPV99uU}Ku z`FC8yr(YLXE_ohT0;bz!GA41(2#}WB@3pdR+@#SkIjV~K*?&p|v?S(_Gx%uXVVJiF 
zbL?0XdjDf>ggTYU`5pT1ZZYhvy1Ka~6lltWINSaFbGZrH{~JqN3Ob~Qjhm+o!51JZ zTlK}WMG?=Sb3*lJNEADSp=;r3oin2}&6j5I2p&UtGtc5Zn66JL7y9}h*vy3d+WuOd zZ;_=Z7zDYs0Mj}|@;O{eFF&2WYs4i19E#@VIbfnmztpd0ISbRUw&t*Tr9X27H%sG# zpgS(k5vVUc3Vnz`!IA=(!KiBpbLd$^R4S0=4gY*J>yV~)4kTLRP9-Rt)W8=Mw^yMb z>3z~6=D#XTHPwCj8-j#xThn)JJk|){u`O z^04OV(3`1GXPa;Modnu>-SpB#7IN8rD=TJt0(ZV!LXS->+o=a_k z^t+N=Tm*5~2_EXq_OeJ1@JumvTX8*W!OL3 zCrQ;kB99IPN#gPEh1;Jy0kshRVGe15%>a>u8T*L>)%r0}+cAulo~DldY&akU}|ng=nf?Xz2u5+uN(5Gf;+a4Y5;bkaF5+cXZ*V8Jjk+ZR0jImF z#oy&!V0r99pY2T2{2>iBwK=B47Hm|U9J9VySXlVNhdqYho)GAdhZ%fnYi<45mwdhT z-@kuf@j+ce-B<*7eLPW?5#aZaU%zf|N{cAz1mVK=52>$l*LgX4-t!+_k@om{X{tF> zQuAcAWCZb?zbHeUG&=r8Mx+VBt3px8a)gSTZ%}&$dIW3cE?aD56XmU zi$XN*tNb5M`DXocYe{9B~baUL{#K%H=WNP{>Z& zR3JGR1xHogjsGDm!L*xa-F3}t=MP3FW7g#$rwHwiy(Cc(?N6NDfa;`zRuy-Hw>gwV+YX5=)8^ylpwL)4yT3l0;w3_kMw$04}RJEX4dIUL# z4b8KDP;`uN{BAz;Z5b4%6u(Ytq$++mPj0SOwd|Ia!= zBx!<>igOI6<@oG(dc#(~=A27w3k!L2rn@=&hp@tS>t!poF6`g0L#ERXKoQsPAtLCi z1p_$v-8V$-lja^@ivcH`V73cRQIQVc!}KfDGAy1fjo)F@Ooh;L0bC-qT6jxrqtNI9USKtxPb`LHOq9x+VDjgHSVJwI-BU-rnAI5Gf7XdHU>G z_+T0gOr6Bo#ufFB-)S;+}DJBL2nRQ$tT_dpFtGq>Ynw^W8askd-Cv=-W^7J6|e{#TghG2S59gWT9LH+fJ)cla|<4ryLxF8Sc!ftc> z@muiUAmnX1+YmMaG!5qiKcW>jCp|u9PvDe+HYdWv69op9kb@rg_*;&r4=5Uv1rSnVX2r9LqB!uLHr&@mZ$;Rqe9y9K=% zA!VKW;Rb3|UxPNl?w;Sd7kcs3OSMpun3#);D`&U)IBV$_2JaHsbmr43VFS=BOF*v4 z5esdAoZ^g~pI^(!aq?tG(60bN%#^#x-LQcB1hf?l>#!`ChmKVq9vpP>vz~Gx>(m0w zyq0m>k3C#masc;QPQ->0d`<){o!Ds<_{8}7x-I8%iUJB!gf4NOu(Wjm1&X)!-+ux@ zg?W(30vA6S!qeI4SiFyy*8>}mYD&ZsI)JkKBKH`*lIXj)gqiq~krF(9poi5{VBeu0 z*O<3s*}Rm}B}#z=^8fRk>)@wZS+Mbt22+*4?;jp6gjI{iZ<3ZhBnO_b_FDw~As6C7 zQ8Wm1_$&hk!zdtq?%h!|LpxQw=t0xikEpwVX^e`arll1s!**y7lI#!~1=|90%9Gb_ zDUX5F9ku|0$D;S|S6syudR(+<4VHnH7s7jJj5LCD`#8CUg~eWyAFOmpcx+O$_&uKe zc?c%Tn1LbbSM|-duxyU>TkeLH5 zLnC~qK$U7J@+yvLq4@Cs%T0+80>2^Mn4gy?;8U#KX7r@lN3@|tw-fXE zC858bkFug7&dwwbF0S3d6}Xpg%>4f0wuUfEx<9D0v=1c6mholCwg$U8&HND+eCkWI z-8@ifUL$6SA8okmqXL40?d$QW{AOs+qewZsmb&o$;!ij8oQN=mG;qzmL@9;`@EnT2UXkNa2c}@hMV)5X`@7cm}WnmIG`g0U} 
z6zmC0R{}zL18#hT(KAQLI(7P%f{2^|5hHtRX82?g^Yiy$YjI7CM%^OeL;6H#XW#gV zzBYEHYuIi#amzCBjGKP`|0*g4k+B?%H5#;-df6la<13B2)o7 zQNREFkO#^9>qg%quY~0C=B64fJ;kYKg3mxA{zcS~^G)d^197K0RxndKhc6&)zLY?m zk_8`|L+@<5y^5p#3&z%ghOGNK1<_O_)oqc}&0`VPN5H!kZl7l?M5RfSJlfE5&j$mkXYD3I9u#8O^DvI&N|pO^@7gSaI2@td8+k(_ zvpuAGZ}+1gwRU&yDfprL1jM}pOF`Ch5I$R?tOnsDjdB}6+4}23QdSe-I!HN1*ig}- zKnE3p2nt%(L)Z}wfT!`Q~9zv*r zPT8!q$ky0@OwM`?j0K(1=p(UH*?BICdFN|=REH05>Ee}^=UC+4 zs`vfq^YL>=R}{#asYw0rq;LJ@3yiOV2<9TVG2*F$zy4JA4G{bFOJCk1xhNb*{&E`% zsm-~?uto?D34zB*aSS|tL?T)cq@6@lI}J>E!XP0HJVId!%@TNY<)9vz+pDE>x%|$E z+G7n~9S&S5j|s7XIy|8wIFevGKcDPu)E;#4Cb7BbqNukX?vx+o@r5c$PF(-dubE& zpj73#^e(JP+=|4A$Thr1zI=Cby6t9$V1EK+oi?_%`x?EyyexnI?QI(%Q_gTi^1V&j zH3itO(yz4pNTt&4yN`d2a(xW8j1Ix6O@u*`!maZ?VzHbOqB-*v9g5wG)F7@sVfk!QlC)&CC;gFo?{9Q>O znLRikZn9ilJ+mxb{U&KjQqPl=YEFCplJ%q|FC!?kY9Y+eyDw;>vO8ltajuI?^fabc7RAi8 zXLqDhW^FQS(kPAYs2jR61PAo;frf&k9(Q!dJuc#yBKw?9(G{Pv)pD&)bCa!o=9F|LQS>MCA`*+_V zhER2-JKFb!?bv)v-cbHaEiul8&(v&DP+XhnxViiUQA&#cS*BAixOJXR8_APJ_-n_< zF2FU%#|Nsy0{g>Vg|#08Z`^PT6{osmR-$_ zEj=p07sM=9F`LVrmAqauMch$X`?J*t4OXnm#|qwrzn`DeYIaRto5i;No*vTh9BRP#321_HN}zyNl?z$Xf(p-N^?Fw!y6d_Uh5^aToDj;(|dr7rwFzWef=*TJt%%~ziG)b&dm7YUP&K1I?A&J z$ZZM9>1RHowG65B-Z8tVZgQhUH=)xdK9{)Jj?*srYh6JERnUV@bo>Xh zwbR)T3LS`JnrH|4Y7#hzX-Rz|H>?a==ovp$?2E^F0k4CX7K@06ab9{kZ;4rt1#aG9 zYXzs7K}?AQvRn+9#?o(hci+8RxYMQ)XGLLcL8~74*-WeFk2j|_{-%{0t-DE#l#BZOywdk%AWGu}LytHc6U+11fJ~01J&TGAW zeX-cNlE(Gx248Gk7uxxYt zNQ%T3xFrYhW)w-vL77Z;?PmM$b91L?ZKr4X&_9hdg^8!r;kd8|(SF8Y$YpzVRPvOB zyB+GlF=)%2bdU*^V9rp{a+r4tr%=n)T=c(d;TR>dzYEO00rH*(llB#v*4rgrLSqbxpn4+(K3&3WZKF2M}lNOnNFa9VDrw8MMd$oUw>^sH}ONzoUY#f z%7#sEIt$SK+5Z$zHi7_Dv0%g9{tXd7zIs?6G+R@b^lA5M&@cj>WY+s$} z?`Avo4G=QbbfEw57nhftJpxa-nap7O|94U1J})C#Sgfx*RQZRji;OKW7PN?sO4!%T H63PD$@c$1n literal 0 HcmV?d00001 diff --git a/ankiview/tests/fixtures/test_collection/collection.anki2 b/ankiview/tests/fixtures/test_collection/User 1/collection.anki2 similarity index 100% rename from ankiview/tests/fixtures/test_collection/collection.anki2 
rename to ankiview/tests/fixtures/test_collection/User 1/collection.anki2 diff --git a/ankiview/tests/fixtures/test_collection/collection.anki2-shm b/ankiview/tests/fixtures/test_collection/User 1/collection.anki2-shm similarity index 100% rename from ankiview/tests/fixtures/test_collection/collection.anki2-shm rename to ankiview/tests/fixtures/test_collection/User 1/collection.anki2-shm diff --git a/ankiview/tests/fixtures/test_collection/collection.media.db2 b/ankiview/tests/fixtures/test_collection/User 1/collection.media.db2 similarity index 100% rename from ankiview/tests/fixtures/test_collection/collection.media.db2 rename to ankiview/tests/fixtures/test_collection/User 1/collection.media.db2 diff --git a/ankiview/tests/fixtures/test_collection/collection.media/dag.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/dag.png similarity index 100% rename from ankiview/tests/fixtures/test_collection/collection.media/dag.png rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/dag.png diff --git a/ankiview/tests/fixtures/test_collection/collection.media/mercator.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/mercator.png similarity index 100% rename from ankiview/tests/fixtures/test_collection/collection.media/mercator.png rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/mercator.png diff --git a/ankiview/tests/fixtures/test_collection/collection.media/star-schema.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/star-schema.png similarity index 100% rename from ankiview/tests/fixtures/test_collection/collection.media/star-schema.png rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/star-schema.png diff --git a/ankiview/tests/fixtures/test_collection/collection.media/wsg-enu2.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/wsg-enu2.png similarity index 100% rename from 
ankiview/tests/fixtures/test_collection/collection.media/wsg-enu2.png rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/wsg-enu2.png diff --git a/ankiview/tests/fixtures/test_collection/prefs21.db b/ankiview/tests/fixtures/test_collection/prefs21.db new file mode 100644 index 0000000000000000000000000000000000000000..f93307374b7c41a6f6f25c273d3277cf22803cc5 GIT binary patch literal 12288 zcmeI1&ui2`6vvb7c6YnF-BOTRN+IZ_imt6f51wq-V(IR-*lnqx5~j)QHZ;k^WMaDo z#Zn5b%%O)758gZr{tMnbc(F(S1@DRnrM}r8EO_ZnD7=B0dGBT3Oy)x_-@N+6s>`X_ zV3AL_c}E-8bX~h=nwq8!Ku5z-bYeo^yUEFhN5hCUT#v@R*(k5f&#um!tFv>} zd9xSIPX@%NCZ{_bVy;i3mbpb+rp-K$fJhIRO=5b(bO(N50OjJ0A~zz73W|)*~^m#Z8qo6enRA(KudfC(9#|Yq(L&SCRni3l43N#N$;G zG^H4zfwfkbVp!!{Q4&~ia_q7|E8D0C0?b-ti?1kr==KeuOW`Z z0;}`Bmc2>sExDLk%DPEuKV_uH4qkSa!$pH`Jy~6J$R}>F4&s$GV@pxNs+!fHPw)?5s%!kz5Y;v z^eCsDeUHSY{(GyNP#(2rN;9%Nr+@qUrBhxvMxd6Sf!g{9sJA}`Ij;OGeZTVoWIp}- zM2eCA=4~kLov*|CQ`du(o?Ot`I`*Xix&}34|!( PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .join("tests/fixtures/test_collection/collection.anki2") + .join("tests/fixtures/test_collection/User 1/collection.anki2") } /// Open repository for this test collection From 8a4124af4439e97ff91235657a69dbd79d815de4 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 2 Nov 2025 14:12:52 +0100 Subject: [PATCH 28/32] feat: add hash cache integration and --full-sync flag Implement Phase 2 of inka2 collect parity by integrating hash-based change detection to skip processing unchanged markdown files. 
Changes: - Add --full-sync CLI flag to bypass hash checking - Integrate HashCache into CardCollector with conditional loading - Add hash check at start of process_file to skip unchanged files - Update hash after successful file processing - Implement Drop trait to automatically save cache on exit - Create ankiview_hashes.json in collection directory - Add debug logging for skipped files Behavior: - Without --full-sync: Skips files that haven't changed since last run - With --full-sync: Processes all files regardless of changes - Hash cache persists between runs for performance optimization - Uses SHA256 hashing (more secure than inka2's MD5) Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- ankiview/src/cli/args.rs | 4 ++ .../src/inka/application/card_collector.rs | 64 ++++++++++++++++--- ankiview/src/lib.rs | 17 +++-- ankiview/tests/test_collect.rs | 4 ++ 4 files changed, 77 insertions(+), 12 deletions(-) diff --git a/ankiview/src/cli/args.rs b/ankiview/src/cli/args.rs index 69c1744..c2e0bb4 100644 --- a/ankiview/src/cli/args.rs +++ b/ankiview/src/cli/args.rs @@ -67,5 +67,9 @@ pub enum Command { /// Continue processing on errors without pausing #[arg(short, long)] ignore_errors: bool, + + /// Process files even if unchanged since last sync + #[arg(short = 'f', long)] + full_sync: bool, }, } diff --git a/ankiview/src/inka/application/card_collector.rs b/ankiview/src/inka/application/card_collector.rs index 96b64d0..50ec7ba 100644 --- a/ankiview/src/inka/application/card_collector.rs +++ b/ankiview/src/inka/application/card_collector.rs @@ -1,5 +1,6 @@ use crate::infrastructure::anki::AnkiRepository; use crate::inka::infrastructure::file_writer; +use crate::inka::infrastructure::hasher::HashCache; use crate::inka::infrastructure::markdown::card_parser; use crate::inka::infrastructure::markdown::converter; use crate::inka::infrastructure::markdown::section_parser; @@ -15,11 +16,12 @@ pub struct CardCollector { media_dir: PathBuf, 
repository: AnkiRepository, force: bool, + hash_cache: Option, } impl CardCollector { /// Create a new CardCollector with Anki collection path - pub fn new(collection_path: impl AsRef, force: bool) -> Result { + pub fn new(collection_path: impl AsRef, force: bool, full_sync: bool) -> Result { let collection_path = collection_path.as_ref().to_path_buf(); // Determine media directory path @@ -33,6 +35,19 @@ impl CardCollector { std::fs::create_dir_all(&media_dir).context("Failed to create media directory")?; } + // Determine hash cache path (in same directory as collection) + let cache_path = collection_path + .parent() + .expect("Invalid collection path") + .join("ankiview_hashes.json"); + + // Load hash cache unless full_sync is enabled + let hash_cache = if full_sync { + None + } else { + Some(HashCache::load(&cache_path).context("Failed to load hash cache")?) + }; + // Open repository let repository = AnkiRepository::new(&collection_path)?; @@ -41,6 +56,7 @@ impl CardCollector { media_dir, repository, force, + hash_cache, }) } @@ -49,6 +65,19 @@ impl CardCollector { pub fn process_file(&mut self, markdown_path: impl AsRef) -> Result { let markdown_path = markdown_path.as_ref(); + // Check if file has changed (skip if unchanged and cache exists) + if let Some(cache) = &self.hash_cache { + let has_changed = cache + .file_has_changed(markdown_path) + .context("Failed to check file hash")?; + + if !has_changed { + // File unchanged, skip processing + debug!(?markdown_path, "Skipping unchanged file"); + return Ok(0); + } + } + // Read markdown file let mut content = file_writer::read_markdown_file(markdown_path)?; @@ -175,6 +204,13 @@ impl CardCollector { // Write updated content back to file if IDs were injected file_writer::write_markdown_file(markdown_path, &content)?; + // After successful processing, update hash cache + if let Some(cache) = &mut self.hash_cache { + cache + .update_hash(markdown_path) + .context("Failed to update file hash")?; + } + Ok(card_count) 
} @@ -207,6 +243,18 @@ impl CardCollector { } } +impl Drop for CardCollector { + fn drop(&mut self) { + // Save hash cache if it exists + if let Some(cache) = &self.hash_cache { + if let Err(e) = cache.save() { + // Use eprintln since we can't return Result from Drop + eprintln!("Warning: Failed to save hash cache: {}", e); + } + } + } +} + #[cfg(test)] mod tests { use super::*; @@ -242,7 +290,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -260,7 +308,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -284,7 +332,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 3); @@ -303,7 +351,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); // First run creates note let count1 = collector.process_file(&markdown_path).unwrap(); @@ -332,7 +380,7 @@ Deck: TestDeck let markdown_path = temp_dir.path().join("empty.md"); fs::write(&markdown_path, "Just text, no sections").unwrap(); - let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let mut collector = 
CardCollector::new(&collection_path, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 0); @@ -379,7 +427,7 @@ Deck: Test let txt_file = notes_dir.join("readme.txt"); fs::write(&txt_file, "This is not markdown").unwrap(); - let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); let count = collector.process_directory(¬es_dir).unwrap(); // Should process both markdown files @@ -408,7 +456,7 @@ Deck: TestDeck fs::write(&markdown_path, markdown_content).unwrap(); // Process the file - let mut collector = CardCollector::new(&collection_path, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); diff --git a/ankiview/src/lib.rs b/ankiview/src/lib.rs index c065803..9d7b212 100644 --- a/ankiview/src/lib.rs +++ b/ankiview/src/lib.rs @@ -39,7 +39,15 @@ pub fn run(args: Args) -> Result<()> { recursive, force, ignore_errors, - } => handle_collect_command(path, recursive, force, ignore_errors, collection_path), + full_sync, + } => handle_collect_command( + path, + recursive, + force, + ignore_errors, + full_sync, + collection_path, + ), } } @@ -125,17 +133,18 @@ fn handle_collect_command( recursive: bool, force: bool, ignore_errors: bool, + full_sync: bool, collection_path: PathBuf, ) -> Result<()> { use crate::inka::application::card_collector::CardCollector; info!( ?path, - recursive, force, ignore_errors, "Collecting markdown cards" + recursive, force, ignore_errors, full_sync, "Collecting markdown cards" ); - // Initialize collector with force flag - let mut collector = CardCollector::new(&collection_path, force)?; + // Initialize collector with force and full_sync flags + let mut collector = CardCollector::new(&collection_path, force, full_sync)?; // Process based on path type let total_cards 
= if path.is_file() { diff --git a/ankiview/tests/test_collect.rs b/ankiview/tests/test_collect.rs index d702b0e..cbbdcb3 100644 --- a/ankiview/tests/test_collect.rs +++ b/ankiview/tests/test_collect.rs @@ -28,6 +28,7 @@ Deck: IntegrationTest let mut collector = ankiview::inka::application::card_collector::CardCollector::new( &test_collection.collection_path, false, + false, )?; let count = collector.process_file(&markdown_path)?; @@ -87,6 +88,7 @@ Deck: Integration let mut collector = ankiview::inka::application::card_collector::CardCollector::new( &test_collection.collection_path, false, + false, )?; let count = collector.process_directory(¬es_dir)?; @@ -123,6 +125,7 @@ Deck: UpdateTest let mut collector = ankiview::inka::application::card_collector::CardCollector::new( &test_collection.collection_path, false, + false, )?; let count1 = collector.process_file(&markdown_path)?; assert_eq!(count1, 1); @@ -184,6 +187,7 @@ Tags: test integration let mut collector = ankiview::inka::application::card_collector::CardCollector::new( &test_collection.collection_path, false, + false, )?; collector.process_file(&markdown_path)? }; // Collector dropped here, releasing the lock From b354bdc68733ca602e7c9e8b67f5545fb7932590 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 2 Nov 2025 14:26:29 +0100 Subject: [PATCH 29/32] feat: add --update-ids flag for missing ID recovery Implement Phase 3 of inka2 collect parity by adding ability to find and inject missing/incorrect note IDs by searching Anki. 
Changes: - Add --update-ids CLI flag (-u short flag) - Implement search_by_html in AnkiRepository to find notes by content - Integrate update_ids mode into CardCollector - When no ID exists and --update-ids is set: - Search Anki for note with matching HTML fields - If found: inject ID and update note - If not found: create new note as usual - Add debug logging for ID recovery Behavior: - Without --update-ids: Normal collection (creates new notes) - With --update-ids: Searches for existing notes before creating - Useful for recovering from lost/incorrect IDs in markdown files - Matches both basic cards (front+back) and cloze cards (text field) Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- ankiview/src/cli/args.rs | 4 + ankiview/src/infrastructure/anki.rs | 42 +++++++++++ .../src/inka/application/card_collector.rs | 74 ++++++++++++++++--- ankiview/src/lib.rs | 9 ++- ankiview/tests/test_collect.rs | 4 + 5 files changed, 118 insertions(+), 15 deletions(-) diff --git a/ankiview/src/cli/args.rs b/ankiview/src/cli/args.rs index c2e0bb4..f4f8d44 100644 --- a/ankiview/src/cli/args.rs +++ b/ankiview/src/cli/args.rs @@ -71,5 +71,9 @@ pub enum Command { /// Process files even if unchanged since last sync #[arg(short = 'f', long)] full_sync: bool, + + /// Find and update missing/incorrect note IDs by searching Anki + #[arg(short = 'u', long)] + update_ids: bool, }, } diff --git a/ankiview/src/infrastructure/anki.rs b/ankiview/src/infrastructure/anki.rs index dc36d89..29fd47b 100644 --- a/ankiview/src/infrastructure/anki.rs +++ b/ankiview/src/infrastructure/anki.rs @@ -258,6 +258,48 @@ impl AnkiRepository { Ok(exists) } + + /// Search for notes by HTML content (for --update-ids) + /// Returns a vector of note IDs that match the given HTML fields + pub fn search_by_html(&mut self, fields: &[String]) -> Result> { + use anki::search::SearchNode; + + // Get all notes in the collection + let search_node = SearchNode::WholeCollection; + let 
note_ids = self + .collection + .search_notes_unordered(search_node) + .context("Failed to search notes")?; + + let mut matching_ids = Vec::new(); + + // Check each note to see if its fields match + for note_id in note_ids { + if let Ok(Some(note)) = self.collection.storage.get_note(note_id) { + let note_fields: Vec = + note.fields().iter().map(|f| f.to_string()).collect(); + + // For basic cards, match front and back (first 2 fields) + // For cloze cards, match the text field (first field) + let matches = if fields.len() == 2 && note_fields.len() >= 2 { + // Basic card: match both fields + note_fields[0] == fields[0] && note_fields[1] == fields[1] + } else if fields.len() == 1 && !note_fields.is_empty() { + // Cloze card: match first field + note_fields[0] == fields[0] + } else { + false + }; + + if matches { + debug!(note_id = note_id.0, "Found matching note"); + matching_ids.push(note_id.0); + } + } + } + + Ok(matching_ids) + } } impl NoteRepository for AnkiRepository { diff --git a/ankiview/src/inka/application/card_collector.rs b/ankiview/src/inka/application/card_collector.rs index 50ec7ba..7316daa 100644 --- a/ankiview/src/inka/application/card_collector.rs +++ b/ankiview/src/inka/application/card_collector.rs @@ -17,11 +17,17 @@ pub struct CardCollector { repository: AnkiRepository, force: bool, hash_cache: Option, + update_ids: bool, } impl CardCollector { /// Create a new CardCollector with Anki collection path - pub fn new(collection_path: impl AsRef, force: bool, full_sync: bool) -> Result { + pub fn new( + collection_path: impl AsRef, + force: bool, + full_sync: bool, + update_ids: bool, + ) -> Result { let collection_path = collection_path.as_ref().to_path_buf(); // Determine media directory path @@ -57,6 +63,7 @@ impl CardCollector { repository, force, hash_cache, + update_ids, }) } @@ -153,9 +160,32 @@ impl CardCollector { // Create or update note if let Some(id) = existing_id { // Update existing note - self.repository.update_note(id, &[front_html, 
back_html])?; + self.repository + .update_note(id, &[front_html.clone(), back_html.clone()])?; + } else if self.update_ids { + // --update-ids mode: search for existing note by HTML content + let matching_ids = self + .repository + .search_by_html(&[front_html.clone(), back_html.clone()])?; + + if let Some(&id) = matching_ids.first() { + // Found existing note, inject ID + debug!(note_id = id, "Found existing note for card, injecting ID"); + content = file_writer::inject_anki_id(&content, ¬e_str, id); + // Update the existing note with current content + self.repository.update_note(id, &[front_html, back_html])?; + } else { + // No match found, create new note + let id = self.repository.create_basic_note( + &front_html, + &back_html, + &deck_name, + &tags, + )?; + content = file_writer::inject_anki_id(&content, ¬e_str, id); + } } else { - // Create new note + // Normal mode: create new note let id = self.repository.create_basic_note( &front_html, &back_html, @@ -185,9 +215,29 @@ impl CardCollector { // Create or update note if let Some(id) = existing_id { // Update existing note - self.repository.update_note(id, &[text_html])?; + self.repository.update_note(id, &[text_html.clone()])?; + } else if self.update_ids { + // --update-ids mode: search for existing note by HTML content + let matching_ids = self.repository.search_by_html(&[text_html.clone()])?; + + if let Some(&id) = matching_ids.first() { + // Found existing note, inject ID + debug!( + note_id = id, + "Found existing note for cloze card, injecting ID" + ); + content = file_writer::inject_anki_id(&content, ¬e_str, id); + // Update the existing note with current content + self.repository.update_note(id, &[text_html])?; + } else { + // No match found, create new note + let id = self + .repository + .create_cloze_note(&text_html, &deck_name, &tags)?; + content = file_writer::inject_anki_id(&content, ¬e_str, id); + } } else { - // Create new note + // Normal mode: create new note let id = self .repository 
.create_cloze_note(&text_html, &deck_name, &tags)?; @@ -290,7 +340,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -308,7 +358,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -332,7 +382,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 3); @@ -351,7 +401,7 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); // First run creates note let count1 = collector.process_file(&markdown_path).unwrap(); @@ -380,7 +430,7 @@ Deck: TestDeck let markdown_path = temp_dir.path().join("empty.md"); fs::write(&markdown_path, "Just text, no sections").unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 0); @@ -427,7 +477,7 @@ Deck: Test let txt_file = notes_dir.join("readme.txt"); fs::write(&txt_file, "This is not 
markdown").unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); let count = collector.process_directory(¬es_dir).unwrap(); // Should process both markdown files @@ -456,7 +506,7 @@ Deck: TestDeck fs::write(&markdown_path, markdown_content).unwrap(); // Process the file - let mut collector = CardCollector::new(&collection_path, false, false).unwrap(); + let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); diff --git a/ankiview/src/lib.rs b/ankiview/src/lib.rs index 9d7b212..d557822 100644 --- a/ankiview/src/lib.rs +++ b/ankiview/src/lib.rs @@ -40,12 +40,14 @@ pub fn run(args: Args) -> Result<()> { force, ignore_errors, full_sync, + update_ids, } => handle_collect_command( path, recursive, force, ignore_errors, full_sync, + update_ids, collection_path, ), } @@ -134,17 +136,18 @@ fn handle_collect_command( force: bool, ignore_errors: bool, full_sync: bool, + update_ids: bool, collection_path: PathBuf, ) -> Result<()> { use crate::inka::application::card_collector::CardCollector; info!( ?path, - recursive, force, ignore_errors, full_sync, "Collecting markdown cards" + recursive, force, ignore_errors, full_sync, update_ids, "Collecting markdown cards" ); - // Initialize collector with force and full_sync flags - let mut collector = CardCollector::new(&collection_path, force, full_sync)?; + // Initialize collector with force, full_sync, and update_ids flags + let mut collector = CardCollector::new(&collection_path, force, full_sync, update_ids)?; // Process based on path type let total_cards = if path.is_file() { diff --git a/ankiview/tests/test_collect.rs b/ankiview/tests/test_collect.rs index cbbdcb3..5f2b4c5 100644 --- a/ankiview/tests/test_collect.rs +++ b/ankiview/tests/test_collect.rs @@ -29,6 +29,7 @@ Deck: 
IntegrationTest &test_collection.collection_path, false, false, + false, )?; let count = collector.process_file(&markdown_path)?; @@ -89,6 +90,7 @@ Deck: Integration &test_collection.collection_path, false, false, + false, )?; let count = collector.process_directory(¬es_dir)?; @@ -126,6 +128,7 @@ Deck: UpdateTest &test_collection.collection_path, false, false, + false, )?; let count1 = collector.process_file(&markdown_path)?; assert_eq!(count1, 1); @@ -188,6 +191,7 @@ Tags: test integration &test_collection.collection_path, false, false, + false, )?; collector.process_file(&markdown_path)? }; // Collector dropped here, releasing the lock From 927c529466ee020baeabf52c26c486d43faed59a Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 2 Nov 2025 14:36:55 +0100 Subject: [PATCH 30/32] feat(collect): add error handling with --ignore-errors flag MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implement Phase 4 of inka2 parity plan: error handling to continue processing when individual files fail. 
Key changes: - Add `--ignore-errors` CLI flag to collect command - Extend CardCollector to collect errors instead of failing immediately - Add error summary reporting at end of processing - Implement error collection for both file and directory processing Implementation details: - CardCollector now has `ignore_errors: bool` and `errors: Vec` fields - Refactored `process_file` to wrap implementation with error handling - When `ignore_errors` is true, errors are collected and 0 cards returned - When `ignore_errors` is false, errors propagate normally (existing behavior) - lib.rs prints error summary to stderr after processing completes Testing: - Added tests for error collection when ignore_errors is true - Added tests for error propagation when ignore_errors is false - All existing tests updated for new constructor parameter - Tests use missing media files to trigger realistic errors 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- .../src/inka/application/card_collector.rs | 107 ++++++++++++++++-- ankiview/src/lib.rs | 23 +++- ankiview/tests/test_collect.rs | 4 + 3 files changed, 125 insertions(+), 9 deletions(-) diff --git a/ankiview/src/inka/application/card_collector.rs b/ankiview/src/inka/application/card_collector.rs index 7316daa..89c0db6 100644 --- a/ankiview/src/inka/application/card_collector.rs +++ b/ankiview/src/inka/application/card_collector.rs @@ -18,6 +18,8 @@ pub struct CardCollector { force: bool, hash_cache: Option, update_ids: bool, + ignore_errors: bool, + errors: Vec, } impl CardCollector { @@ -27,6 +29,7 @@ impl CardCollector { force: bool, full_sync: bool, update_ids: bool, + ignore_errors: bool, ) -> Result { let collection_path = collection_path.as_ref().to_path_buf(); @@ -64,14 +67,39 @@ impl CardCollector { force, hash_cache, update_ids, + ignore_errors, + errors: Vec::new(), }) } + /// Get accumulated errors from processing + pub fn errors(&self) -> &[String] { + &self.errors + } + /// Process a 
single markdown file and add/update cards in Anki /// Returns the number of cards processed pub fn process_file(&mut self, markdown_path: impl AsRef) -> Result { let markdown_path = markdown_path.as_ref(); + // Handle error according to ignore_errors flag + match self.process_file_impl(markdown_path) { + Ok(count) => Ok(count), + Err(e) => { + if self.ignore_errors { + // Collect error and continue + let error_msg = format!("{}: {:#}", markdown_path.display(), e); + self.errors.push(error_msg); + Ok(0) + } else { + Err(e) + } + } + } + } + + /// Internal implementation of process_file + fn process_file_impl(&mut self, markdown_path: &Path) -> Result { // Check if file has changed (skip if unchanged and cache exists) if let Some(cache) = &self.hash_cache { let has_changed = cache @@ -340,7 +368,8 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); + let mut collector = + CardCollector::new(&collection_path, false, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -358,7 +387,8 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); + let mut collector = + CardCollector::new(&collection_path, false, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); @@ -382,7 +412,8 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); + let mut collector = + CardCollector::new(&collection_path, false, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 3); @@ -401,7 +432,8 @@ Deck: TestDeck ---"#; fs::write(&markdown_path, markdown_content).unwrap(); - let mut 
collector = CardCollector::new(&collection_path, false, false, false).unwrap(); + let mut collector = + CardCollector::new(&collection_path, false, false, false, false).unwrap(); // First run creates note let count1 = collector.process_file(&markdown_path).unwrap(); @@ -430,7 +462,8 @@ Deck: TestDeck let markdown_path = temp_dir.path().join("empty.md"); fs::write(&markdown_path, "Just text, no sections").unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); + let mut collector = + CardCollector::new(&collection_path, false, false, false, false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 0); @@ -477,13 +510,72 @@ Deck: Test let txt_file = notes_dir.join("readme.txt"); fs::write(&txt_file, "This is not markdown").unwrap(); - let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); + let mut collector = + CardCollector::new(&collection_path, false, false, false, false).unwrap(); let count = collector.process_directory(¬es_dir).unwrap(); // Should process both markdown files assert_eq!(count, 2); } + #[test] + fn given_ignore_errors_when_processing_file_with_missing_media_then_collects_error() { + let (temp_dir, collection_path, _media_dir) = create_test_collection(); + + // Create markdown with reference to non-existent image + let markdown_path = temp_dir.path().join("missing_media.md"); + let markdown_content = r#"--- +Deck: TestDeck + +1. What is this image? 
+> ![missing image](images/nonexistent.png) +---"#; + fs::write(&markdown_path, markdown_content).unwrap(); + + // Process with ignore_errors = true + let mut collector = + CardCollector::new(&collection_path, false, false, false, true).unwrap(); + let count = collector.process_file(&markdown_path).unwrap(); + + // Should return 0 cards since processing failed + assert_eq!(count, 0); + + // Should have collected the error + let errors = collector.errors(); + assert_eq!(errors.len(), 1, "Should have 1 error"); + assert!( + errors[0].contains("missing_media.md"), + "Error message should mention the file" + ); + } + + #[test] + fn given_no_ignore_errors_when_processing_file_with_missing_media_then_returns_error() { + let (temp_dir, collection_path, _media_dir) = create_test_collection(); + + // Create markdown with reference to non-existent image + let markdown_path = temp_dir.path().join("missing_media.md"); + let markdown_content = r#"--- +Deck: TestDeck + +1. What is this image? +> ![missing image](images/nonexistent.png) +---"#; + fs::write(&markdown_path, markdown_content).unwrap(); + + // Process with ignore_errors = false + let mut collector = + CardCollector::new(&collection_path, false, false, false, false).unwrap(); + let result = collector.process_file(&markdown_path); + + // Should return an error + assert!(result.is_err(), "Should return an error"); + + // Should not have collected any errors (since we returned immediately) + let errors = collector.errors(); + assert_eq!(errors.len(), 0, "Should have 0 collected errors"); + } + #[test] fn given_markdown_with_image_when_processing_then_copies_media_file() { let (temp_dir, collection_path, media_dir) = create_test_collection(); @@ -506,7 +598,8 @@ Deck: TestDeck fs::write(&markdown_path, markdown_content).unwrap(); // Process the file - let mut collector = CardCollector::new(&collection_path, false, false, false).unwrap(); + let mut collector = + CardCollector::new(&collection_path, false, false, false, 
false).unwrap(); let count = collector.process_file(&markdown_path).unwrap(); assert_eq!(count, 1); diff --git a/ankiview/src/lib.rs b/ankiview/src/lib.rs index d557822..ef228c7 100644 --- a/ankiview/src/lib.rs +++ b/ankiview/src/lib.rs @@ -146,8 +146,14 @@ fn handle_collect_command( recursive, force, ignore_errors, full_sync, update_ids, "Collecting markdown cards" ); - // Initialize collector with force, full_sync, and update_ids flags - let mut collector = CardCollector::new(&collection_path, force, full_sync, update_ids)?; + // Initialize collector with force, full_sync, update_ids, and ignore_errors flags + let mut collector = CardCollector::new( + &collection_path, + force, + full_sync, + update_ids, + ignore_errors, + )?; // Process based on path type let total_cards = if path.is_file() { @@ -182,6 +188,19 @@ fn handle_collect_command( if total_cards == 1 { "" } else { "s" } ); + // Print error summary if there were any errors + let errors = collector.errors(); + if !errors.is_empty() { + eprintln!( + "\n{} error{} occurred:", + errors.len(), + if errors.len() == 1 { "" } else { "s" } + ); + for error in errors { + eprintln!(" {}", error); + } + } + Ok(()) } diff --git a/ankiview/tests/test_collect.rs b/ankiview/tests/test_collect.rs index 5f2b4c5..734081f 100644 --- a/ankiview/tests/test_collect.rs +++ b/ankiview/tests/test_collect.rs @@ -30,6 +30,7 @@ Deck: IntegrationTest false, false, false, + false, )?; let count = collector.process_file(&markdown_path)?; @@ -91,6 +92,7 @@ Deck: Integration false, false, false, + false, )?; let count = collector.process_directory(¬es_dir)?; @@ -129,6 +131,7 @@ Deck: UpdateTest false, false, false, + false, )?; let count1 = collector.process_file(&markdown_path)?; assert_eq!(count1, 1); @@ -192,6 +195,7 @@ Tags: test integration false, false, false, + false, )?; collector.process_file(&markdown_path)? 
}; // Collector dropped here, releasing the lock From db1339381e4c82daaee255eebfc0569cabc4b0a6 Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 2 Nov 2025 14:50:11 +0100 Subject: [PATCH 31/32] docs: improve help text and add collect command documentation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Enhance CLI help text with clearer descriptions and add comprehensive documentation for the collect command. CLI Help Improvements (args.rs): - Expanded flag descriptions with context and use cases - Clarified what "conflicts" and "unchanged files" mean - Added explanations of hash cache and ID injection mechanisms - Improved readability with multi-line docstrings README.md Updates: - Added collect command to features list - Created comprehensive "Collect markdown cards" section - Included markdown format examples (basic, cloze, images) - Documented common workflows and flag combinations - Added flag reference table - Expanded troubleshooting with collect-specific issues Key Documentation Additions: - Markdown format specification with examples - How ID injection works ( comments) - Hash cache explanation and performance notes - Media file handling workflow - Flag interaction guidance - Troubleshooting for common collect errors The documentation now provides users with complete guidance for importing markdown flashcards into Anki without needing external resources. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- README.md | 124 ++++++++++++++++++++++++++++++++++++++- ankiview/src/cli/args.rs | 25 ++++++-- 2 files changed, 140 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index bba6fb4..90a7339 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,12 @@ AnkiView is a command-line tool that lets you quickly view Anki notes directly f ## Features ✨ -- View any note by its ID in your default browser -- Delete notes from your collection via CLI +- **View notes** - View any note by its ID in your default browser +- **Delete notes** - Delete notes from your collection via CLI +- **Import markdown** - Convert markdown flashcards to Anki notes +- **Smart updates** - Automatically track cards with ID comments +- **Media handling** - Import images from markdown files +- **Hash caching** - Skip unchanged files for fast re-imports - Automatic collection file detection - Support for multiple Anki profiles - LaTeX math rendering support @@ -69,6 +73,105 @@ ankiview -c /path/to/collection.anki2 delete 1234567890 ankiview -p "User 1" delete 1234567890 ``` +### Collect markdown cards + +Import markdown flashcards into your Anki collection: + +```bash +# Import a single file +ankiview collect notes.md + +# Import a directory (non-recursive) +ankiview collect notes/ + +# Import recursively (all subdirectories) +ankiview collect -r notes/ +``` + +**Markdown Format** + +Basic cards (question and answer): +```markdown +--- +Deck: Programming +Tags: rust basics + +1. What is Rust? +> A systems programming language + +2. What is Cargo? +> Rust's package manager +--- +``` + +Cloze deletion cards: +```markdown +--- +Deck: Programming + +1. Rust provides {memory safety} without garbage collection. +2. The {{c1::borrow checker}} ensures {{c2::safe concurrency}}. +--- +``` + +Cards with images: +```markdown +--- +Deck: ComputerScience + +1. What type of graph is this? 
+> ![Graph diagram](images/dag.png) +> A directed acyclic graph (DAG) +--- +``` + +**How It Works** + +1. AnkiView reads your markdown files +2. Creates or updates notes in Anki +3. Injects ID comments into your markdown for tracking +4. Copies media files to Anki's collection.media/ + +After the first run, your markdown will have ID comments: +```markdown + +1. What is Rust? +> A systems programming language +``` + +This allows you to edit the content and re-run collect to update (not duplicate) the cards. + +**Advanced Usage** + +```bash +# Recover lost IDs by searching Anki +ankiview collect -u notes/ + +# Force rebuild (bypass cache) +ankiview collect -f notes/ + +# Overwrite existing media files +ankiview collect --force notes/ + +# Continue on errors, report at end +ankiview collect -i notes/ + +# Combine flags for batch processing +ankiview collect -ri notes/ +``` + +**Flag Reference** + +| Flag | Description | +|------|-------------| +| `-r, --recursive` | Process subdirectories | +| `--force` | Overwrite conflicting media files | +| `-i, --ignore-errors` | Continue processing on errors | +| `-f, --full-sync` | Bypass hash cache (force rebuild) | +| `-u, --update-ids` | Search Anki for existing notes by content | + +**Performance Note:** AnkiView maintains a hash cache to skip unchanged files. Use `-f` to force processing all files. + ### Debug logging Enable debug logging for any command (global flags can appear before or after subcommand): @@ -129,9 +232,24 @@ RUST_LOG=debug cargo test - Verify the collection path 2. **"Failed to open Anki collection"** - - Make sure Anki isn't running + - Make sure Anki isn't running (required for all commands) - Check file permissions +3. **"Different file with the same name already exists"** (collect command) + - Media file conflict detected + - Use `--force` flag to overwrite existing media files + - Or rename your image file to avoid conflict + +4. 
**Duplicate cards created** (collect command) + - Ensure ID comments (``) are preserved in markdown + - Use `--update-ids` flag to recover lost IDs + - Check that you didn't manually modify or remove ID comments + +5. **Cards not updating** (collect command) + - File may be unchanged (check hash cache) + - Use `-f` flag to force rebuild + - Verify ID comments are correct and match Anki notes + ## Contributing 🤝 Contributions are welcome! Please feel free to submit a Pull Request. diff --git a/ankiview/src/cli/args.rs b/ankiview/src/cli/args.rs index f4f8d44..496c482 100644 --- a/ankiview/src/cli/args.rs +++ b/ankiview/src/cli/args.rs @@ -51,28 +51,41 @@ pub enum Command { }, /// Collect markdown cards into Anki + /// + /// Processes markdown files containing flashcards and imports them into your Anki collection. + /// Cards are automatically tracked with ID comments, allowing updates without creating duplicates. Collect { - /// Path to markdown file or directory + /// Path to markdown file or directory containing .md files #[arg(value_name = "PATH")] path: PathBuf, - /// Process directory recursively + /// Process directory recursively, scanning all subdirectories for .md files. + /// Without this flag, only processes files in the specified directory (non-recursive). #[arg(short, long)] recursive: bool, - /// Overwrite conflicting media files + /// Overwrite media files when filename conflicts occur in collection.media/. + /// Without this flag, processing stops with an error if a different file with the same name exists. + /// Use when you want to replace existing images with updated versions. #[arg(long)] force: bool, - /// Continue processing on errors without pausing + /// Continue processing remaining files even if errors occur. + /// Errors are collected and reported at the end instead of stopping immediately. + /// Useful for batch processing where you want to see all issues at once. 
#[arg(short, long)] ignore_errors: bool, - /// Process files even if unchanged since last sync + /// Process all files regardless of hash cache, forcing a complete rebuild. + /// By default, unchanged files are skipped for performance (tracked via SHA256 hashes). + /// Use this when you want to ensure all cards are re-processed from scratch. #[arg(short = 'f', long)] full_sync: bool, - /// Find and update missing/incorrect note IDs by searching Anki + /// Search Anki for existing notes by content and inject their IDs into markdown. + /// Prevents duplicate creation when markdown files lack ID comments (). + /// Useful for recovering lost IDs or importing cards from other sources. + /// Matches notes by comparing HTML field content. #[arg(short = 'u', long)] update_ids: bool, }, From 0c860fda7bd0103d72ab45ebebab6b7d72e39e4f Mon Sep 17 00:00:00 2001 From: sysid Date: Sun, 2 Nov 2025 15:32:47 +0100 Subject: [PATCH 32/32] =?UTF-8?q?Bump=20version:=200.4.0=20=E2=86=92=201.0?= =?UTF-8?q?.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.toml | 2 +- VERSION | 2 +- ankiview/Cargo.lock | 2 +- ankiview/Cargo.toml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index 17d51c4..d6cd2df 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.4.0" +current_version = "1.0.0" parse = "(?P\\d+)\\.(?P\\d+)\\.(?P\\d+)" serialize = ["{major}.{minor}.{patch}"] search = "{current_version}" diff --git a/VERSION b/VERSION index 1d0ba9e..3eefcb9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.4.0 +1.0.0 diff --git a/ankiview/Cargo.lock b/ankiview/Cargo.lock index 5a503a4..9fe469a 100644 --- a/ankiview/Cargo.lock +++ b/ankiview/Cargo.lock @@ -219,7 +219,7 @@ dependencies = [ [[package]] name = "ankiview" -version = "0.4.0" +version = "1.0.0" dependencies = [ "anki", "anyhow", diff --git a/ankiview/Cargo.toml 
b/ankiview/Cargo.toml index 9125c22..d66bcaf 100644 --- a/ankiview/Cargo.toml +++ b/ankiview/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ankiview" -version = "0.4.0" +version = "1.0.0" edition = "2021" authors = ["sysid "] description = "Fast Anki card viewer"