inline code"));
+ }
+}
diff --git a/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs
new file mode 100644
index 0000000..a281f79
--- /dev/null
+++ b/ankiview/src/inka/infrastructure/markdown/mathjax_plugin.rs
@@ -0,0 +1,172 @@
+use markdown_it::parser::block::{BlockRule, BlockState};
+use markdown_it::parser::inline::{InlineRule, InlineState};
+use markdown_it::{MarkdownIt, Node, NodeValue, Renderer};
+
+#[derive(Debug)]
+pub struct InlineMath {
+ pub content: String,
+}
+
+impl NodeValue for InlineMath {
+ fn render(&self, _node: &Node, fmt: &mut dyn Renderer) {
+ // Render as \(...\) for MathJax
+ fmt.text(&format!(r"\({}\)", self.content));
+ }
+}
+
+struct InlineMathScanner;
+
+impl InlineRule for InlineMathScanner {
+ const MARKER: char = '$';
+
+ fn run(state: &mut InlineState) -> Option<(Node, usize)> {
+ let input = &state.src[state.pos..state.pos_max];
+
+ // Check if we start with $
+ if !input.starts_with('$') {
+ return None;
+ }
+
+ // Don't match if $ is followed by whitespace
+ if input.len() < 2 || input.chars().nth(1)?.is_whitespace() {
+ return None;
+ }
+
+ // Find the closing $
+ let mut end_pos = None;
+ let chars: Vec<char> = input.chars().collect();
+
+ for i in 1..chars.len() {
+ if chars[i] == '$' {
+ // Don't match if $ is preceded by whitespace
+ if i > 0 && !chars[i - 1].is_whitespace() {
+ end_pos = Some(i);
+ break;
+ }
+ }
+ }
+
+ if let Some(end) = end_pos {
+ // Extract content between the $...$ (excluding the $ markers)
+ let content: String = chars[1..end].iter().collect();
+ let match_len = end + 1; // Include both $ markers
+
+ let node = Node::new(InlineMath { content });
+ return Some((node, match_len));
+ }
+
+ None
+ }
+}
+
+#[derive(Debug)]
+pub struct BlockMath {
+ pub content: String,
+}
+
+impl NodeValue for BlockMath {
+ fn render(&self, _node: &Node, fmt: &mut dyn Renderer) {
+ // Render as \[...\] for MathJax
+ fmt.text(&format!(r"\[{}\]", self.content));
+ }
+}
+
+struct BlockMathScanner;
+
+impl BlockRule for BlockMathScanner {
+ fn run(state: &mut BlockState) -> Option<(Node, usize)> {
+ // Get the current line
+ if state.line >= state.line_max {
+ return None;
+ }
+
+ let start_line = state.line;
+ let line = state.get_line(start_line);
+
+ // Check if line starts with $$
+ if !line.trim().starts_with("$$") {
+ return None;
+ }
+
+ // Find the closing $$
+ let mut end_line = None;
+ for line_num in (start_line + 1)..state.line_max {
+ let line = state.get_line(line_num);
+ if line.trim().starts_with("$$") {
+ end_line = Some(line_num);
+ break;
+ }
+ }
+
+ if let Some(end) = end_line {
+ // Extract content between the $$ markers
+ let mut content_lines = Vec::new();
+ for line_num in (start_line + 1)..end {
+ content_lines.push(state.get_line(line_num).to_string());
+ }
+ let content = content_lines.join("\n");
+
+ let node = Node::new(BlockMath { content });
+ // Return the closing $$ line - the parser will advance past it
+ return Some((node, end));
+ }
+
+ None
+ }
+}
+
+pub fn add_mathjax_plugin(md: &mut MarkdownIt) {
+ md.inline.add_rule::<InlineMathScanner>();
+ md.block.add_rule::<BlockMathScanner>().before_all();
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn given_inline_math_when_parsing_then_creates_math_token() {
+ let input = "This is $f(x) = x^2$ inline math";
+ let mut parser = MarkdownIt::new();
+ markdown_it::plugins::cmark::add(&mut parser);
+ add_mathjax_plugin(&mut parser);
+
+ let ast = parser.parse(input);
+ let html = ast.render();
+
+ // Should render with MathJax delimiters
+ assert!(html.contains(r"\(f(x) = x^2\)"));
+ }
+
+ #[test]
+ fn given_block_math_when_parsing_then_creates_block_math_token() {
+ let input = "$$\nf(x) = \\int_0^1 x^2 dx\n$$";
+ let mut parser = MarkdownIt::new();
+ markdown_it::plugins::cmark::add(&mut parser);
+ add_mathjax_plugin(&mut parser);
+
+ let html = parser.parse(input).render();
+
+ assert!(html.contains(r"\[f(x) = \int_0^1 x^2 dx\]"));
+ }
+
+ #[test]
+ fn given_mixed_math_when_parsing_then_handles_both_types() {
+ let input = r#"Inline $a=b$ and block:
+
+$$
+\sum_{i=1}^n i = \frac{n(n+1)}{2}
+$$
+
+More text."#;
+
+ let mut parser = MarkdownIt::new();
+ markdown_it::plugins::cmark::add(&mut parser);
+ add_mathjax_plugin(&mut parser);
+
+ let html = parser.parse(input).render();
+
+ assert!(html.contains(r"\(a=b\)"));
+ assert!(html.contains(r"\[\sum_{i=1}^n i = \frac{n(n+1)}{2}\]"));
+ }
+}
diff --git a/ankiview/src/inka/infrastructure/markdown/mod.rs b/ankiview/src/inka/infrastructure/markdown/mod.rs
new file mode 100644
index 0000000..dc922f8
--- /dev/null
+++ b/ankiview/src/inka/infrastructure/markdown/mod.rs
@@ -0,0 +1,6 @@
+// Markdown processing module
+pub mod card_parser;
+pub mod cloze_converter;
+pub mod converter;
+pub mod mathjax_plugin;
+pub mod section_parser;
diff --git a/ankiview/src/inka/infrastructure/markdown/section_parser.rs b/ankiview/src/inka/infrastructure/markdown/section_parser.rs
new file mode 100644
index 0000000..7688c8c
--- /dev/null
+++ b/ankiview/src/inka/infrastructure/markdown/section_parser.rs
@@ -0,0 +1,241 @@
+use lazy_static::lazy_static;
+use regex::Regex;
+
+pub struct SectionParser {
+ section_regex: Regex,
+}
+
+impl SectionParser {
+ pub fn new() -> Self {
+ // Regex pattern: ^---\n(.+?)^---$
+ // Multiline and dotall flags
+ let section_regex =
+ Regex::new(r"(?ms)^---\n(.+?)^---$").expect("Failed to compile section regex");
+
+ Self { section_regex }
+ }
+
+ pub fn parse<'a>(&self, input: &'a str) -> Vec<&'a str> {
+ self.section_regex
+ .captures_iter(input)
+ .filter_map(|cap| cap.get(1))
+ .map(|m| m.as_str())
+ .collect()
+ }
+}
+
+impl Default for SectionParser {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+lazy_static! {
+ static ref DECK_REGEX: Regex =
+ Regex::new(r"(?m)^Deck:[ \t]*(.+?)$").expect("Failed to compile deck regex");
+ static ref TAGS_REGEX: Regex =
+ Regex::new(r"(?m)^Tags:[ \t]*(.+?)$").expect("Failed to compile tags regex");
+ static ref NOTE_START_REGEX: Regex =
+ Regex::new(r"(?m)^(?:\n)?^\d+\.").expect("Failed to compile note start regex");
+}
+
+pub fn extract_deck_name(section: &str) -> Option<String> {
+ DECK_REGEX
+ .captures(section)
+ .and_then(|cap| cap.get(1))
+ .map(|m| m.as_str().trim().to_string())
+}
+
+pub fn extract_tags(section: &str) -> Vec<String> {
+ TAGS_REGEX
+ .captures(section)
+ .and_then(|cap| cap.get(1))
+ .map(|m| {
+ m.as_str()
+ .split_whitespace()
+ .map(|s| s.to_string())
+ .collect()
+ })
+ .unwrap_or_default()
+}
+
+pub fn extract_note_strings(section: &str) -> Vec<String> {
+ // Find all positions where notes start (either "1. " or "\n1. ")
+ let mut note_positions: Vec<usize> = Vec::new();
+
+ // Find all lines starting with digits followed by a dot
+ for line in section.lines() {
+ if let Some(trimmed) = line.trim_start().strip_prefix(|c: char| c.is_ascii_digit()) {
+ if trimmed.starts_with('.') {
+ // Found a note start, get its position in the original string
+ if let Some(pos) = section.find(line) {
+ // Check if there's an ID comment before this line
+ let before = &section[..pos];
+ if let Some(last_line) = before.lines().last() {
+ if last_line.trim().starts_with("\n1. Q1\n> A1\n\n2. Q2\n> A2";
+ let notes = extract_note_strings(section);
+
+ assert_eq!(notes.len(), 2);
+ assert!(notes[0].contains(""));
+ assert!(notes[1].contains(""));
+ }
+
+ #[test]
+ fn given_section_with_cloze_and_basic_when_extracting_then_finds_both() {
+ let section = "1. Basic Q\n> Basic A\n2. Cloze {{c1::text}}";
+ let notes = extract_note_strings(section);
+
+ assert_eq!(notes.len(), 2);
+ }
+}
diff --git a/ankiview/src/inka/infrastructure/media_handler.rs b/ankiview/src/inka/infrastructure/media_handler.rs
new file mode 100644
index 0000000..570af18
--- /dev/null
+++ b/ankiview/src/inka/infrastructure/media_handler.rs
@@ -0,0 +1,420 @@
+use lazy_static::lazy_static;
+use regex::Regex;
+
+lazy_static! {
+ // Match markdown images: 
+ static ref MD_IMAGE_REGEX: Regex = Regex::new(r"!\[.*?\]\(([^)]+)\)")
+ .expect("Failed to compile markdown image regex");
+
+ // Match HTML img tags:
+ static ref HTML_IMAGE_REGEX: Regex = Regex::new(r#"<img[^>]+src="([^"]+)""#)
+ .expect("Failed to compile HTML image regex");
+}
+
+/// Extract image paths from markdown content
+/// Supports both markdown syntax  and HTML
+pub fn extract_image_paths(markdown: &str) -> Vec<String> {
+ let mut paths = Vec::new();
+
+ // Extract markdown format images
+ for cap in MD_IMAGE_REGEX.captures_iter(markdown) {
+ if let Some(path_match) = cap.get(1) {
+ let path = path_match.as_str();
+ // Skip HTTP(S) URLs
+ if !path.starts_with("http://") && !path.starts_with("https://") {
+ paths.push(path.to_string());
+ }
+ }
+ }
+
+ // Extract HTML format images
+ for cap in HTML_IMAGE_REGEX.captures_iter(markdown) {
+ if let Some(path_match) = cap.get(1) {
+ let path = path_match.as_str();
+ // Skip HTTP(S) URLs
+ if !path.starts_with("http://") && !path.starts_with("https://") {
+ paths.push(path.to_string());
+ }
+ }
+ }
+
+ paths
+}
+
+/// Copy a media file to Anki's collection.media directory
+/// Returns the filename (not full path) that Anki will use
+pub fn copy_media_to_anki(
+ source_path: &std::path::Path,
+ media_dir: &std::path::Path,
+ force: bool,
+) -> anyhow::Result<String> {
+ use anyhow::Context;
+
+ // Extract filename from source path
+ let filename = source_path
+ .file_name()
+ .and_then(|n| n.to_str())
+ .ok_or_else(|| anyhow::anyhow!("Invalid filename"))?;
+
+ let dest_path = media_dir.join(filename);
+
+ // Check if file exists in media directory
+ if dest_path.exists() {
+ // Use filecmp equivalent - compare file contents
+ let files_identical = files_are_identical(source_path, &dest_path)
+ .context("Failed to compare file contents")?;
+
+ if files_identical {
+ // Same file already exists - optimization, skip copy
+ return Ok(filename.to_string());
+ }
+
+ // Files have different content
+ if !force {
+ // Error on conflict without --force
+ return Err(anyhow::anyhow!(
+ "Different file with the same name \"{}\" already exists in Anki Media folder. \
+ Use --force to overwrite.",
+ filename
+ ));
+ }
+
+ // force=true: overwrite existing file
+ }
+
+ // Copy file (either new or force overwrite)
+ std::fs::copy(source_path, &dest_path).context("Failed to copy media file")?;
+
+ Ok(filename.to_string())
+}
+
+/// Compare two files for identical content
+fn files_are_identical(path1: &std::path::Path, path2: &std::path::Path) -> anyhow::Result<bool> {
+ use std::io::Read;
+
+ let mut file1 = std::fs::File::open(path1)?;
+ let mut file2 = std::fs::File::open(path2)?;
+
+ // Quick size check first
+ let meta1 = file1.metadata()?;
+ let meta2 = file2.metadata()?;
+
+ if meta1.len() != meta2.len() {
+ return Ok(false);
+ }
+
+ // Compare contents byte by byte
+ let mut buf1 = Vec::new();
+ let mut buf2 = Vec::new();
+
+ file1.read_to_end(&mut buf1)?;
+ file2.read_to_end(&mut buf2)?;
+
+ Ok(buf1 == buf2)
+}
+
+/// Update image paths in HTML to use Anki media filenames
+/// Takes a mapping of original paths to Anki filenames
+pub fn update_media_paths_in_html(
+ html: &str,
+ path_mapping: &std::collections::HashMap<String, String>,
+) -> String {
+ let mut result = html.to_string();
+
+ // Replace each original path with its Anki filename
+ for (original_path, anki_filename) in path_mapping {
+ result = result.replace(original_path, anki_filename);
+ }
+
+ result
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn given_markdown_image_when_extracting_then_returns_path() {
+ let markdown = "Some text ![photo](images/photo.png) more text";
+ let paths = extract_image_paths(markdown);
+
+ assert_eq!(paths, vec!["images/photo.png"]);
+ }
+
+ #[test]
+ fn given_multiple_images_when_extracting_then_returns_all_paths() {
+ let markdown = r#"
+
+Some text
+
+More text
+
+"#;
+ let paths = extract_image_paths(markdown);
+
+ assert_eq!(paths.len(), 3);
+ assert!(paths.contains(&"image1.png".to_string()));
+ assert!(paths.contains(&"path/to/image2.jpg".to_string()));
+ assert!(paths.contains(&"../relative/image3.gif".to_string()));
+ }
+
+ #[test]
+ fn given_html_img_tag_when_extracting_then_returns_path() {
+ let markdown = r#"Some text <img src="diagrams/flow.png" alt="flow"> more text"#;
+ let paths = extract_image_paths(markdown);
+
+ assert_eq!(paths, vec!["diagrams/flow.png"]);
+ }
+
+ #[test]
+ fn given_mixed_formats_when_extracting_then_returns_all() {
+ let markdown = r#"
+Markdown: 
+HTML:
+Another: 
+"#;
+ let paths = extract_image_paths(markdown);
+
+ assert_eq!(paths.len(), 3);
+ assert!(paths.contains(&"logo.png".to_string()));
+ assert!(paths.contains(&"banner.jpg".to_string()));
+ assert!(paths.contains(&"icons/star.svg".to_string()));
+ }
+
+ #[test]
+ fn given_no_images_when_extracting_then_returns_empty() {
+ let markdown = "Just text with no images at all";
+ let paths = extract_image_paths(markdown);
+
+ assert!(paths.is_empty());
+ }
+
+ #[test]
+ fn given_absolute_urls_when_extracting_then_excludes_them() {
+ let markdown = r#"
+Local: 
+HTTP: 
+HTTPS: 
+"#;
+ let paths = extract_image_paths(markdown);
+
+ // Should only return local path, not HTTP(S) URLs
+ assert_eq!(paths, vec!["image.png"]);
+ }
+
+ #[test]
+ fn given_source_file_when_copying_then_file_appears_in_media_dir() {
+ use std::fs;
+ use tempfile::TempDir;
+
+ let temp_dir = TempDir::new().unwrap();
+ let source_file = temp_dir.path().join("test_image.png");
+ fs::write(&source_file, b"fake image data").unwrap();
+
+ let media_dir = temp_dir.path().join("collection.media");
+ fs::create_dir(&media_dir).unwrap();
+
+ let filename = copy_media_to_anki(&source_file, &media_dir, false).unwrap();
+
+ // Should return just the filename
+ assert_eq!(filename, "test_image.png");
+
+ // File should exist in media directory
+ let dest_path = media_dir.join(&filename);
+ assert!(dest_path.exists());
+
+ // Content should match
+ let content = fs::read(&dest_path).unwrap();
+ assert_eq!(content, b"fake image data");
+ }
+
+ #[test]
+ fn given_identical_file_when_copying_without_force_then_skips() {
+ use std::fs;
+ use tempfile::TempDir;
+
+ let temp_dir = TempDir::new().unwrap();
+ let source_file = temp_dir.path().join("image.png");
+ fs::write(&source_file, b"same content").unwrap();
+
+ let media_dir = temp_dir.path().join("collection.media");
+ fs::create_dir(&media_dir).unwrap();
+
+ // Pre-create identical file in media dir
+ let existing_file = media_dir.join("image.png");
+ fs::write(&existing_file, b"same content").unwrap();
+
+ // Copy should succeed and return filename
+ let filename = copy_media_to_anki(&source_file, &media_dir, false).unwrap();
+ assert_eq!(filename, "image.png");
+
+ // Should not overwrite (content stays same but we verify no error)
+ let content = fs::read(&existing_file).unwrap();
+ assert_eq!(content, b"same content");
+ }
+
+ #[test]
+ fn given_different_file_when_copying_without_force_then_errors() {
+ use std::fs;
+ use tempfile::TempDir;
+
+ let temp_dir = TempDir::new().unwrap();
+ let source_file = temp_dir.path().join("image.png");
+ fs::write(&source_file, b"new content").unwrap();
+
+ let media_dir = temp_dir.path().join("collection.media");
+ fs::create_dir(&media_dir).unwrap();
+
+ // Pre-create different file in media dir
+ let existing_file = media_dir.join("image.png");
+ fs::write(&existing_file, b"old content").unwrap();
+
+ // Copy should fail with error about conflict
+ let result = copy_media_to_anki(&source_file, &media_dir, false);
+ assert!(result.is_err());
+ let error_msg = result.unwrap_err().to_string();
+ assert!(error_msg.contains("already exists"));
+ assert!(error_msg.contains("--force"));
+ }
+
+ #[test]
+ fn given_different_file_when_copying_with_force_then_overwrites() {
+ use std::fs;
+ use tempfile::TempDir;
+
+ let temp_dir = TempDir::new().unwrap();
+ let source_file = temp_dir.path().join("image.png");
+ fs::write(&source_file, b"new content").unwrap();
+
+ let media_dir = temp_dir.path().join("collection.media");
+ fs::create_dir(&media_dir).unwrap();
+
+ // Pre-create different file in media dir
+ let existing_file = media_dir.join("image.png");
+ fs::write(&existing_file, b"old content").unwrap();
+
+ // Copy with force should succeed
+ let filename = copy_media_to_anki(&source_file, &media_dir, true).unwrap();
+ assert_eq!(filename, "image.png");
+
+ // Should overwrite with new content
+ let content = fs::read(&existing_file).unwrap();
+ assert_eq!(content, b"new content");
+ }
+
+ #[test]
+ fn given_nonexistent_source_when_copying_then_returns_error() {
+ use std::fs;
+ use tempfile::TempDir;
+
+ let temp_dir = TempDir::new().unwrap();
+ let nonexistent = temp_dir.path().join("doesnt_exist.png");
+
+ let media_dir = temp_dir.path().join("collection.media");
+ fs::create_dir(&media_dir).unwrap();
+
+ let result = copy_media_to_anki(&nonexistent, &media_dir, false);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn given_file_with_path_when_copying_then_returns_basename() {
+ use std::fs;
+ use tempfile::TempDir;
+
+ let temp_dir = TempDir::new().unwrap();
+ let subdir = temp_dir.path().join("images");
+ fs::create_dir(&subdir).unwrap();
+
+ let source_file = subdir.join("photo.jpg");
+ fs::write(&source_file, b"photo data").unwrap();
+
+ let media_dir = temp_dir.path().join("collection.media");
+ fs::create_dir(&media_dir).unwrap();
+
+ let filename = copy_media_to_anki(&source_file, &media_dir, false).unwrap();
+
+ // Should return just filename, not path
+ assert_eq!(filename, "photo.jpg");
+
+ // File should be in media dir root (not in subdirectory)
+ assert!(media_dir.join("photo.jpg").exists());
+ }
+
+ #[test]
+ fn given_html_with_image_src_when_updating_then_replaces_path() {
+ use std::collections::HashMap;
+
+ let html = r#"Some text
more text
"#;
+ let mut mapping = HashMap::new();
+ mapping.insert("images/photo.png".to_string(), "photo.png".to_string());
+
+ let updated = update_media_paths_in_html(html, &mapping);
+
+ assert!(updated.contains(r#"
+ Text
+
+ "#;
+
+ let mut mapping = HashMap::new();
+ mapping.insert("path/to/image1.jpg".to_string(), "image1.jpg".to_string());
+ mapping.insert("another/image2.png".to_string(), "image2.png".to_string());
+
+ let updated = update_media_paths_in_html(html, &mapping);
+
+ assert!(updated.contains(r#"src="image1.jpg""#));
+ assert!(updated.contains(r#"src="image2.png""#));
+ assert!(!updated.contains("path/to/"));
+ assert!(!updated.contains("another/"));
+ }
+
+ #[test]
+ fn given_html_with_no_matching_paths_when_updating_then_unchanged() {
+ use std::collections::HashMap;
+
+ let html = r#"Text without images
"#;
+ let mapping = HashMap::new();
+
+ let updated = update_media_paths_in_html(html, &mapping);
+
+ assert_eq!(updated, html);
+ }
+
+ #[test]
+ fn given_html_with_unmapped_image_when_updating_then_leaves_unchanged() {
+ use std::collections::HashMap;
+
+ let html = r#"
and
"#;
+ let mut mapping = HashMap::new();
+ mapping.insert("mapped.jpg".to_string(), "new_mapped.jpg".to_string());
+
+ let updated = update_media_paths_in_html(html, &mapping);
+
+ // Should update only mapped path
+ assert!(updated.contains(r#"src="new_mapped.jpg""#));
+ // Should leave unmapped path as-is
+ assert!(updated.contains(r#"src="unmapped.png""#));
+ }
+
+ #[test]
+ fn given_markdown_img_syntax_when_updating_then_replaces_path() {
+ use std::collections::HashMap;
+
+ let html = r#"
"#;
+ let mut mapping = HashMap::new();
+ mapping.insert("images/diagram.png".to_string(), "diagram.png".to_string());
+
+ let updated = update_media_paths_in_html(html, &mapping);
+
+ assert!(updated.contains(r#"src="diagram.png""#));
+ }
+}
diff --git a/ankiview/src/inka/infrastructure/mod.rs b/ankiview/src/inka/infrastructure/mod.rs
new file mode 100644
index 0000000..2cdc1d8
--- /dev/null
+++ b/ankiview/src/inka/infrastructure/mod.rs
@@ -0,0 +1,5 @@
+pub mod config;
+pub mod file_writer;
+pub mod hasher;
+pub mod markdown;
+pub mod media_handler;
diff --git a/ankiview/src/inka/mod.rs b/ankiview/src/inka/mod.rs
new file mode 100644
index 0000000..6cc80f2
--- /dev/null
+++ b/ankiview/src/inka/mod.rs
@@ -0,0 +1,4 @@
+pub mod application;
+pub mod cli;
+pub mod domain;
+pub mod infrastructure;
diff --git a/ankiview/src/lib.rs b/ankiview/src/lib.rs
index 498ddba..ef228c7 100644
--- a/ankiview/src/lib.rs
+++ b/ankiview/src/lib.rs
@@ -3,6 +3,7 @@ pub mod application;
pub mod cli;
pub mod domain;
pub mod infrastructure;
+pub mod inka;
pub mod ports;
pub mod util;
@@ -33,6 +34,22 @@ pub fn run(args: Args) -> Result<()> {
Command::View { note_id, json } => handle_view_command(note_id, json, collection_path),
Command::Delete { note_id } => handle_delete_command(note_id, collection_path),
Command::List { search } => handle_list_command(search.as_deref(), collection_path),
+ Command::Collect {
+ path,
+ recursive,
+ force,
+ ignore_errors,
+ full_sync,
+ update_ids,
+ } => handle_collect_command(
+ path,
+ recursive,
+ force,
+ ignore_errors,
+ full_sync,
+ update_ids,
+ collection_path,
+ ),
}
}
@@ -113,6 +130,80 @@ fn handle_list_command(search_query: Option<&str>, collection_path: PathBuf) ->
Ok(())
}
+fn handle_collect_command(
+ path: PathBuf,
+ recursive: bool,
+ force: bool,
+ ignore_errors: bool,
+ full_sync: bool,
+ update_ids: bool,
+ collection_path: PathBuf,
+) -> Result<()> {
+ use crate::inka::application::card_collector::CardCollector;
+
+ info!(
+ ?path,
+ recursive, force, ignore_errors, full_sync, update_ids, "Collecting markdown cards"
+ );
+
+ // Initialize collector with force, full_sync, update_ids, and ignore_errors flags
+ let mut collector = CardCollector::new(
+ &collection_path,
+ force,
+ full_sync,
+ update_ids,
+ ignore_errors,
+ )?;
+
+ // Process based on path type
+ let total_cards = if path.is_file() {
+ // Single file
+ collector.process_file(&path)?
+ } else if path.is_dir() {
+ if recursive {
+ // Recursive directory processing
+ collector.process_directory(&path)?
+ } else {
+ // Non-recursive - only process .md files in the directory
+ let mut count = 0;
+ for entry in std::fs::read_dir(&path)? {
+ let entry = entry?;
+ let entry_path = entry.path();
+ if entry_path.is_file()
+ && entry_path.extension().and_then(|s| s.to_str()) == Some("md")
+ {
+ count += collector.process_file(&entry_path)?;
+ }
+ }
+ count
+ }
+ } else {
+ return Err(anyhow::anyhow!("Path does not exist: {:?}", path));
+ };
+
+ // Print summary
+ println!(
+ "Successfully processed {} card{}",
+ total_cards,
+ if total_cards == 1 { "" } else { "s" }
+ );
+
+ // Print error summary if there were any errors
+ let errors = collector.errors();
+ if !errors.is_empty() {
+ eprintln!(
+ "\n{} error{} occurred:",
+ errors.len(),
+ if errors.len() == 1 { "" } else { "s" }
+ );
+ for error in errors {
+ eprintln!(" {}", error);
+ }
+ }
+
+ Ok(())
+}
+
pub fn find_collection_path(profile: Option<&str>) -> Result<PathBuf> {
let home = dirs::home_dir().context("Could not find home directory")?;
diff --git a/ankiview/tests/fixtures/README.md b/ankiview/tests/fixtures/README.md
index 832f3e0..85fc4e4 100644
--- a/ankiview/tests/fixtures/README.md
+++ b/ankiview/tests/fixtures/README.md
@@ -3,7 +3,7 @@
## Golden Test Dataset
**Source**: `/Users/Q187392/dev/s/private/ankiview/data/testuser/`
-**Fixture Location**: `test_collection/`
+**Fixture Location**: `test_collection/User 1/`
**IMPORTANT**: The golden dataset in the source location is READ-ONLY. Never modify it. All tests work with copies.
diff --git a/ankiview/tests/fixtures/build_test_collection.rs b/ankiview/tests/fixtures/build_test_collection.rs
index e654ed4..938a2b6 100644
--- a/ankiview/tests/fixtures/build_test_collection.rs
+++ b/ankiview/tests/fixtures/build_test_collection.rs
@@ -112,12 +112,12 @@ fn create_test_media(media_dir: &std::path::Path) -> anyhow::Result<()> {
];
let rust_logo_path = media_dir.join("rust-logo.png");
- std::fs::write(&rust_logo_path, &rust_logo_png)?;
+ std::fs::write(&rust_logo_path, rust_logo_png)?;
println!("Created test image: {:?}", rust_logo_path);
// Create another simple PNG (sample.jpg - actually a PNG despite the name)
let sample_path = media_dir.join("sample.jpg");
- std::fs::write(&sample_path, &rust_logo_png)?;
+ std::fs::write(&sample_path, rust_logo_png)?;
println!("Created test image: {:?}", sample_path);
Ok(())
diff --git a/ankiview/tests/fixtures/copy_golden_dataset.sh b/ankiview/tests/fixtures/copy_golden_dataset.sh
index efc4b56..2a5413e 100755
--- a/ankiview/tests/fixtures/copy_golden_dataset.sh
+++ b/ankiview/tests/fixtures/copy_golden_dataset.sh
@@ -5,7 +5,7 @@
set -euo pipefail
GOLDEN_SOURCE="/Users/Q187392/dev/s/private/ankiview/data/testuser"
-FIXTURE_TARGET="ankiview/tests/fixtures/test_collection"
+FIXTURE_TARGET="ankiview/tests/fixtures/test_collection/User 1"
echo "Copying golden dataset to test fixtures..."
diff --git a/ankiview/tests/fixtures/gh_activity.png b/ankiview/tests/fixtures/gh_activity.png
new file mode 100644
index 0000000..f24ea2b
Binary files /dev/null and b/ankiview/tests/fixtures/gh_activity.png differ
diff --git a/ankiview/tests/fixtures/munggoggo.png b/ankiview/tests/fixtures/munggoggo.png
new file mode 100644
index 0000000..2a71ac8
Binary files /dev/null and b/ankiview/tests/fixtures/munggoggo.png differ
diff --git a/ankiview/tests/fixtures/test_collection/collection.anki2 b/ankiview/tests/fixtures/test_collection/User 1/collection.anki2
similarity index 100%
rename from ankiview/tests/fixtures/test_collection/collection.anki2
rename to ankiview/tests/fixtures/test_collection/User 1/collection.anki2
diff --git a/ankiview/tests/fixtures/test_collection/collection.anki2-shm b/ankiview/tests/fixtures/test_collection/User 1/collection.anki2-shm
similarity index 100%
rename from ankiview/tests/fixtures/test_collection/collection.anki2-shm
rename to ankiview/tests/fixtures/test_collection/User 1/collection.anki2-shm
diff --git a/ankiview/tests/fixtures/test_collection/collection.media.db2 b/ankiview/tests/fixtures/test_collection/User 1/collection.media.db2
similarity index 100%
rename from ankiview/tests/fixtures/test_collection/collection.media.db2
rename to ankiview/tests/fixtures/test_collection/User 1/collection.media.db2
diff --git a/ankiview/tests/fixtures/test_collection/collection.media/dag.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/dag.png
similarity index 100%
rename from ankiview/tests/fixtures/test_collection/collection.media/dag.png
rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/dag.png
diff --git a/ankiview/tests/fixtures/test_collection/collection.media/mercator.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/mercator.png
similarity index 100%
rename from ankiview/tests/fixtures/test_collection/collection.media/mercator.png
rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/mercator.png
diff --git a/ankiview/tests/fixtures/test_collection/collection.media/star-schema.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/star-schema.png
similarity index 100%
rename from ankiview/tests/fixtures/test_collection/collection.media/star-schema.png
rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/star-schema.png
diff --git a/ankiview/tests/fixtures/test_collection/collection.media/wsg-enu2.png b/ankiview/tests/fixtures/test_collection/User 1/collection.media/wsg-enu2.png
similarity index 100%
rename from ankiview/tests/fixtures/test_collection/collection.media/wsg-enu2.png
rename to ankiview/tests/fixtures/test_collection/User 1/collection.media/wsg-enu2.png
diff --git a/ankiview/tests/fixtures/test_collection/prefs21.db b/ankiview/tests/fixtures/test_collection/prefs21.db
new file mode 100644
index 0000000..f933073
Binary files /dev/null and b/ankiview/tests/fixtures/test_collection/prefs21.db differ
diff --git a/ankiview/tests/helpers/mod.rs b/ankiview/tests/helpers/mod.rs
index 7d087dd..69cd334 100644
--- a/ankiview/tests/helpers/mod.rs
+++ b/ankiview/tests/helpers/mod.rs
@@ -43,10 +43,11 @@ impl TestCollection {
/// Get path to the fixture collection
fn fixture_collection_path() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
- .join("tests/fixtures/test_collection/collection.anki2")
+ .join("tests/fixtures/test_collection/User 1/collection.anki2")
}
/// Open repository for this test collection
+ #[allow(dead_code)]
pub fn open_repository(&self) -> Result<AnkiRepository> {
AnkiRepository::new(&self.collection_path)
}
diff --git a/ankiview/tests/test_anki.rs b/ankiview/tests/test_anki.rs
index 606e89f..d922ca2 100644
--- a/ankiview/tests/test_anki.rs
+++ b/ankiview/tests/test_anki.rs
@@ -169,7 +169,7 @@ fn given_collection_when_listing_with_search_then_returns_filtered_notes() -> Re
let notes = repo.list_notes(Some("Tree"))?;
// Assert
- assert!(notes.len() > 0);
+ assert!(!notes.is_empty());
assert!(notes.iter().any(|n| n.front.contains("Tree")));
Ok(())
}
diff --git a/ankiview/tests/test_cli.rs b/ankiview/tests/test_cli.rs
index 90ecdc9..019d49f 100644
--- a/ankiview/tests/test_cli.rs
+++ b/ankiview/tests/test_cli.rs
@@ -23,7 +23,7 @@ fn given_explicit_view_command_when_parsing_then_succeeds() {
match parsed.command {
Command::View { note_id, json } => {
assert_eq!(note_id, 1234567890);
- assert_eq!(json, false);
+ assert!(!json);
}
_ => panic!("Expected View command"),
}
@@ -90,7 +90,7 @@ fn given_global_profile_flag_when_parsing_then_succeeds() {
match parsed.command {
Command::View { note_id, json } => {
assert_eq!(note_id, 1234567890);
- assert_eq!(json, false);
+ assert!(!json);
}
_ => panic!("Expected View command"),
}
@@ -149,7 +149,7 @@ fn given_json_flag_when_parsing_view_command_then_json_is_true() {
match parsed.command {
Command::View { note_id, json } => {
assert_eq!(note_id, 1234567890);
- assert_eq!(json, true);
+ assert!(json);
}
_ => panic!("Expected View command"),
}
@@ -167,7 +167,7 @@ fn given_no_json_flag_when_parsing_view_command_then_json_is_false() {
match parsed.command {
Command::View { note_id, json } => {
assert_eq!(note_id, 1234567890);
- assert_eq!(json, false);
+ assert!(!json);
}
_ => panic!("Expected View command"),
}
@@ -185,7 +185,7 @@ fn given_json_flag_with_global_flags_when_parsing_then_succeeds() {
match parsed.command {
Command::View { note_id, json } => {
assert_eq!(note_id, 1234567890);
- assert_eq!(json, true);
+ assert!(json);
}
_ => panic!("Expected View command"),
}
diff --git a/ankiview/tests/test_collect.rs b/ankiview/tests/test_collect.rs
new file mode 100644
index 0000000..734081f
--- /dev/null
+++ b/ankiview/tests/test_collect.rs
@@ -0,0 +1,240 @@
+mod helpers;
+
+use anyhow::Result;
+use helpers::TestCollection;
+use std::fs;
+use tempfile::TempDir;
+
+#[test]
+fn given_markdown_file_when_collecting_then_creates_notes_in_anki() -> Result<()> {
+ // Arrange
+ let test_collection = TestCollection::new()?;
+ let temp_dir = TempDir::new()?;
+
+ // Create a markdown file with basic cards only (simpler test)
+ let markdown_path = temp_dir.path().join("test.md");
+ let markdown_content = r#"---
+Deck: IntegrationTest
+
+1. What is the capital of France?
+> Paris
+
+2. What is 2 + 2?
+> 4
+---"#;
+ fs::write(&markdown_path, markdown_content)?;
+
+ // Act
+ let mut collector = ankiview::inka::application::card_collector::CardCollector::new(
+ &test_collection.collection_path,
+ false,
+ false,
+ false,
+ false,
+ )?;
+ let count = collector.process_file(&markdown_path)?;
+
+ // Assert
+ assert_eq!(count, 2, "Should process 2 cards");
+
+ // Verify IDs were injected
+ let updated_content = fs::read_to_string(&markdown_path)?;
+ assert!(
+ updated_content.contains("")
+ .next()?
+ .trim()
+ .parse::()
+ .ok()
+ })
+ .collect();
+
+ assert_eq!(ids.len(), 3, "Should extract 3 valid IDs");
+
+ // Verify IDs are non-zero and unique
+ for id in &ids {
+ assert!(*id > 0, "ID should be positive");
+ }
+
+ let unique_ids: std::collections::HashSet<_> = ids.iter().collect();
+ assert_eq!(unique_ids.len(), 3, "All IDs should be unique");
+
+ Ok(())
+}