From 0b6244cf08328b568da84381113ea6e35c2df4ef Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sat, 24 Jan 2026 16:10:15 +0300 Subject: [PATCH 1/2] Release 1.3.0 --- ARCHITECTURE.md | 36 +- CHANGELOG.md | 21 ++ codegraph/__init__.py | 2 +- codegraph/core.py | 358 +------------------- codegraph/main.py | 16 +- codegraph/parser.py | 3 +- codegraph/parsers/__init__.py | 4 + codegraph/parsers/base.py | 57 ++++ codegraph/parsers/python_parser.py | 398 +++++++++++++++++++++++ codegraph/parsers/registry.py | 22 ++ codegraph/parsers/rust_parser.py | 22 ++ codegraph/utils.py | 30 +- pyproject.toml | 3 +- tests/test_graph_generation.py | 8 +- tests/test_known_limitations.py | 74 +++++ tests/test_legacy_parser_objects.py | 32 ++ tests/test_parsers_and_utils.py | 59 ++++ tests/test_python_ast_parser_versions.py | 69 ++++ tests/test_python_parser_ast.py | 113 +++++++ tests/test_utils.py | 14 +- tests/test_visualizer_helpers.py | 75 +++++ 21 files changed, 1042 insertions(+), 374 deletions(-) create mode 100644 codegraph/parsers/__init__.py create mode 100644 codegraph/parsers/base.py create mode 100644 codegraph/parsers/python_parser.py create mode 100644 codegraph/parsers/registry.py create mode 100644 codegraph/parsers/rust_parser.py create mode 100644 tests/test_known_limitations.py create mode 100644 tests/test_legacy_parser_objects.py create mode 100644 tests/test_parsers_and_utils.py create mode 100644 tests/test_python_ast_parser_versions.py create mode 100644 tests/test_python_parser_ast.py create mode 100644 tests/test_visualizer_helpers.py diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 504919d..aa2c3b1 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -14,7 +14,12 @@ codegraph/ │ ├── __init__.py # Package init, version definition │ ├── main.py # CLI entry point (click-based) │ ├── core.py # Core graph building logic -│ ├── parser.py # Python source code parser +│ ├── parser.py # Python token parser (legacy, used by PythonParser) +│ ├── parsers/ # Pluggable language parsers +│ │ ├── base.py # Parser interface +│ │ ├── python_parser.py # Python parser implementation +│ │ ├── rust_parser.py # Rust parser stub +│ │ ├── registry.py # Parser registry / discovery │ ├── utils.py # Utility functions │ └── vizualyzer.py # Visualization (D3.js + matplotlib) ├── tests/ # Test suite @@ -30,9 +35,25 @@ codegraph/ ## Core Components -### 1. Parser (`codegraph/parser.py`) +### 1. Parser Layer (`codegraph/parsers/`) -The parser uses Python's `tokenize` module to extract code structure from source files. +Parser implementations are pluggable via a registry. Each parser exposes: +- `get_source_files()` for language-specific file discovery +- `parse_files()` to produce module objects +- `usage_graph()` to build dependencies +- `get_entity_metadata()` for entity stats + +This allows adding new languages without changing core graph orchestration. + +#### Python Parser (`codegraph/parsers/python_parser.py`) + +Uses Python's `ast` (and `typed_ast` for Python 2.x) to extract classes, functions, +imports, and line ranges. + +#### Rust Parser (`codegraph/parsers/rust_parser.py`) + +Currently a stub to establish extension points. The intent is to parse `.rs` files, +extract functions/structs/impl blocks, and build dependency edges using a Rust-aware parser. **Key Classes:** - `_Object` - Base class for all parsed objects (lineno, endno, name, parent) @@ -52,16 +73,15 @@ The parser uses Python's `tokenize` module to extract code structure from source ### 2. 
Core (`codegraph/core.py`) -The core module builds the dependency graph from parsed data. +The core module orchestrates parsing and visualization by delegating language-specific +work to the selected parser. **Key Classes:** - `CodeGraph` - Main class that orchestrates graph building **Key Functions:** -- `get_code_objects(paths_list)` - Parse all files and return dict of module → objects -- `get_imports_and_entities_lines()` - Extract imports and entity line ranges -- `collect_entities_usage_in_modules()` - Find where entities are used -- `search_entity_usage()` - Check if entity is used in a line +- `usage_graph()` - Delegates to the active parser +- `get_entity_metadata()` - Delegates to the active parser **Data Flow:** ``` diff --git a/CHANGELOG.md b/CHANGELOG.md index e6bcd74..f9f7db0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,27 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.3.0] - 2026-01-24 + +### Added + +- Pluggable parser architecture with registry and base parser interface +- Rust parser stub to establish language extension points +- AST-based Python parser with cross-version parsing support (Python 2.x via typed-ast) +- CLI options for language selection and target Python version +- Expanded test coverage for parsers, registry/utils, visualizer helpers, and legacy parser objects + +### Changed + +- Core now delegates parsing and dependency analysis to language parsers +- Python dependency detection now uses AST rather than token scanning +- Utilities now support multi-extension file discovery + +### Fixed + +- Legacy parser now handles comma-separated imports in a single statement +- Removed known false positives from string literals and alias leakage (AST parser) + ## [1.2.0] - 2026-01-18 ### Added diff --git a/codegraph/__init__.py b/codegraph/__init__.py index c68196d..67bc602 100644 --- a/codegraph/__init__.py +++ b/codegraph/__init__.py @@ -1 +1 @@ -__version__ = "1.2.0" +__version__ = "1.3.0" diff --git a/codegraph/core.py b/codegraph/core.py index 64ccd33..36fc7f5 100644 --- a/codegraph/core.py +++ b/codegraph/core.py @@ -1,47 +1,15 @@ -import logging -import os from argparse import Namespace -from collections import defaultdict, deque -from typing import Dict, List, Set, Text, Tuple +from typing import Dict, Set -from codegraph.parser import Import, create_objects_array -from codegraph.utils import get_python_paths_list - -logger = logging.getLogger(__name__) - -aliases = {} - - -def read_file_content(path: Text) -> Text: - with open(path, "r+") as file_read: - return file_read.read() - - -def parse_code_file(path: Text) -> List: - """read module source and parse to get objects array""" - source = read_file_content(path) - parsed_module = create_objects_array(source=source, fname=os.path.basename(path)) - return parsed_module - - -def get_code_objects(paths_list: List) -> Dict: - """ - get all code files data for paths list - :param paths_list: list with paths to code files to parse - :return: - """ - all_data = {} - for path in paths_list: - content = parse_code_file(path) - all_data[path] = content - return all_data +from codegraph.parsers import get_parser class CodeGraph: def __init__(self, args: Namespace): - self.paths_list = get_python_paths_list(args.paths) - # get py modules list data - self.modules_data = get_code_objects(self.paths_list) + language 
= getattr(args, "language", "python") + self.parser = get_parser(language, args=args) + self.paths_list = self.parser.get_source_files(args.paths) + self.modules_data = self.parser.parse_files(self.paths_list) def get_lines_numbers(self): """ @@ -54,72 +22,18 @@ def get_lines_numbers(self): first number in tuple - start line, second - last line """ data = {} - for module in self.modules_data: + metadata = self.get_entity_metadata() + for module, entities in metadata.items(): data[module] = {} - for func in self.modules_data[module]: - data[module][func.name] = (func.lineno, func.endno) + for name, info in entities.items(): + data[module][name] = (info.get("lineno"), info.get("endno")) return data def get_entity_metadata(self) -> Dict: - """ - Return metadata for all entities including line counts and types. - :return: {module_path: {entity_name: {'lines': int, 'type': 'function'|'class'}}} - """ - from codegraph.parser import Class, Function, AsyncFunction, Import - - data = {} - for module_path in self.modules_data: - data[module_path] = {} - for entity in self.modules_data[module_path]: - if isinstance(entity, Import): - continue - lines = 0 - if entity.lineno and entity.endno: - lines = entity.endno - entity.lineno + 1 - - entity_type = "function" - if isinstance(entity, Class): - entity_type = "class" - elif isinstance(entity, (Function, AsyncFunction)): - entity_type = "function" - - data[module_path][entity.name] = { - "lines": lines, - "entity_type": entity_type, - "lineno": entity.lineno, - "endno": entity.endno - } - return data + return self.parser.get_entity_metadata(self.modules_data) def usage_graph(self) -> Dict: - """ - module name: function - :return: - """ - entities_lines, imports, modules_names_map = get_imports_and_entities_lines( - self.modules_data - ) - entities_usage_in_modules = collect_entities_usage_in_modules( - self.modules_data, imports, modules_names_map - ) - # create edges - dependencies = defaultdict(dict) - for module in entities_usage_in_modules: - dependencies[module] = defaultdict(list) - for method_that_used in entities_usage_in_modules[module]: - method_usage_lines = entities_usage_in_modules[module][method_that_used] - for method_usage_line in method_usage_lines: - for entity in entities_lines[module]: - if entity[0] <= method_usage_line <= entity[1]: - dependencies[module][entities_lines[module][entity]].append( - method_that_used - ) - break - else: - # mean in global of module - dependencies[module]["_"].append(method_that_used) - dependencies = populate_free_nodes(self.modules_data, dependencies, imports, modules_names_map) - return dependencies + return self.parser.usage_graph(self.modules_data) def get_dependencies(self, file_path: str, distance: int) -> Dict[str, Set[str]]: """ @@ -129,249 +43,5 @@ def get_dependencies(self, file_path: str, distance: int) -> Dict[str, Set[str]] :param distance: Number of edges to traverse :return: Dictionary with distances as keys and sets of dependent files as values """ - dependencies = {i: set() for i in range(1, distance + 1)} - graph = self.usage_graph() - - if file_path not in graph: - return dependencies - - queue = deque([(file_path, 0)]) - visited = set() - - while queue: - current_file, current_distance = queue.popleft() - - if current_distance >= distance: - continue - - if current_file not in visited: - visited.add(current_file) - - for entity, used_entities in graph[current_file].items(): - for used_entity in used_entities: - if "." 
in used_entity: - dependent_file = used_entity.split(".")[0] + ".py" - if dependent_file != current_file: - dependencies[current_distance + 1].add(dependent_file) - queue.append((dependent_file, current_distance + 1)) - - return dependencies - - -def get_module_name(code_path: Text) -> Text: - module_name = os.path.basename(code_path).replace(".py", "") - return module_name - - -def module_name_in_imports(imports: List, module_name: Text) -> bool: - for import_ in imports: - if module_name in import_: - return True - return False - - -def get_imports_and_entities_lines( # noqa: C901 - code_objects: Dict, -) -> Tuple[Dict, Dict, Dict]: - # todo: need to do optimization - """ - joined together to avoid iteration several time - imports - list of modules in code_objects Dict that used in current module - """ - entities_lines = defaultdict(dict) - imports = defaultdict(list) - modules_ = code_objects.keys() - names_map = {} - # Build a set of all module names for quick lookup - module_names_set = {os.path.basename(m).replace(".py", "") for m in modules_} - - for path in code_objects: - names_map[get_module_name(path)] = path - # for each module in list - if code_objects[path] and isinstance(code_objects[path][-1], Import): - # extract imports if exist - for import_ in code_objects[path].pop(-1).modules: - pathed_import = import_ - alias = None - if " as " in pathed_import: - pathed_import, alias = pathed_import.split(" as ") - - parts = pathed_import.split(".") - matched = False - - # Try each part from right to left to find a module match - # e.g., simple_ddl_parser.output.dialects.dialect_by_name - # -> try: dialect_by_name (no), dialects (yes!) - for i in range(len(parts) - 1, -1, -1): - candidate = parts[i] - - # Check if this part matches a module name - if candidate in module_names_set: - for module_ in modules_: - if candidate in module_: - if alias: - aliases[candidate] = alias - imports[path].append(candidate) - matched = True - break - if matched: - break - - # Check for __init__.py - if the candidate is a package name - # e.g., from simple_ddl_parser import X -> simple_ddl_parser/__init__.py - if not matched: - for module_ in modules_: - # Check if this is a package __init__.py - if f"/{candidate}/__init__.py" in module_ or module_.endswith(f"{candidate}/__init__.py"): - if alias: - aliases[candidate] = alias - imports[path].append("__init__") - matched = True - break - if matched: - break - - for entity in code_objects[path]: - # create a dict with lines of start and end for each entity in module - entities_lines[path][(entity.lineno, entity.endno)] = entity.name - return entities_lines, imports, names_map - - -def search_entities_from_list_in_code( - entities_list: List, module_name: Text, line: Text -) -> Text: - for entity in entities_list: - if search_entity_usage(module_name, entity.name, line): - yield entity - - -def search_entities_from_module_in_code( - _module: Text, _path: Text, code_objects: Dict, code: List, current: bool = False -) -> Dict: - found_entities = defaultdict(list) - for num, line in enumerate(code): - if ( - not line.startswith("#") - and not line.startswith('"') - and not line.startswith("'") - ): - entities_in_line = [ - x - for x in search_entities_from_list_in_code( - code_objects[_path], _module, line - ) - ] - for entity in entities_in_line: - prefix = f"{_module}." 
if not current else "" - found_entities[f"{prefix}{entity.name}"].append(num + 1) - return found_entities - - -def collect_entities_usage_in_modules( - code_objects: Dict, imports: Dict, modules_names_map: Dict -) -> Dict: - entities_usage_in_modules = defaultdict(dict) - for path in code_objects: - entities_usage_in_modules[path] = defaultdict(list) - logger.debug(f"Processing module: {path}") - logger.debug(f"Imports in module: {imports[path]}") - module_content = read_file_content(path) - # to reduce count of iteration, we not need lines with functions and classes defenitions - module_content = ( - module_content.replace("async ", "# async ") - .replace("def ", "# def ") - .replace("class ", "# class ") - ) - # split by line - code = module_content.split("\n") - for _module in imports[path]: - # search entities from other modules (skip if not in analyzed codebase) - if _module not in modules_names_map: - continue - _path = modules_names_map[_module] - entities_usage_in_modules[path].update( - search_entities_from_module_in_code(_module, _path, code_objects, code) - ) - # search entities from current module - entities_usage_in_modules[path].update( - search_entities_from_module_in_code( - get_module_name(path), path, code_objects, code, current=True - ) - ) - return entities_usage_in_modules - - -def populate_free_nodes(code_objects: Dict, dependencies: Dict, imports: Dict, modules_names_map: Dict) -> Dict: - from codegraph.parser import Class - - for path in code_objects: - # Create module-to-module connections based on imports - # This ensures we show connections even when specific entities aren't detected - # (e.g., when importing variables or when entity usage detection misses something) - if imports.get(path): - if "_" not in dependencies[path]: - dependencies[path]["_"] = [] - for imp in imports[path]: - import_dep = f"{imp}._" - if import_dep not in dependencies[path]["_"]: - dependencies[path]["_"].append(import_dep) - - for entity in code_objects[path]: - if entity.name not in dependencies[path]: - dependencies[path][entity.name] = [] - - # Add inheritance connections for classes - if isinstance(entity, Class) and entity.super: - for base_class in entity.super: - # Try to find the base class in imports or local module - base_found = False - - # Check if it's a dotted name (e.g., module.ClassName) - if "." in base_class: - # Already qualified, add as-is - dependencies[path][entity.name].append(base_class) - base_found = True - else: - # Search in imports for this module - for imp in imports.get(path, []): - # Import could be like "dialects.HQL" or "simple_ddl_parser.dialects.HQL" - if imp.endswith("." + base_class) or imp.endswith("." 
+ base_class.split(" as ")[0]): - # Found the import, extract module name - parts = imp.split(".") - if len(parts) >= 2: - module_name = parts[-2] # e.g., "dialects" from "simple_ddl_parser.dialects.HQL" - dependencies[path][entity.name].append(f"{module_name}.{base_class}") - base_found = True - break - - # If not found in imports, check if it's a local class - if not base_found: - for local_entity in code_objects[path]: - if local_entity.name == base_class: - # It's a local class, add without module prefix - dependencies[path][entity.name].append(base_class) - base_found = True - break - - # If still not found, add as-is (might be external) - if not base_found: - dependencies[path][entity.name].append(base_class) - - return dependencies - - -def search_entity_usage(module_name: Text, name: Text, line: Text) -> bool: - """check exist method or entity usage in line or not""" - method_call = name + "(" - dot_access = name + "." - if ( - method_call in line - or " " + dot_access in line - or f"{module_name}." + method_call in line - or f"{module_name}." + dot_access in line - ): - return True - elif module_name in aliases: - if aliases[module_name] + "." + method_call in line: - return True - return False + usage_graph = self.usage_graph() + return self.parser.get_dependencies(usage_graph, file_path, distance) diff --git a/codegraph/main.py b/codegraph/main.py index 93f8a91..aef9c2b 100644 --- a/codegraph/main.py +++ b/codegraph/main.py @@ -8,6 +8,7 @@ import click from codegraph import __version__, core +from codegraph.parsers import available_languages logger = logging.getLogger(__name__) @@ -40,7 +41,18 @@ type=click.Path(), help="Export graph data to CSV file (specify output path)", ) -def cli(paths, object_only, file_path, distance, matplotlib, output, csv): +@click.option( + "--language", + type=click.Choice(available_languages(), case_sensitive=False), + default="python", + show_default=True, + help="Language parser to use", +) +@click.option( + "--python-version", + help="Target Python version for parsing (e.g. 2.7, 3.8, 3.10)", +) +def cli(paths, object_only, file_path, distance, matplotlib, output, csv, language, python_version): """ Tool that creates a graph of code to show dependencies between code entities (methods, classes, etc.). CodeGraph does not execute code, it is based only on lex and syntax parsing. 
@@ -62,6 +74,8 @@ def cli(paths, object_only, file_path, distance, matplotlib, output, csv): matplotlib=matplotlib, output=output, csv=csv, + language=language, + python_version=python_version, ) main(args) diff --git a/codegraph/parser.py b/codegraph/parser.py index d58850f..4cbed4e 100644 --- a/codegraph/parser.py +++ b/codegraph/parser.py @@ -121,7 +121,8 @@ def create_objects_array(fname, source): # noqa: C901 new_lines += 1 elif token == "import": - modules = [_line.replace("\n", "").split("import ")[1]] + modules_part = _line.replace("\n", "").split("import ", 1)[1] + modules = [part.strip() for part in modules_part.split(",") if part.strip()] if not imports: imports = Import(modules) else: diff --git a/codegraph/parsers/__init__.py b/codegraph/parsers/__init__.py new file mode 100644 index 0000000..938b2eb --- /dev/null +++ b/codegraph/parsers/__init__.py @@ -0,0 +1,4 @@ +from codegraph.parsers.base import BaseParser +from codegraph.parsers.registry import available_languages, get_parser + +__all__ = ["BaseParser", "available_languages", "get_parser"] diff --git a/codegraph/parsers/base.py b/codegraph/parsers/base.py new file mode 100644 index 0000000..e8ea6e8 --- /dev/null +++ b/codegraph/parsers/base.py @@ -0,0 +1,57 @@ +from abc import ABC, abstractmethod +from collections import deque +from typing import Dict, List, Set + + +class BaseParser(ABC): + language: str + + @abstractmethod + def get_source_files(self, paths) -> List[str]: + raise NotImplementedError + + @abstractmethod + def parse_files(self, paths_list: List[str]) -> Dict: + raise NotImplementedError + + @abstractmethod + def usage_graph(self, modules_data: Dict) -> Dict: + raise NotImplementedError + + @abstractmethod + def get_entity_metadata(self, modules_data: Dict) -> Dict: + raise NotImplementedError + + def get_dependencies( + self, usage_graph: Dict, file_path: str, distance: int + ) -> Dict[int, Set[str]]: + """ + Default implementation that expects dependencies as "module.entity". + Parsers can override this to handle language-specific dependency formats. + """ + dependencies = {i: set() for i in range(1, distance + 1)} + + if file_path not in usage_graph: + return dependencies + + queue = deque([(file_path, 0)]) + visited = set() + + while queue: + current_file, current_distance = queue.popleft() + + if current_distance >= distance: + continue + + if current_file not in visited: + visited.add(current_file) + + for _, used_entities in usage_graph[current_file].items(): + for used_entity in used_entities: + if "." 
in used_entity: + dependent_file = used_entity.split(".")[0] + ".py" + if dependent_file != current_file: + dependencies[current_distance + 1].add(dependent_file) + queue.append((dependent_file, current_distance + 1)) + + return dependencies diff --git a/codegraph/parsers/python_parser.py b/codegraph/parsers/python_parser.py new file mode 100644 index 0000000..d9a57e7 --- /dev/null +++ b/codegraph/parsers/python_parser.py @@ -0,0 +1,398 @@ +import ast +import os +from dataclasses import dataclass +from typing import Dict, List, Optional, Set, Text, Tuple + +from codegraph.parser import AsyncFunction, Class, Function +from codegraph.parsers.base import BaseParser +from codegraph.utils import get_python_paths_list + +try: + import typed_ast.ast27 as ast27 +except ImportError: # pragma: no cover - optional dependency + ast27 = None + + +@dataclass +class ImportInfo: + module_aliases: Dict[str, str] + entity_aliases: Dict[str, str] + module_imports: Set[str] + + +@dataclass +class ModuleData: + ast_tree: object + entities: List[object] + entity_nodes: Dict[str, object] + imports: ImportInfo + + +class PythonParser(BaseParser): + language = "python" + + def __init__(self, args=None, python_version: Optional[str] = None) -> None: + if python_version is None and args is not None: + python_version = getattr(args, "python_version", None) + self._python_version = python_version + self._major, self._minor = self._parse_python_version(self._python_version) + self._ast_mod = ast27 if self._major == 2 else ast + self._feature_version = self._minor if self._major == 3 else None + self._module_names_set: Set[str] = set() + + def get_source_files(self, paths) -> List[str]: + return get_python_paths_list(paths) + + def parse_files(self, paths_list: List[str]) -> Dict: + self._module_names_set = { + os.path.basename(path).replace(".py", "") for path in paths_list + } + all_data = {} + for path in paths_list: + source = self._read_file_content(path) + ast_tree = self._parse_source(source, path) + entities, entity_nodes = self._extract_entities(ast_tree, os.path.basename(path)) + imports = self._collect_imports(ast_tree) + all_data[path] = ModuleData( + ast_tree=ast_tree, + entities=entities, + entity_nodes=entity_nodes, + imports=imports, + ) + return all_data + + def usage_graph(self, modules_data: Dict) -> Dict: + dependencies: Dict[str, Dict[str, List[str]]] = {} + + for module_path, module_data in modules_data.items(): + local_entities = {entity.name for entity in module_data.entities} + module_aliases = module_data.imports.module_aliases + entity_aliases = module_data.imports.entity_aliases + + dependencies[module_path] = {} + module_level_deps = self._collect_dependencies_in_module( + module_data.ast_tree, + local_entities, + module_aliases, + entity_aliases, + ) + module_level_deps += [f"{mod}._" for mod in module_data.imports.module_imports] + dependencies[module_path]["_"] = self._deduplicate(module_level_deps) + + for entity in module_data.entities: + node = module_data.entity_nodes[entity.name] + entity_deps = self._collect_dependencies_in_entity( + node, + local_entities, + module_aliases, + entity_aliases, + ) + dependencies[module_path][entity.name] = self._deduplicate(entity_deps) + + return dependencies + + def get_entity_metadata(self, modules_data: Dict) -> Dict: + data = {} + for module_path, module_data in modules_data.items(): + data[module_path] = {} + for entity in module_data.entities: + lines = 0 + if entity.lineno and entity.endno: + lines = entity.endno - entity.lineno + 1 + + 
entity_type = "function"
+                if isinstance(entity, Class):
+                    entity_type = "class"
+                elif isinstance(entity, (Function, AsyncFunction)):
+                    entity_type = "function"
+
+                data[module_path][entity.name] = {
+                    "lines": lines,
+                    "entity_type": entity_type,
+                    "lineno": entity.lineno,
+                    "endno": entity.endno,
+                }
+        return data
+
+    def _read_file_content(self, path: Text) -> Text:
+        with open(path, "r+", encoding="utf-8") as file_read:
+            return file_read.read()
+
+    def _parse_source(self, source: Text, filename: Text):
+        if self._major == 2:
+            if ast27 is None:
+                raise ImportError(
+                    "typed_ast is required to parse Python 2 source code."
+                )
+            return ast27.parse(source, filename=filename, mode="exec")
+
+        return self._parse_with_feature_version(source, filename, self._feature_version)
+
+    def _parse_with_feature_version(self, source: Text, filename: Text, feature_version: Optional[int]):
+        if feature_version is None:
+            return ast.parse(source, filename=filename, mode="exec")
+        try:
+            return ast.parse(
+                source,
+                filename=filename,
+                mode="exec",
+                feature_version=feature_version,
+            )
+        except TypeError:
+            return ast.parse(source, filename=filename, mode="exec")
+
+    def _parse_python_version(self, version: Optional[str]) -> Tuple[int, Optional[int]]:
+        if not version:
+            return 3, None
+        parts = version.split(".")
+        try:
+            major = int(parts[0])
+        except ValueError:
+            return 3, None
+        minor = None
+        if len(parts) > 1:
+            try:
+                minor = int(parts[1])
+            except ValueError:
+                minor = None
+        return major, minor
+
+    def _extract_entities(self, ast_tree, filename: Text) -> Tuple[List[object], Dict[str, object]]:
+        entities: List[object] = []
+        entity_nodes: Dict[str, object] = {}
+        ast_mod = self._ast_mod
+        async_def = getattr(self._ast_mod, "AsyncFunctionDef", None)
+
+        for node in getattr(ast_tree, "body", []):
+            if isinstance(node, self._ast_mod.FunctionDef):
+                func = Function(node.name, filename, node.lineno)
+                func.endno = self._get_end_lineno(node, self._ast_mod)
+                entities.append(func)
+                entity_nodes[node.name] = node
+            elif async_def and isinstance(node, async_def):
+                func = AsyncFunction(node.name, filename, node.lineno)
+                func.endno = self._get_end_lineno(node, self._ast_mod)
+                entities.append(func)
+                entity_nodes[node.name] = node
+            elif isinstance(node, self._ast_mod.ClassDef):
+                bases = [self._get_name_from_expr(base, ast_mod) for base in node.bases]
+                bases = [b for b in bases if b]
+                cls = Class(node.name, bases, filename, node.lineno)
+                cls.endno = self._get_end_lineno(node, self._ast_mod)
+                entities.append(cls)
+                entity_nodes[node.name] = node
+
+        return entities, entity_nodes
+
+    def _collect_imports(self, ast_tree) -> ImportInfo:
+        module_aliases: Dict[str, str] = {}
+        entity_aliases: Dict[str, str] = {}
+        module_imports: Set[str] = set()
+        ast_mod = self._ast_mod
+
+        for node in getattr(ast_tree, "body", []):
+            if isinstance(node, ast_mod.Import):
+                for alias in node.names:
+                    full_name = alias.name
+                    alias_name = alias.asname or full_name.split(".")[0]
+                    resolved = self._resolve_imported_module(full_name)
+                    if resolved:
+                        module_aliases[alias_name] = resolved
+                        module_imports.add(resolved)
+            elif isinstance(node, ast_mod.ImportFrom):
+                base = node.module or ""
+                for alias in node.names:
+                    name = alias.name
+                    alias_name = alias.asname or name
+                    full_name = f"{base}.{name}" if base else name
+                    resolved = self._resolve_imported_module(full_name)
+                    if resolved:
+                        module_imports.add(resolved)
+                    entity_aliases[alias_name] = full_name
+
+        return ImportInfo(
+            module_aliases=module_aliases,
+            
entity_aliases=entity_aliases, + module_imports=module_imports, + ) + + def _resolve_imported_module(self, full_name: str) -> Optional[str]: + parts = full_name.split(".") + for i in range(len(parts) - 1, -1, -1): + candidate = parts[i] + if candidate in self._module_names_set: + return candidate + return None + + def _collect_dependencies_in_module( + self, + ast_tree, + local_entities: Set[str], + module_aliases: Dict[str, str], + entity_aliases: Dict[str, str], + ) -> List[str]: + deps: List[str] = [] + ast_mod = self._ast_mod + collector = self._make_dependency_collector( + local_entities, module_aliases, entity_aliases, ast_mod + ) + for node in getattr(ast_tree, "body", []): + if isinstance(node, (ast_mod.FunctionDef, ast_mod.ClassDef)): + continue + async_def = getattr(self._ast_mod, "AsyncFunctionDef", None) + if async_def and isinstance(node, async_def): + continue + collector.visit(node) + deps.extend(collector.dependencies) + return deps + + def _collect_dependencies_in_entity( + self, + node, + local_entities: Set[str], + module_aliases: Dict[str, str], + entity_aliases: Dict[str, str], + ) -> List[str]: + deps: List[str] = [] + ast_mod = self._ast_mod + + if isinstance(node, ast_mod.ClassDef): + for base in node.bases: + if isinstance(base, ast_mod.Attribute): + dep = self._resolve_attribute( + base, local_entities, module_aliases, entity_aliases, ast_mod + ) + elif isinstance(base, ast_mod.Name): + dep = self._resolve_name( + base.id, local_entities, module_aliases, entity_aliases + ) + else: + dep = None + if dep: + deps.append(dep) + collector = self._make_dependency_collector( + local_entities, module_aliases, entity_aliases, ast_mod + ) + for child in node.body: + collector.visit(child) + deps.extend(collector.dependencies) + return deps + + collector = self._make_dependency_collector( + local_entities, module_aliases, entity_aliases, ast_mod + ) + collector.visit(node) + deps.extend(collector.dependencies) + return deps + + def _make_dependency_collector(self, local_entities, module_aliases, entity_aliases, ast_mod): + parser = self + + class DependencyCollector(ast_mod.NodeVisitor): + def __init__(self): + self.dependencies: List[str] = [] + + def visit_Call(self, call_node): + dep = parser._resolve_call_target( + call_node.func, local_entities, module_aliases, entity_aliases, ast_mod + ) + if dep: + self.dependencies.append(dep) + self.generic_visit(call_node) + + return DependencyCollector() + + def _resolve_call_target( + self, + func_node, + local_entities: Set[str], + module_aliases: Dict[str, str], + entity_aliases: Dict[str, str], + ast_mod, + ) -> Optional[str]: + if isinstance(func_node, ast_mod.Name): + return self._resolve_name( + func_node.id, local_entities, module_aliases, entity_aliases + ) + if isinstance(func_node, ast_mod.Attribute): + return self._resolve_attribute( + func_node, local_entities, module_aliases, entity_aliases, ast_mod + ) + return None + + def _resolve_attribute( + self, + node, + local_entities: Set[str], + module_aliases: Dict[str, str], + entity_aliases: Dict[str, str], + ast_mod, + ) -> Optional[str]: + parts = self._flatten_attribute(node, ast_mod) + if not parts: + return None + base = parts[0] + if base in module_aliases: + module_name = module_aliases[base] + suffix = ".".join(parts[1:]) + return f"{module_name}.{suffix}" if suffix else module_name + if base in entity_aliases: + base_name = entity_aliases[base] + suffix = ".".join(parts[1:]) + return f"{base_name}.{suffix}" if suffix else base_name + if base in local_entities: 
+ return base + return None + + def _resolve_name( + self, + name: str, + local_entities: Set[str], + module_aliases: Dict[str, str], + entity_aliases: Dict[str, str], + ) -> Optional[str]: + if name in entity_aliases: + return entity_aliases[name] + if name in local_entities: + return name + if name in module_aliases: + return module_aliases[name] + return None + + def _flatten_attribute(self, node, ast_mod) -> List[str]: + parts: List[str] = [] + while isinstance(node, ast_mod.Attribute): + parts.insert(0, node.attr) + node = node.value + if isinstance(node, ast_mod.Name): + parts.insert(0, node.id) + return parts + return [] + + def _get_name_from_expr(self, node, ast_mod) -> Optional[str]: + if isinstance(node, ast_mod.Name): + return node.id + if isinstance(node, ast_mod.Attribute): + parts = self._flatten_attribute(node, ast_mod) + return ".".join(parts) if parts else None + return None + + def _get_end_lineno(self, node, ast_mod) -> int: + end_lineno = getattr(node, "end_lineno", None) + if end_lineno: + return end_lineno + max_lineno = getattr(node, "lineno", 0) or 0 + for child in ast_mod.walk(node): + lineno = getattr(child, "lineno", None) + if lineno and lineno > max_lineno: + max_lineno = lineno + return max_lineno + + def _deduplicate(self, items: List[str]) -> List[str]: + seen = set() + result = [] + for item in items: + if item not in seen: + seen.add(item) + result.append(item) + return result diff --git a/codegraph/parsers/registry.py b/codegraph/parsers/registry.py new file mode 100644 index 0000000..9b31b06 --- /dev/null +++ b/codegraph/parsers/registry.py @@ -0,0 +1,22 @@ +from typing import Dict, List, Type + +from codegraph.parsers.base import BaseParser +from codegraph.parsers.python_parser import PythonParser +from codegraph.parsers.rust_parser import RustParser + + +_PARSERS: Dict[str, Type[BaseParser]] = { + "python": PythonParser, + "rust": RustParser, +} + + +def available_languages() -> List[str]: + return sorted(_PARSERS.keys()) + + +def get_parser(language: str, args=None) -> BaseParser: + normalized = (language or "python").lower() + if normalized not in _PARSERS: + raise ValueError(f"Unsupported language: {language}") + return _PARSERS[normalized](args=args) diff --git a/codegraph/parsers/rust_parser.py b/codegraph/parsers/rust_parser.py new file mode 100644 index 0000000..1537436 --- /dev/null +++ b/codegraph/parsers/rust_parser.py @@ -0,0 +1,22 @@ +from typing import Dict, List + +from codegraph.parsers.base import BaseParser + + +class RustParser(BaseParser): + language = "rust" + + def __init__(self, args=None) -> None: + self._args = args + + def get_source_files(self, paths) -> List[str]: + raise NotImplementedError("Rust parser is not implemented yet.") + + def parse_files(self, paths_list: List[str]) -> Dict: + raise NotImplementedError("Rust parser is not implemented yet.") + + def usage_graph(self, modules_data: Dict) -> Dict: + raise NotImplementedError("Rust parser is not implemented yet.") + + def get_entity_metadata(self, modules_data: Dict) -> Dict: + raise NotImplementedError("Rust parser is not implemented yet.") diff --git a/codegraph/utils.py b/codegraph/utils.py index 79ba9de..3b20538 100644 --- a/codegraph/utils.py +++ b/codegraph/utils.py @@ -1,18 +1,12 @@ import glob from pathlib import Path -from typing import List, Union +from typing import Iterable, List, Union -def get_python_paths_list(paths: Union[str, List]) -> List[str]: - """ - return list of paths to python files, that found in provided path - :param paths: paths to folder 
or python file that need to tests - :return: - """ +def get_paths_list(paths: Union[str, List], extensions: Iterable[str]) -> List[str]: if isinstance(paths, str): paths = [paths] - if len(paths) == 1 and paths[0].endswith(".py"): - # mean provided path to one python module + if len(paths) == 1 and any(paths[0].endswith(ext) for ext in extensions): path = Path(paths[0]).absolute() if not path.exists(): raise ValueError(f"Path {path.as_posix()} does not exists") @@ -23,8 +17,18 @@ def get_python_paths_list(paths: Union[str, List]) -> List[str]: path = Path(path).absolute() if not path.exists(): raise ValueError(f"Path {path.as_posix()} does not exist") - paths_list += [ - Path(p).as_posix() - for p in glob.glob(str(path / "**" / "*.py"), recursive=True) - ] + for ext in extensions: + paths_list += [ + Path(p).as_posix() + for p in glob.glob(str(path / "**" / f"*{ext}"), recursive=True) + ] return paths_list + + +def get_python_paths_list(paths: Union[str, List]) -> List[str]: + """ + return list of paths to python files, that found in provided path + :param paths: paths to folder or python file that need to tests + :return: + """ + return get_paths_list(paths, [".py"]) diff --git a/pyproject.toml b/pyproject.toml index e6bf148..de710c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "codegraph" -version = "1.2.0" +version = "1.3.0" license = "MIT" readme = "docs/README.rst" homepage = "https://github.com/xnuinside/codegraph" @@ -22,6 +22,7 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.9" click = ">=8.0" +typed-ast = ">=1.5" [tool.poetry.extras] matplotlib = ["matplotlib", "networkx"] diff --git a/tests/test_graph_generation.py b/tests/test_graph_generation.py index 7c917c5..311731a 100644 --- a/tests/test_graph_generation.py +++ b/tests/test_graph_generation.py @@ -313,8 +313,8 @@ def test_main_core_connection(self): main_deps = usage_graph[main_path]["main"] assert any("core" in str(d) for d in main_deps) - def test_core_utils_connection(self): - """Test that core.py -> utils.py connection is detected.""" + def test_core_parsers_connection(self): + """Test that core.py -> parsers connection is detected.""" codegraph_path = pathlib.Path(__file__).parents[1] / "codegraph" args = Namespace(paths=[codegraph_path.as_posix()]) usage_graph = CodeGraph(args).usage_graph() @@ -329,9 +329,9 @@ def test_core_utils_connection(self): assert core_path is not None assert "CodeGraph" in usage_graph[core_path] - # CodeGraph class should use utils.get_python_paths_list + # CodeGraph class should use parsers.get_parser codegraph_deps = usage_graph[core_path]["CodeGraph"] - assert any("utils" in str(d) for d in codegraph_deps) + assert any("parsers" in str(d) for d in codegraph_deps) class TestCSVExport: diff --git a/tests/test_known_limitations.py b/tests/test_known_limitations.py new file mode 100644 index 0000000..3a8d5f2 --- /dev/null +++ b/tests/test_known_limitations.py @@ -0,0 +1,74 @@ +from argparse import Namespace + +from codegraph.core import CodeGraph +from codegraph.parser import Import, create_objects_array + + +def test_import_comma_separated_statement(): + source = """import os, sys + +def func(): + os.path.join() +""" + result = create_objects_array("test.py", source) + imports = result[-1] + assert isinstance(imports, Import) + assert "os" in imports.modules + assert "sys" in imports.modules + + +def test_usage_in_string_literal_is_not_dependency(tmp_path): + module_path = tmp_path / "module_a.py" + module_path.write_text( + """def foo(): + 
return 1 + +def bar(): + print("foo() should not count") + return "foo()" +""", + encoding="utf-8", + ) + + args = Namespace(paths=[module_path.as_posix()]) + usage_graph = CodeGraph(args).usage_graph() + + assert "foo" not in usage_graph[module_path.as_posix()]["bar"] + + +def test_alias_leak_between_modules(tmp_path): + module_b = tmp_path / "module_b.py" + module_b.write_text( + """def foo(): + return 1 +""", + encoding="utf-8", + ) + + module_a = tmp_path / "module_a.py" + module_a.write_text( + """import module_b as mb + +def use_alias(): + return mb.foo() +""", + encoding="utf-8", + ) + + module_c = tmp_path / "module_c.py" + module_c.write_text( + """import module_b + +def bar(): + mb = object() + return mb.foo() +""", + encoding="utf-8", + ) + + args = Namespace(paths=[tmp_path.as_posix()]) + usage_graph = CodeGraph(args).usage_graph() + + module_c_path = module_c.as_posix() + deps = usage_graph[module_c_path]["bar"] + assert all("module_b" not in dep for dep in deps) diff --git a/tests/test_legacy_parser_objects.py b/tests/test_legacy_parser_objects.py new file mode 100644 index 0000000..fb82267 --- /dev/null +++ b/tests/test_legacy_parser_objects.py @@ -0,0 +1,32 @@ +from codegraph import parser as legacy + + +def test_object_children_and_repr(): + parent = legacy._Object("parent", "file.py", 1, None) + child = legacy.Function("child", "file.py", 2) + parent._addchild("child", child) + + assert parent.children["child"] is child + assert child.main is parent + assert "parent" in repr(parent) + assert "parent" in str(parent) + + +def test_class_methods_and_nesting(): + cls = legacy.Class("MyClass", [], "file.py", 1) + method = legacy._nest_function(cls, "method", 2) + async_method = legacy._nest_function(cls, "amethod", 3, async_f=True) + nested_class = legacy._nest_class(cls, "Nested", 4) + + assert "method" in cls.methods + assert "amethod" in cls.async_methods + assert cls.children["Nested"] is nested_class + assert method in cls.methods.values() + assert async_method in cls.async_methods.values() + + +def test_import_add(): + imp = legacy.Import(["os"]) + imp.add("sys") + assert "os" in imp.modules + assert "sys" in imp.modules diff --git a/tests/test_parsers_and_utils.py b/tests/test_parsers_and_utils.py new file mode 100644 index 0000000..8bc44c6 --- /dev/null +++ b/tests/test_parsers_and_utils.py @@ -0,0 +1,59 @@ +from argparse import Namespace + +import pytest + +from codegraph.parsers import available_languages, get_parser +from codegraph.parsers.base import BaseParser +from codegraph.utils import get_paths_list + + +class DummyParser(BaseParser): + language = "dummy" + + def get_source_files(self, paths): + return [] + + def parse_files(self, paths_list): + return {} + + def usage_graph(self, modules_data): + return {} + + def get_entity_metadata(self, modules_data): + return {} + + +def test_available_languages_contains_python_and_rust(): + langs = available_languages() + assert "python" in langs + assert "rust" in langs + + +def test_get_parser_returns_python_parser(): + parser = get_parser("python", args=Namespace()) + assert parser.language == "python" + + +def test_get_parser_unsupported_language(): + with pytest.raises(ValueError): + get_parser("nope") + + +def test_base_parser_get_dependencies(): + parser = DummyParser() + usage_graph = { + "a.py": {"func": ["b.func", "c.other"], "_": []}, + "b.py": {"func": []}, + } + + deps = parser.get_dependencies(usage_graph, "a.py", 1) + assert deps[1] == {"b.py", "c.py"} + + +def 
test_get_paths_list_multi_extension(tmp_path): + (tmp_path / "a.py").write_text("", encoding="utf-8") + (tmp_path / "b.txt").write_text("", encoding="utf-8") + + paths = get_paths_list(tmp_path.as_posix(), [".py", ".txt"]) + names = {p.split("/")[-1] for p in paths} + assert names == {"a.py", "b.txt"} diff --git a/tests/test_python_ast_parser_versions.py b/tests/test_python_ast_parser_versions.py new file mode 100644 index 0000000..d483ea3 --- /dev/null +++ b/tests/test_python_ast_parser_versions.py @@ -0,0 +1,69 @@ +import sys +from argparse import Namespace + +import pytest + +from codegraph.core import CodeGraph + +try: + import typed_ast.ast27 as ast27 +except ImportError: # pragma: no cover - optional dependency + ast27 = None + + +@pytest.mark.skipif(ast27 is None, reason="typed_ast is required for Python 2 parsing") +def test_python2_6_parse_print_statement(tmp_path): + module_path = tmp_path / "module_py2_6.py" + module_path.write_text( + """def foo():\n print 'hi'\n""", + encoding="utf-8", + ) + args = Namespace(paths=[module_path.as_posix()], language="python", python_version="2.6") + usage_graph = CodeGraph(args).usage_graph() + + assert module_path.as_posix() in usage_graph + assert "foo" in usage_graph[module_path.as_posix()] + + +@pytest.mark.skipif(ast27 is None, reason="typed_ast is required for Python 2 parsing") +def test_python2_7_parse_exception_syntax(tmp_path): + module_path = tmp_path / "module_py2_7.py" + module_path.write_text( + """def foo():\n try:\n raise Exception('x')\n except Exception, e:\n return str(e)\n""", + encoding="utf-8", + ) + args = Namespace(paths=[module_path.as_posix()], language="python", python_version="2.7") + usage_graph = CodeGraph(args).usage_graph() + + assert module_path.as_posix() in usage_graph + assert "foo" in usage_graph[module_path.as_posix()] + + +def test_python3_8_parse_walrus(tmp_path): + module_path = tmp_path / "module_py3_8.py" + module_path.write_text( + """def foo(value):\n if (n := value):\n return n\n""", + encoding="utf-8", + ) + args = Namespace(paths=[module_path.as_posix()], language="python", python_version="3.8") + usage_graph = CodeGraph(args).usage_graph() + + assert module_path.as_posix() in usage_graph + assert "foo" in usage_graph[module_path.as_posix()] + + +@pytest.mark.skipif( + tuple(sys.version_info[:2]) < (3, 10), + reason="match/case parsing requires Python 3.10+ runtime", +) +def test_python3_10_parse_match_case(tmp_path): + module_path = tmp_path / "module_py3_10.py" + module_path.write_text( + """def foo(value):\n match value:\n case 1:\n return 1\n case _:\n return 0\n""", + encoding="utf-8", + ) + args = Namespace(paths=[module_path.as_posix()], language="python", python_version="3.10") + usage_graph = CodeGraph(args).usage_graph() + + assert module_path.as_posix() in usage_graph + assert "foo" in usage_graph[module_path.as_posix()] diff --git a/tests/test_python_parser_ast.py b/tests/test_python_parser_ast.py new file mode 100644 index 0000000..fd01290 --- /dev/null +++ b/tests/test_python_parser_ast.py @@ -0,0 +1,113 @@ +from argparse import Namespace + +import pytest + +from codegraph.core import CodeGraph +from codegraph.parsers.python_parser import PythonParser + + +def _write(tmp_path, name, content): + path = tmp_path / name + path.write_text(content, encoding="utf-8") + return path + + +def test_parse_python_version(): + parser = PythonParser() + assert parser._parse_python_version(None) == (3, None) + assert parser._parse_python_version("2.7") == (2, 7) + assert 
parser._parse_python_version("3.10") == (3, 10) + assert parser._parse_python_version("nope") == (3, None) + + +def test_collect_imports_module_and_entity_aliases(): + parser = PythonParser() + parser._module_names_set = {"module_b", "module_c"} + source = """import module_b as mb +from module_b import foo as bar +from module_c import sub as sub_alias +""" + tree = parser._parse_source(source, "test.py") + imports = parser._collect_imports(tree) + + assert imports.module_aliases["mb"] == "module_b" + assert imports.entity_aliases["bar"] == "module_b.foo" + assert imports.entity_aliases["sub_alias"] == "module_c.sub" + assert "module_b" in imports.module_imports + assert "module_c" in imports.module_imports + + +def test_usage_graph_resolves_alias_calls(tmp_path): + _write( + tmp_path, + "module_b.py", + """def foo():\n return 1\n""", + ) + _write( + tmp_path, + "module_a.py", + """import module_b as mb\nfrom module_b import foo as bar\n\ndef call_all():\n mb.foo()\n bar()\n""", + ) + + args = Namespace(paths=[tmp_path.as_posix()], language="python") + usage_graph = CodeGraph(args).usage_graph() + + module_a_path = (tmp_path / "module_a.py").as_posix() + deps = set(usage_graph[module_a_path]["call_all"]) + assert "module_b.foo" in deps + assert f"module_b._" in usage_graph[module_a_path]["_"] + + +def test_class_inheritance_dependency(tmp_path): + _write( + tmp_path, + "module_b.py", + """class Base:\n pass\n""", + ) + _write( + tmp_path, + "module_a.py", + """import module_b\n\nclass Child(module_b.Base):\n pass\n""", + ) + + args = Namespace(paths=[tmp_path.as_posix()], language="python") + usage_graph = CodeGraph(args).usage_graph() + + module_a_path = (tmp_path / "module_a.py").as_posix() + deps = set(usage_graph[module_a_path]["Child"]) + assert "module_b.Base" in deps + + +def test_entity_metadata_line_counts(tmp_path): + _write( + tmp_path, + "module_a.py", + """def foo():\n x = 1\n return x\n""", + ) + args = Namespace(paths=[tmp_path.as_posix()], language="python") + code_graph = CodeGraph(args) + metadata = code_graph.get_entity_metadata() + + module_a_path = (tmp_path / "module_a.py").as_posix() + assert metadata[module_a_path]["foo"]["lines"] == 3 + + +def test_get_lines_numbers(tmp_path): + _write( + tmp_path, + "module_a.py", + "\"\"\"module\"\"\"\n\ndef foo():\n return 1\n\n\ndef bar():\n return 2\n", + ) + args = Namespace(paths=[tmp_path.as_posix()], language="python") + code_graph = CodeGraph(args) + lines = code_graph.get_lines_numbers() + + module_a_path = (tmp_path / "module_a.py").as_posix() + assert lines[module_a_path]["foo"] == (3, 4) + assert lines[module_a_path]["bar"] == (7, 8) + + +def test_deduplicate_preserves_order(): + parser = PythonParser() + items = ["a", "b", "a", "c", "b"] + assert parser._deduplicate(items) == ["a", "b", "c"] diff --git a/tests/test_utils.py b/tests/test_utils.py index e9f8664..25c7c40 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -15,7 +15,19 @@ def test_get_python_paths_list(): base_path = pathlib.Path(__file__).parents[1] / "codegraph" expected = [ (base_path / x).as_posix() - for x in ["__init__.py", "core.py", "parser.py", "utils.py", "vizualyzer.py", "main.py"] + for x in [ + "__init__.py", + "core.py", + "parser.py", + "utils.py", + "vizualyzer.py", + "main.py", + "parsers/__init__.py", + "parsers/base.py", + "parsers/python_parser.py", + "parsers/registry.py", + "parsers/rust_parser.py", + ] ] result = get_python_paths_list(base_path.as_posix()) assert sorted(result) == sorted(expected) diff --git 
a/tests/test_visualizer_helpers.py b/tests/test_visualizer_helpers.py new file mode 100644 index 0000000..f6d40fa --- /dev/null +++ b/tests/test_visualizer_helpers.py @@ -0,0 +1,75 @@ +import builtins +import os + +import pytest + +from codegraph import vizualyzer + + +class DummyGraph: + def __init__(self): + self.edges = [] + self.nodes = [] + + def add_edges_from(self, edges): + self.edges.extend(edges) + + def add_node(self, node): + self.nodes.append(node) + + +def test_process_module_in_graph_edges(): + graph = DummyGraph() + module = "/path/to/mod.py" + module_links = {"func": ["other.dep", "local"]} + + module_edges, sub_edges = vizualyzer.process_module_in_graph(module, module_links, graph) + + assert ("mod.py", "func") in module_edges + assert ("func", "dep") in sub_edges + assert ("func", "local") in sub_edges + + +def test_get_template_dir_and_read_file(): + template_dir = vizualyzer._get_template_dir() + assert os.path.isdir(template_dir) + + content = vizualyzer._read_template_file("index.html") + assert "STYLES_PLACEHOLDER" in content + + +def test_get_d3_html_template_replaces_placeholders(): + html = vizualyzer.get_d3_html_template({"nodes": [], "links": [], "unlinkedModules": []}) + assert "STYLES_PLACEHOLDER" not in html + assert "GRAPH_DATA_PLACEHOLDER" not in html + assert "\"nodes\": []" in html + + +def test_draw_graph_writes_file_and_opens_browser(tmp_path, monkeypatch): + opened = {} + + def fake_open(url): + opened["url"] = url + return True + + monkeypatch.setattr(vizualyzer.webbrowser, "open", fake_open) + + output_path = tmp_path / "graph.html" + vizualyzer.draw_graph({"/tmp/a.py": {"func": []}}, output_path=output_path.as_posix()) + + assert output_path.exists() + assert opened["url"].startswith("file://") + + +def test_draw_graph_matplotlib_missing_deps(monkeypatch): + original_import = builtins.__import__ + + def fake_import(name, *args, **kwargs): + if name.startswith("matplotlib") or name.startswith("networkx"): + raise ImportError("blocked") + return original_import(name, *args, **kwargs) + + monkeypatch.setattr(builtins, "__import__", fake_import) + + with pytest.raises(ImportError): + vizualyzer.draw_graph_matplotlib({"/tmp/a.py": {"func": []}}) From 67bbb1ad82caedfcf8fa7f780d72f2b686a892a4 Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sat, 24 Jan 2026 18:44:09 +0300 Subject: [PATCH 2/2] Switch linting to black and ruff --- .flake8 | 8 - .github/workflows/main.yml | 11 +- .isort.cfg | 2 - .pre-commit-config.yaml | 16 +- CONTRIBUTING.md | 11 +- codegraph/main.py | 2 +- codegraph/parsers/base.py | 4 +- codegraph/parsers/python_parser.py | 36 +--- codegraph/parsers/registry.py | 1 - codegraph/utils.py | 5 +- codegraph/vizualyzer.py | 123 ++++++------- poetry.lock | 274 +++++++++++++++++++++++------ pyproject.toml | 15 +- tests/test_codegraph.py | 4 +- tests/test_data/alias_imports.py | 1 + tests/test_data/comma_imports.py | 1 + tests/test_data/module_a.py | 1 + tests/test_data/module_b.py | 1 + tests/test_data/vizualyzer.py | 8 +- tests/test_graph_generation.py | 92 +++++----- tests/test_package_install.py | 40 ++--- tests/test_python_parser_ast.py | 6 +- tests/test_visualizer_helpers.py | 2 +- tox.ini | 15 +- 24 files changed, 392 insertions(+), 287 deletions(-) delete mode 100644 .flake8 delete mode 100644 .isort.cfg diff --git a/.flake8 b/.flake8 deleted file mode 100644 index eff363c..0000000 --- a/.flake8 +++ /dev/null @@ -1,8 +0,0 @@ -[flake8] -exclude = .github,.git,__pycache__,docs/source/conf.py,old,build,dist,.tox,*.egg-info -ignore = D100, 
D103, D101, D102, D104, D107, D403, D210, D400, D401, W503, W293, D205
-max-complexity = 10
-max-line-length = 120
-per-file-ignores =
-    __init__.py:F401
-    codegraph/vizualyzer.py:E501
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 615b52a..30a183e 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -20,13 +20,14 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: "3.12"
-      - name: Install flake8
+      - name: Install black and ruff
         run: |
           python -m pip install --upgrade pip
-          pip install flake8
+          pip install black ruff
-      - name: Run flake8
+      - name: Run black and ruff
         run: |
-          flake8 codegraph/ tests/
+          black --check .
+          ruff check .
 
   tests:
     runs-on: ubuntu-latest
@@ -115,4 +116,4 @@ jobs:
         path: 'docs'
       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v4
\ No newline at end of file
+        uses: actions/deploy-pages@v4
diff --git a/.isort.cfg b/.isort.cfg
deleted file mode 100644
index 3308f9b..0000000
--- a/.isort.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[settings]
-known_third_party = click,matplotlib,networkx,pytest
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 18133b0..3c526fb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,18 +1,10 @@
 repos:
-- repo: https://github.com/asottile/seed-isort-config
-  rev: v2.2.0
-  hooks:
-  - id: seed-isort-config
-- repo: https://github.com/pycqa/isort
-  rev: 5.13.2
-  hooks:
-  - id: isort
-- repo: https://github.com/ambv/black
+- repo: https://github.com/psf/black
   rev: 24.10.0
   hooks:
   - id: black
     language_version: python3.12
-- repo: https://github.com/PyCQA/flake8
-  rev: 7.1.1
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  rev: v0.6.9
   hooks:
-  - id: flake8
+  - id: ruff
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index a217997..e6d983a 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -21,4 +21,13 @@ To follow code styles and successfully pass github pipelines install pre-commit hooks
 
     pre-commit install
 
-```
\ No newline at end of file
+```
+
+### Formatting and linting
+
+Before committing changes, run the formatter and linter:
+
+```
+    black .
+    ruff check .
+```
diff --git a/codegraph/main.py b/codegraph/main.py
index aef9c2b..789ac3f 100644
--- a/codegraph/main.py
+++ b/codegraph/main.py
@@ -1,4 +1,4 @@
-""" main module of testsdiffer for console (cli) usage"""
+"""main module of codegraph for console (cli) usage"""
 
 import logging
 import pprint
diff --git a/codegraph/parsers/base.py b/codegraph/parsers/base.py
index e8ea6e8..fdec439 100644
--- a/codegraph/parsers/base.py
+++ b/codegraph/parsers/base.py
@@ -22,9 +22,7 @@ def usage_graph(self, modules_data: Dict) -> Dict:
     def get_entity_metadata(self, modules_data: Dict) -> Dict:
         raise NotImplementedError
 
-    def get_dependencies(
-        self, usage_graph: Dict, file_path: str, distance: int
-    ) -> Dict[int, Set[str]]:
+    def get_dependencies(self, usage_graph: Dict, file_path: str, distance: int) -> Dict[int, Set[str]]:
         """
         Default implementation that expects dependencies as "module.entity".
         Parsers can override this to handle language-specific dependency formats.
diff --git a/codegraph/parsers/python_parser.py b/codegraph/parsers/python_parser.py index d9a57e7..6dbfe22 100644 --- a/codegraph/parsers/python_parser.py +++ b/codegraph/parsers/python_parser.py @@ -44,9 +44,7 @@ def get_source_files(self, paths) -> List[str]: return get_python_paths_list(paths) def parse_files(self, paths_list: List[str]) -> Dict: - self._module_names_set = { - os.path.basename(path).replace(".py", "") for path in paths_list - } + self._module_names_set = {os.path.basename(path).replace(".py", "") for path in paths_list} all_data = {} for path in paths_list: source = self._read_file_content(path) @@ -121,9 +119,7 @@ def _read_file_content(self, path: Text) -> Text: def _parse_source(self, source: Text, filename: Text): if self._major == 2: if ast27 is None: - raise ImportError( - "typed_ast is required to parse Python 2 source code." - ) + raise ImportError("typed_ast is required to parse Python 2 source code.") return ast27.parse(source, filename=filename, mode="exec") return self._parse_with_feature_version(source, filename, self._feature_version) @@ -233,9 +229,7 @@ def _collect_dependencies_in_module( ) -> List[str]: deps: List[str] = [] ast_mod = self._ast_mod - collector = self._make_dependency_collector( - local_entities, module_aliases, entity_aliases, ast_mod - ) + collector = self._make_dependency_collector(local_entities, module_aliases, entity_aliases, ast_mod) for node in getattr(ast_tree, "body", []): if isinstance(node, (ast_mod.FunctionDef, ast_mod.ClassDef)): continue @@ -259,28 +253,20 @@ def _collect_dependencies_in_entity( if isinstance(node, ast_mod.ClassDef): for base in node.bases: if isinstance(base, ast_mod.Attribute): - dep = self._resolve_attribute( - base, local_entities, module_aliases, entity_aliases, ast_mod - ) + dep = self._resolve_attribute(base, local_entities, module_aliases, entity_aliases, ast_mod) elif isinstance(base, ast_mod.Name): - dep = self._resolve_name( - base.id, local_entities, module_aliases, entity_aliases - ) + dep = self._resolve_name(base.id, local_entities, module_aliases, entity_aliases) else: dep = None if dep: deps.append(dep) - collector = self._make_dependency_collector( - local_entities, module_aliases, entity_aliases, ast_mod - ) + collector = self._make_dependency_collector(local_entities, module_aliases, entity_aliases, ast_mod) for child in node.body: collector.visit(child) deps.extend(collector.dependencies) return deps - collector = self._make_dependency_collector( - local_entities, module_aliases, entity_aliases, ast_mod - ) + collector = self._make_dependency_collector(local_entities, module_aliases, entity_aliases, ast_mod) collector.visit(node) deps.extend(collector.dependencies) return deps @@ -311,13 +297,9 @@ def _resolve_call_target( ast_mod, ) -> Optional[str]: if isinstance(func_node, ast_mod.Name): - return self._resolve_name( - func_node.id, local_entities, module_aliases, entity_aliases - ) + return self._resolve_name(func_node.id, local_entities, module_aliases, entity_aliases) if isinstance(func_node, ast_mod.Attribute): - return self._resolve_attribute( - func_node, local_entities, module_aliases, entity_aliases, ast_mod - ) + return self._resolve_attribute(func_node, local_entities, module_aliases, entity_aliases, ast_mod) return None def _resolve_attribute( diff --git a/codegraph/parsers/registry.py b/codegraph/parsers/registry.py index 9b31b06..9ddc2a5 100644 --- a/codegraph/parsers/registry.py +++ b/codegraph/parsers/registry.py @@ -4,7 +4,6 @@ from codegraph.parsers.python_parser 
import PythonParser from codegraph.parsers.rust_parser import RustParser - _PARSERS: Dict[str, Type[BaseParser]] = { "python": PythonParser, "rust": RustParser, diff --git a/codegraph/utils.py b/codegraph/utils.py index 3b20538..a8041c3 100644 --- a/codegraph/utils.py +++ b/codegraph/utils.py @@ -18,10 +18,7 @@ def get_paths_list(paths: Union[str, List], extensions: Iterable[str]) -> List[s if not path.exists(): raise ValueError(f"Path {path.as_posix()} does not exist") for ext in extensions: - paths_list += [ - Path(p).as_posix() - for p in glob.glob(str(path / "**" / f"*{ext}"), recursive=True) - ] + paths_list += [Path(p).as_posix() for p in glob.glob(str(path / "**" / f"*{ext}"), recursive=True)] return paths_list diff --git a/codegraph/vizualyzer.py b/codegraph/vizualyzer.py index 6bf08b7..3a5b310 100644 --- a/codegraph/vizualyzer.py +++ b/codegraph/vizualyzer.py @@ -33,8 +33,7 @@ def draw_graph_matplotlib(modules_entities: Dict) -> None: import networkx as nx except ImportError: raise ImportError( - "matplotlib is required for matplotlib visualization. " - "Install it with: pip install codegraph[matplotlib]" + "matplotlib is required for matplotlib visualization. " "Install it with: pip install codegraph[matplotlib]" ) G = nx.DiGraph() @@ -44,9 +43,7 @@ def draw_graph_matplotlib(modules_entities: Dict) -> None: sub_edges_all = [] for module in modules_entities: - new_module_edges_all, new_edges_all = process_module_in_graph( - module, modules_entities[module], G - ) + new_module_edges_all, new_edges_all = process_module_in_graph(module, modules_entities[module], G) module_edges_all += new_module_edges_all sub_edges_all += new_edges_all @@ -75,9 +72,7 @@ def draw_graph_matplotlib(modules_entities: Dict) -> None: alpha=0.8, ) - nx.draw_networkx_labels( - G, pos, labels=module_list_labels, font_weight="bold", font_size=11 - ) + nx.draw_networkx_labels(G, pos, labels=module_list_labels, font_weight="bold", font_size=11) nx.draw_networkx_labels( G, pos, @@ -154,13 +149,15 @@ def convert_to_d3_format(modules_entities: Dict, entity_metadata: Dict = None) - # Add module node if module_name not in node_ids: - nodes.append({ - "id": module_name, - "type": "module", - "collapsed": False, - "fullPath": relative_path, - "lines": total_lines - }) + nodes.append( + { + "id": module_name, + "type": "module", + "collapsed": False, + "fullPath": relative_path, + "lines": total_lines, + } + ) node_ids.add(module_name) # Add entity nodes and build mapping @@ -175,14 +172,16 @@ def convert_to_d3_format(modules_entities: Dict, entity_metadata: Dict = None) - entity_type = ent_meta.get("entity_type", "function") if entity_id not in node_ids: - nodes.append({ - "id": entity_id, - "label": entity_name, - "type": "entity", - "parent": module_name, - "lines": lines, - "entityType": entity_type - }) + nodes.append( + { + "id": entity_id, + "label": entity_name, + "type": "entity", + "parent": module_name, + "lines": lines, + "entityType": entity_type, + } + ) node_ids.add(entity_id) # Second pass: create all links @@ -193,11 +192,7 @@ def convert_to_d3_format(modules_entities: Dict, entity_metadata: Dict = None) - entity_id = f"{module_name}:{entity_name}" # Link from module to entity - links.append({ - "source": module_name, - "target": entity_id, - "type": "module-entity" - }) + links.append({"source": module_name, "target": entity_id, "type": "module-entity"}) # Links from entity to dependencies for dep in dependencies: @@ -234,11 +229,7 @@ def convert_to_d3_format(modules_entities: Dict, entity_metadata: 
Dict = None) - if dep_target and dep_target in node_ids: # Link to existing entity - links.append({ - "source": entity_id, - "target": dep_target, - "type": "dependency" - }) + links.append({"source": entity_id, "target": dep_target, "type": "dependency"}) # Add module-to-module link if different modules if dep_module and dep_module != module_name: link_key = (module_name, dep_module) @@ -247,26 +238,14 @@ def convert_to_d3_format(modules_entities: Dict, entity_metadata: Dict = None) - else: # Add as external dependency node if dep_entity not in node_ids: - nodes.append({ - "id": dep_entity, - "type": "external", - "label": dep_entity - }) + nodes.append({"id": dep_entity, "type": "external", "label": dep_entity}) node_ids.add(dep_entity) - links.append({ - "source": entity_id, - "target": dep_entity, - "type": "dependency" - }) + links.append({"source": entity_id, "target": dep_entity, "type": "dependency"}) # Add module-to-module links for source_module, target_module in module_links: - links.append({ - "source": source_module, - "target": target_module, - "type": "module-module" - }) + links.append({"source": source_module, "target": target_module, "type": "module-module"}) # Find unlinked modules (no connections at all - neither incoming nor outgoing) all_modules = {n["id"] for n in nodes if n["type"] == "module"} @@ -277,8 +256,7 @@ def convert_to_d3_format(modules_entities: Dict, entity_metadata: Dict = None) - # Modules with any connection linked_modules = modules_with_outgoing | modules_with_incoming unlinked_modules = [ - {"id": m, "fullPath": module_full_paths.get(m, m)} - for m in sorted(all_modules - linked_modules) + {"id": m, "fullPath": module_full_paths.get(m, m)} for m in sorted(all_modules - linked_modules) ] return {"nodes": nodes, "links": links, "unlinkedModules": unlinked_modules} @@ -286,13 +264,13 @@ def convert_to_d3_format(modules_entities: Dict, entity_metadata: Dict = None) - def _get_template_dir() -> str: """Get the path to the templates directory.""" - return os.path.join(os.path.dirname(__file__), 'templates') + return os.path.join(os.path.dirname(__file__), "templates") def _read_template_file(filename: str) -> str: """Read a template file from the templates directory.""" template_path = os.path.join(_get_template_dir(), filename) - with open(template_path, 'r', encoding='utf-8') as f: + with open(template_path, "r", encoding="utf-8") as f: return f.read() @@ -301,14 +279,14 @@ def get_d3_html_template(graph_data: Dict) -> str: graph_json = json.dumps(graph_data, indent=2) # Read template files - html_template = _read_template_file('index.html') - css_content = _read_template_file('styles.css') - js_content = _read_template_file('main.js') + html_template = _read_template_file("index.html") + css_content = _read_template_file("styles.css") + js_content = _read_template_file("main.js") # Replace placeholders - html_content = html_template.replace('/* STYLES_PLACEHOLDER */', css_content) - html_content = html_content.replace('/* GRAPH_DATA_PLACEHOLDER */', graph_json) - html_content = html_content.replace('/* SCRIPT_PLACEHOLDER */', js_content) + html_content = html_template.replace("/* STYLES_PLACEHOLDER */", css_content) + html_content = html_content.replace("/* GRAPH_DATA_PLACEHOLDER */", graph_json) + html_content = html_content.replace("/* SCRIPT_PLACEHOLDER */", js_content) return html_content @@ -332,14 +310,15 @@ def draw_graph(modules_entities: Dict, entity_metadata: Dict = None, output_path output_path = os.path.abspath(output_path) # Save to file - 
with open(output_path, 'w', encoding='utf-8') as f: + with open(output_path, "w", encoding="utf-8") as f: f.write(html_content) # Open in default browser - webbrowser.open(f'file://{output_path}') + webbrowser.open(f"file://{output_path}") # Import click here to avoid circular imports and only when needed import click + click.echo(f"Interactive graph saved and opened in browser: {output_path}") @@ -381,8 +360,8 @@ def export_to_csv(modules_entities: Dict, entity_metadata: Dict = None, output_p output_path = os.path.abspath(output_path) # Write CSV - with open(output_path, 'w', newline='', encoding='utf-8') as csvfile: - fieldnames = ['name', 'type', 'parent_module', 'full_path', 'links_out', 'links_in', 'lines'] + with open(output_path, "w", newline="", encoding="utf-8") as csvfile: + fieldnames = ["name", "type", "parent_module", "full_path", "links_out", "links_in", "lines"] writer = csv.DictWriter(csvfile, fieldnames=fieldnames) writer.writeheader() @@ -415,14 +394,16 @@ def export_to_csv(modules_entities: Dict, entity_metadata: Dict = None, output_p lines = 0 name = node.get("label", node_id) - writer.writerow({ - 'name': name, - 'type': display_type, - 'parent_module': parent_module, - 'full_path': full_path, - 'links_out': links_out.get(node_id, 0), - 'links_in': links_in.get(node_id, 0), - 'lines': lines - }) + writer.writerow( + { + "name": name, + "type": display_type, + "parent_module": parent_module, + "full_path": full_path, + "links_out": links_out.get(node_id, 0), + "links_in": links_in.get(node_id, 0), + "lines": lines, + } + ) click.echo(f"Graph data exported to CSV: {output_path}") diff --git a/poetry.lock b/poetry.lock index 686e4da..75eb1c7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -17,6 +17,58 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] +[[package]] +name = "black" +version = "25.11.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "black-25.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ec311e22458eec32a807f029b2646f661e6859c3f61bc6d9ffb67958779f392e"}, + {file = "black-25.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1032639c90208c15711334d681de2e24821af0575573db2810b0763bcd62e0f0"}, + {file = "black-25.11.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0c0f7c461df55cf32929b002335883946a4893d759f2df343389c4396f3b6b37"}, + {file = "black-25.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:f9786c24d8e9bd5f20dc7a7f0cdd742644656987f6ea6947629306f937726c03"}, + {file = "black-25.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:895571922a35434a9d8ca67ef926da6bc9ad464522a5fe0db99b394ef1c0675a"}, + {file = "black-25.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb4f4b65d717062191bdec8e4a442539a8ea065e6af1c4f4d36f0cdb5f71e170"}, + {file = "black-25.11.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d81a44cbc7e4f73a9d6ae449ec2317ad81512d1e7dce7d57f6333fd6259737bc"}, + {file = "black-25.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:7eebd4744dfe92ef1ee349dc532defbf012a88b087bb7ddd688ff59a447b080e"}, + {file = "black-25.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:80e7486ad3535636657aa180ad32a7d67d7c273a80e12f1b4bfa0823d54e8fac"}, + {file = "black-25.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cced12b747c4c76bc09b4db057c319d8545307266f41aaee665540bc0e04e96"}, + {file = "black-25.11.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb2d54a39e0ef021d6c5eef442e10fd71fcb491be6413d083a320ee768329dd"}, + {file = "black-25.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae263af2f496940438e5be1a0c1020e13b09154f3af4df0835ea7f9fe7bfa409"}, + {file = "black-25.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0a1d40348b6621cc20d3d7530a5b8d67e9714906dfd7346338249ad9c6cedf2b"}, + {file = "black-25.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:51c65d7d60bb25429ea2bf0731c32b2a2442eb4bd3b2afcb47830f0b13e58bfd"}, + {file = "black-25.11.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:936c4dd07669269f40b497440159a221ee435e3fddcf668e0c05244a9be71993"}, + {file = "black-25.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:f42c0ea7f59994490f4dccd64e6b2dd49ac57c7c84f38b8faab50f8759db245c"}, + {file = "black-25.11.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:35690a383f22dd3e468c85dc4b915217f87667ad9cce781d7b42678ce63c4170"}, + {file = "black-25.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:dae49ef7369c6caa1a1833fd5efb7c3024bb7e4499bf64833f65ad27791b1545"}, + {file = "black-25.11.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bd4a22a0b37401c8e492e994bce79e614f91b14d9ea911f44f36e262195fdda"}, + {file = "black-25.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:aa211411e94fdf86519996b7f5f05e71ba34835d8f0c0f03c00a26271da02664"}, + {file = "black-25.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3bb5ce32daa9ff0605d73b6f19da0b0e6c1f8f2d75594db539fdfed722f2b06"}, + {file = "black-25.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9815ccee1e55717fe9a4b924cae1646ef7f54e0f990da39a34fc7b264fcf80a2"}, + {file = "black-25.11.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:92285c37b93a1698dcbc34581867b480f1ba3a7b92acf1fe0467b04d7a4da0dc"}, + {file = "black-25.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:43945853a31099c7c0ff8dface53b4de56c41294fa6783c0441a8b1d9bf668bc"}, + {file = "black-25.11.0-py3-none-any.whl", hash = "sha256:e3f562da087791e96cefcd9dda058380a442ab322a02e222add53736451f604b"}, + {file = "black-25.11.0.tar.gz", hash = "sha256:9a323ac32f5dc75ce7470501b887250be5005a01602e931a15e45593f70f6e08"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +pytokens = ">=0.3.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "cachetools" version = "6.2.4" @@ -292,7 +344,7 @@ version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, @@ -648,23 +700,6 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] -[[package]] -name = "flake8" -version = "7.1.1" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -groups = ["dev"] -files = [ - {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, - {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.12.0,<2.13.0" -pyflakes = ">=3.2.0,<3.3.0" - [[package]] name = "fonttools" version = "4.54.1" @@ -1181,18 +1216,6 @@ python-dateutil = ">=2.7" [package.extras] dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mdurl" version = "0.1.2" @@ -1218,6 +1241,18 @@ files = [ {file = "more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd"}, ] +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + [[package]] name = "networkx" version = "3.2.1" @@ -1354,6 +1389,24 @@ files = [ {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] +[[package]] +name = "pathspec" +version = "1.0.3" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pathspec-1.0.3-py3-none-any.whl", hash = "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c"}, + {file = "pathspec-1.0.3.tar.gz", hash = "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d"}, +] + +[package.extras] +hyperscan = ["hyperscan (>=0.7)"] +optional = ["typing-extensions (>=4)"] +re2 = ["google-re2 (>=1.1)"] +tests = ["pytest (>=9)", "typing-extensions (>=4.15)"] + [[package]] name = "pillow" version = "10.4.0" @@ -1505,18 +1558,6 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" -[[package]] -name = "pycodestyle" -version = "2.12.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, - {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, -] - [[package]] name = "pycparser" version = "2.23" @@ -1530,18 +1571,6 @@ files = [ {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] -[[package]] -name = "pyflakes" -version = "3.2.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, -] - [[package]] name = "pygments" version = "2.19.2" @@ -1652,6 +1681,61 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pytokens" +version = "0.4.0" +description = "A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytokens-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:af0c3166aea367a9e755a283171befb92dd3043858b94ae9b3b7efbe9def26a3"}, + {file = "pytokens-0.4.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:daae524ed14ca459932cbf51d74325bea643701ba8a8b0cc2d10f7cd4b3e2b63"}, + {file = "pytokens-0.4.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e95cb158c44d642ed62f555bf8136bbe780dbd64d2fb0b9169e11ffb944664c3"}, + {file = "pytokens-0.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df58d44630eaf25f587540e94bdf1fc50b4e6d5f212c786de0fb024bfcb8753a"}, + {file = "pytokens-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55efcc36f9a2e0e930cfba0ce7f83445306b02f8326745585ed5551864eba73a"}, + {file = "pytokens-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:92eb3ef88f27c22dc9dbab966ace4d61f6826e02ba04dac8e2d65ea31df56c8e"}, + {file = "pytokens-0.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4b77858a680635ee9904306f54b0ee4781effb89e211ba0a773d76539537165"}, + {file = "pytokens-0.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25cacc20c2ad90acb56f3739d87905473c54ca1fa5967ffcd675463fe965865e"}, + {file = "pytokens-0.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628fab535ebc9079e4db35cd63cb401901c7ce8720a9834f9ad44b9eb4e0f1d4"}, + {file = "pytokens-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:4d0f568d7e82b7e96be56d03b5081de40e43c904eb6492bf09aaca47cd55f35b"}, + {file = "pytokens-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd8da894e5a29ba6b6da8be06a4f7589d7220c099b5e363cb0643234b9b38c2a"}, + {file = "pytokens-0.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:237ba7cfb677dbd3b01b09860810aceb448871150566b93cd24501d5734a04b1"}, + {file = "pytokens-0.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01d1a61e36812e4e971cfe2c0e4c1f2d66d8311031dac8bf168af8a249fa04dd"}, + {file = "pytokens-0.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e47e2ef3ec6ee86909e520d79f965f9b23389fda47460303cf715d510a6fe544"}, + {file = "pytokens-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d36954aba4557fd5a418a03cf595ecbb1cdcce119f91a49b19ef09d691a22ae"}, + {file = "pytokens-0.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73eff3bdd8ad08da679867992782568db0529b887bed4c85694f84cdf35eafc6"}, + {file = "pytokens-0.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d97cc1f91b1a8e8ebccf31c367f28225699bea26592df27141deade771ed0afb"}, + {file = "pytokens-0.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a2c8952c537cb73a1a74369501a83b7f9d208c3cf92c41dd88a17814e68d48ce"}, + {file = "pytokens-0.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dbf56f3c748aed9310b310d5b8b14e2c96d3ad682ad5a943f381bdbbdddf753"}, + {file = "pytokens-0.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:e131804513597f2dff2b18f9911d9b6276e21ef3699abeffc1c087c65a3d975e"}, + {file = "pytokens-0.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0d7374c917197106d3c4761374718bc55ea2e9ac0fb94171588ef5840ee1f016"}, + {file = 
"pytokens-0.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cd3fa1caf9e47a72ee134a29ca6b5bea84712724bba165d6628baa190c6ea5b"}, + {file = "pytokens-0.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6986576b7b07fe9791854caa5347923005a80b079d45b63b0be70d50cce5f1"}, + {file = "pytokens-0.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9940f7c2e2f54fb1cb5fe17d0803c54da7a2bf62222704eb4217433664a186a7"}, + {file = "pytokens-0.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:54691cf8f299e7efabcc25adb4ce715d3cef1491e1c930eaf555182f898ef66a"}, + {file = "pytokens-0.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:94ff5db97a0d3cd7248a5b07ba2167bd3edc1db92f76c6db00137bbaf068ddf8"}, + {file = "pytokens-0.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0dd6261cd9cc95fae1227b1b6ebee023a5fd4a4b6330b071c73a516f5f59b63"}, + {file = "pytokens-0.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cdca8159df407dbd669145af4171a0d967006e0be25f3b520896bc7068f02c4"}, + {file = "pytokens-0.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4b5770abeb2a24347380a1164a558f0ebe06e98aedbd54c45f7929527a5fb26e"}, + {file = "pytokens-0.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:74500d72c561dad14c037a9e86a657afd63e277dd5a3bb7570932ab7a3b12551"}, + {file = "pytokens-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e368e0749e4e9d86a6e08763310dc92bc69ad73d9b6db5243b30174c71a8a534"}, + {file = "pytokens-0.4.0-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:865cc65c75c8f2e9e0d8330338f649b12bfd9442561900ebaf58c596a72107d2"}, + {file = "pytokens-0.4.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbb9338663b3538f31c4ca7afe4f38d9b9b3a16a8be18a273a5704a1bc7a2367"}, + {file = "pytokens-0.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:658f870523ac1a5f4733d7db61ce9af61a0c23b2aeea3d03d1800c93f760e15f"}, + {file = "pytokens-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d69a2491190a74e4b6f87f3b9dfce7a6873de3f3bf330d20083d374380becac0"}, + {file = "pytokens-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8cd795191c4127fcb3d7b76d84006a07748c390226f47657869235092eedbc05"}, + {file = "pytokens-0.4.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef2bcbddb73ac18599a86c8c549d5145130f2cd9d83dc2b5482fd8322b7806cd"}, + {file = "pytokens-0.4.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:06ac081c1187389762b58823d90d6339e6880ce0df912f71fb9022d81d7fd429"}, + {file = "pytokens-0.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:278129d54573efdc79e75c6082e73ebd19858e22a2e848359f93629323186ca6"}, + {file = "pytokens-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:9380fb6d96fa5ab83ed606ebad27b6171930cc14a8a8d215f6adb187ba428690"}, + {file = "pytokens-0.4.0-py3-none-any.whl", hash = "sha256:0508d11b4de157ee12063901603be87fb0253e8f4cb9305eb168b1202ab92068"}, + {file = "pytokens-0.4.0.tar.gz", hash = "sha256:6b0b03e6ea7c9f9d47c5c61164b69ad30f4f0d70a5d9fe7eac4d19f24f77af2d"}, +] + +[package.extras] +dev = ["black", "build", "mypy", "pytest", "pytest-cov", "setuptools", "tox", "twine", "wheel"] + [[package]] name = "pywin32-ctypes" version = "0.2.3" @@ -1819,6 +1903,35 @@ pygments = 
">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "ruff" +version = "0.14.14" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.14.14-py3-none-linux_armv6l.whl", hash = "sha256:7cfe36b56e8489dee8fbc777c61959f60ec0f1f11817e8f2415f429552846aed"}, + {file = "ruff-0.14.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6006a0082336e7920b9573ef8a7f52eec837add1265cc74e04ea8a4368cd704c"}, + {file = "ruff-0.14.14-py3-none-macosx_11_0_arm64.whl", hash = "sha256:026c1d25996818f0bf498636686199d9bd0d9d6341c9c2c3b62e2a0198b758de"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f666445819d31210b71e0a6d1c01e24447a20b85458eea25a25fe8142210ae0e"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c0f18b922c6d2ff9a5e6c3ee16259adc513ca775bcf82c67ebab7cbd9da5bc8"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1629e67489c2dea43e8658c3dba659edbfd87361624b4040d1df04c9740ae906"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:27493a2131ea0f899057d49d303e4292b2cae2bb57253c1ed1f256fbcd1da480"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ff589aab3f5b539e35db38425da31a57521efd1e4ad1ae08fc34dbe30bd7df"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc12d74eef0f29f51775f5b755913eb523546b88e2d733e1d701fe65144e89b"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb8481604b7a9e75eff53772496201690ce2687067e038b3cc31aaf16aa0b974"}, + {file = "ruff-0.14.14-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:14649acb1cf7b5d2d283ebd2f58d56b75836ed8c6f329664fa91cdea19e76e66"}, + {file = "ruff-0.14.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8058d2145566510790eab4e2fad186002e288dec5e0d343a92fe7b0bc1b3e13"}, + {file = "ruff-0.14.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e651e977a79e4c758eb807f0481d673a67ffe53cfa92209781dfa3a996cf8412"}, + {file = "ruff-0.14.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cc8b22da8d9d6fdd844a68ae937e2a0adf9b16514e9a97cc60355e2d4b219fc3"}, + {file = "ruff-0.14.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:16bc890fb4cc9781bb05beb5ab4cd51be9e7cb376bf1dd3580512b24eb3fda2b"}, + {file = "ruff-0.14.14-py3-none-win32.whl", hash = "sha256:b530c191970b143375b6a68e6f743800b2b786bbcf03a7965b06c4bf04568167"}, + {file = "ruff-0.14.14-py3-none-win_amd64.whl", hash = "sha256:3dde1435e6b6fe5b66506c1dff67a421d0b7f6488d466f651c07f4cab3bf20fd"}, + {file = "ruff-0.14.14-py3-none-win_arm64.whl", hash = "sha256:56e6981a98b13a32236a72a8da421d7839221fa308b223b9283312312e5ac76c"}, + {file = "ruff-0.14.14.tar.gz", hash = "sha256:2d0f819c9a90205f3a867dbbd0be083bee9912e170fd7d9704cc8ae45824896b"}, +] + [[package]] name = "secretstorage" version = "3.3.3" @@ -1962,6 +2075,57 @@ urllib3 = ">=1.26.0" [package.extras] keyring = ["keyring (>=21.2.0)"] +[[package]] +name = "typed-ast" +version = "1.5.5" +description = "a fork of Python 2 and 3 ast modules with type comment support" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, + {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, + {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, + {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, + {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, + {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, + {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", 
hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, + {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, + {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, + {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, + {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, +] + [[package]] name = "typing-extensions" version = "4.15.0" @@ -2041,4 +2205,4 @@ matplotlib = ["matplotlib", "networkx"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "749c3d007f4b49bc86ce4575fed42bd67b08e105777bb718cab6de0d77728ac0" +content-hash = "f2f454f130be2bf1b2802d5abd1cf0b1cc8a3f266cb969f5b7eaa0c0b9540d35" diff --git a/pyproject.toml b/pyproject.toml index de710c7..bd2c740 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,8 @@ optional = true [tool.poetry.group.dev.dependencies] pytest = ">=7.0" pytest-cov = ">=4.0" -flake8 = ">=5.0" +black = ">=24.0" +ruff = ">=0.6" pre-commit = ">=2.20" tox = ">=4.0" twine = ">=6.0" @@ -66,3 +67,15 @@ exclude_lines = [ "if __name__ == .__main__.:", "raise NotImplementedError", ] + +[tool.black] +line-length = 120 +target-version = ["py39"] + +[tool.ruff] +line-length = 120 +target-version = "py39" + +[tool.ruff.lint] +select = ["E4", "E7", "E9", 
"F"] +per-file-ignores = { "__init__.py" = ["F401"] } diff --git a/tests/test_codegraph.py b/tests/test_codegraph.py index 1c29b02..841b8fa 100644 --- a/tests/test_codegraph.py +++ b/tests/test_codegraph.py @@ -5,9 +5,7 @@ def test_main(): - module_path = ( - pathlib.Path(__file__).parents[0] / "test_data" / "vizualyzer.py" - ).as_posix() + module_path = (pathlib.Path(__file__).parents[0] / "test_data" / "vizualyzer.py").as_posix() args = Namespace(paths=[module_path]) usage_graph = CodeGraph(args).usage_graph() excepted = { diff --git a/tests/test_data/alias_imports.py b/tests/test_data/alias_imports.py index b10a0d2..868a40f 100644 --- a/tests/test_data/alias_imports.py +++ b/tests/test_data/alias_imports.py @@ -1,4 +1,5 @@ """Test module for aliased imports.""" + from tests.test_data import module_a as ma from tests.test_data import module_b as mb diff --git a/tests/test_data/comma_imports.py b/tests/test_data/comma_imports.py index 67e04be..88baa01 100644 --- a/tests/test_data/comma_imports.py +++ b/tests/test_data/comma_imports.py @@ -1,4 +1,5 @@ """Test module for comma-separated imports.""" + from tests.test_data import module_a, module_b, module_c diff --git a/tests/test_data/module_a.py b/tests/test_data/module_a.py index 540da27..33f0119 100644 --- a/tests/test_data/module_a.py +++ b/tests/test_data/module_a.py @@ -1,4 +1,5 @@ """Test module A for multi-module connection testing.""" + from tests.test_data import module_b, module_c diff --git a/tests/test_data/module_b.py b/tests/test_data/module_b.py index 5206cce..f5f4cd5 100644 --- a/tests/test_data/module_b.py +++ b/tests/test_data/module_b.py @@ -1,4 +1,5 @@ """Test module B for multi-module connection testing.""" + from tests.test_data import module_c diff --git a/tests/test_data/vizualyzer.py b/tests/test_data/vizualyzer.py index 67e9184..f8c7c01 100644 --- a/tests/test_data/vizualyzer.py +++ b/tests/test_data/vizualyzer.py @@ -33,9 +33,7 @@ def draw_graph(modules_entities: Dict) -> None: sub_edges_all = [] for module in modules_entities: - new_module_edges_all, new_edges_all = process_module_in_graph( - module, modules_entities[module], G - ) + new_module_edges_all, new_edges_all = process_module_in_graph(module, modules_entities[module], G) module_edges_all += new_module_edges_all sub_edges_all += new_edges_all @@ -64,9 +62,7 @@ def draw_graph(modules_entities: Dict) -> None: alpha=0.8, ) - nx.draw_networkx_labels( - G, pos, labels=module_list_labels, font_weight="bold", font_size=11 - ) + nx.draw_networkx_labels(G, pos, labels=module_list_labels, font_weight="bold", font_size=11) nx.draw_networkx_labels( G, pos, diff --git a/tests/test_graph_generation.py b/tests/test_graph_generation.py index 311731a..dbab673 100644 --- a/tests/test_graph_generation.py +++ b/tests/test_graph_generation.py @@ -1,4 +1,5 @@ """Tests for graph generation functionality.""" + import csv import pathlib import tempfile @@ -8,7 +9,6 @@ from codegraph.parser import create_objects_array, Import from codegraph.vizualyzer import convert_to_d3_format, export_to_csv - TEST_DATA_DIR = pathlib.Path(__file__).parent / "test_data" @@ -121,7 +121,7 @@ class MultiLineInherit( pass """ result = create_objects_array("test.py", source) - classes = [c for c in result if hasattr(c, 'super')] + classes = [c for c in result if hasattr(c, "super")] child = next(c for c in classes if c.name == "Child") assert "Base1" in child.super @@ -222,9 +222,7 @@ def test_module_entity_links(self): result = convert_to_d3_format(usage_graph) # Should have module-entity links - 
module_entity_links = [ - link for link in result["links"] if link["type"] == "module-entity" - ] + module_entity_links = [link for link in result["links"] if link["type"] == "module-entity"] assert len(module_entity_links) >= 2 def test_dependency_links(self): @@ -345,7 +343,7 @@ def test_export_creates_file(self): "func_b": [], } } - with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as f: + with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f: output_path = f.name export_to_csv(usage_graph, output_path=output_path) @@ -360,16 +358,16 @@ def test_export_has_correct_columns(self): "func_a": [], } } - with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as f: + with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f: output_path = f.name export_to_csv(usage_graph, output_path=output_path) - with open(output_path, 'r') as csvfile: + with open(output_path, "r") as csvfile: reader = csv.DictReader(csvfile) fieldnames = reader.fieldnames - expected_columns = ['name', 'type', 'parent_module', 'full_path', 'links_out', 'links_in', 'lines'] + expected_columns = ["name", "type", "parent_module", "full_path", "links_out", "links_in", "lines"] assert fieldnames == expected_columns pathlib.Path(output_path).unlink() @@ -380,25 +378,21 @@ def test_export_module_data(self): "func_a": [], } } - entity_metadata = { - "/path/to/module.py": { - "func_a": {"lines": 10, "entity_type": "function"} - } - } - with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as f: + entity_metadata = {"/path/to/module.py": {"func_a": {"lines": 10, "entity_type": "function"}}} + with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f: output_path = f.name export_to_csv(usage_graph, entity_metadata=entity_metadata, output_path=output_path) - with open(output_path, 'r') as csvfile: + with open(output_path, "r") as csvfile: reader = csv.DictReader(csvfile) rows = list(reader) # Find module row - module_row = next((r for r in rows if r['type'] == 'module'), None) + module_row = next((r for r in rows if r["type"] == "module"), None) assert module_row is not None - assert module_row['name'] == 'module.py' - assert module_row['parent_module'] == '' + assert module_row["name"] == "module.py" + assert module_row["parent_module"] == "" pathlib.Path(output_path).unlink() @@ -416,27 +410,27 @@ def test_export_entity_data(self): "MyClass": {"lines": 50, "entity_type": "class"}, } } - with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as f: + with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f: output_path = f.name export_to_csv(usage_graph, entity_metadata=entity_metadata, output_path=output_path) - with open(output_path, 'r') as csvfile: + with open(output_path, "r") as csvfile: reader = csv.DictReader(csvfile) rows = list(reader) # Find function row - func_row = next((r for r in rows if r['name'] == 'my_function'), None) + func_row = next((r for r in rows if r["name"] == "my_function"), None) assert func_row is not None - assert func_row['type'] == 'function' - assert func_row['parent_module'] == 'module.py' - assert func_row['lines'] == '15' + assert func_row["type"] == "function" + assert func_row["parent_module"] == "module.py" + assert func_row["lines"] == "15" # Find class row - class_row = next((r for r in rows if r['name'] == 'MyClass'), None) + class_row = next((r for r in rows if r["name"] == "MyClass"), None) assert class_row is not None - assert 
class_row['type'] == 'class' - assert class_row['lines'] == '50' + assert class_row["type"] == "class" + assert class_row["lines"] == "50" pathlib.Path(output_path).unlink() @@ -451,24 +445,24 @@ def test_export_links_count(self): "func_c": [], }, } - with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as f: + with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f: output_path = f.name export_to_csv(usage_graph, output_path=output_path) - with open(output_path, 'r') as csvfile: + with open(output_path, "r") as csvfile: reader = csv.DictReader(csvfile) rows = list(reader) # func_a should have links_out (dependencies) - func_a_row = next((r for r in rows if r['name'] == 'func_a'), None) + func_a_row = next((r for r in rows if r["name"] == "func_a"), None) assert func_a_row is not None - assert int(func_a_row['links_out']) >= 2 + assert int(func_a_row["links_out"]) >= 2 # func_b should have links_in (being depended on) - func_b_row = next((r for r in rows if r['name'] == 'func_b'), None) + func_b_row = next((r for r in rows if r["name"] == "func_b"), None) assert func_b_row is not None - assert int(func_b_row['links_in']) >= 1 + assert int(func_b_row["links_in"]) >= 1 pathlib.Path(output_path).unlink() @@ -481,33 +475,33 @@ def test_export_codegraph_on_itself(self): usage_graph = code_graph.usage_graph() entity_metadata = code_graph.get_entity_metadata() - with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as f: + with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f: output_path = f.name export_to_csv(usage_graph, entity_metadata=entity_metadata, output_path=output_path) - with open(output_path, 'r') as csvfile: + with open(output_path, "r") as csvfile: reader = csv.DictReader(csvfile) rows = list(reader) # Should have modules - module_names = [r['name'] for r in rows if r['type'] == 'module'] - assert 'core.py' in module_names - assert 'parser.py' in module_names - assert 'main.py' in module_names - assert 'vizualyzer.py' in module_names + module_names = [r["name"] for r in rows if r["type"] == "module"] + assert "core.py" in module_names + assert "parser.py" in module_names + assert "main.py" in module_names + assert "vizualyzer.py" in module_names # Should have functions and classes - types = set(r['type'] for r in rows) - assert 'module' in types - assert 'function' in types - assert 'class' in types + types = set(r["type"] for r in rows) + assert "module" in types + assert "function" in types + assert "class" in types # CodeGraph class should exist - codegraph_row = next((r for r in rows if r['name'] == 'CodeGraph'), None) + codegraph_row = next((r for r in rows if r["name"] == "CodeGraph"), None) assert codegraph_row is not None - assert codegraph_row['type'] == 'class' - assert codegraph_row['parent_module'] == 'core.py' - assert int(codegraph_row['lines']) > 0 + assert codegraph_row["type"] == "class" + assert codegraph_row["parent_module"] == "core.py" + assert int(codegraph_row["lines"]) > 0 pathlib.Path(output_path).unlink() diff --git a/tests/test_package_install.py b/tests/test_package_install.py index 996d733..30b39fd 100644 --- a/tests/test_package_install.py +++ b/tests/test_package_install.py @@ -7,50 +7,44 @@ def test_import_codegraph(): """Test that codegraph can be imported without matplotlib.""" import codegraph + assert codegraph.__version__ is not None def test_import_core(): """Test that core module can be imported without matplotlib.""" from codegraph import core - assert 
hasattr(core, 'CodeGraph') + + assert hasattr(core, "CodeGraph") def test_import_vizualyzer(): """Test that vizualyzer can be imported without matplotlib.""" from codegraph import vizualyzer - assert hasattr(vizualyzer, 'draw_graph') - assert hasattr(vizualyzer, 'draw_graph_matplotlib') + + assert hasattr(vizualyzer, "draw_graph") + assert hasattr(vizualyzer, "draw_graph_matplotlib") def test_d3_visualization_without_matplotlib(): """Test that D3.js visualization works without matplotlib installed.""" from codegraph import vizualyzer - test_data = { - '/test/module1.py': { - 'func1': ['module2.func2'], - '_': [] - }, - '/test/module2.py': { - 'func2': [], - '_': [] - } - } - - with tempfile.NamedTemporaryFile(suffix='.html', delete=False) as f: + test_data = {"/test/module1.py": {"func1": ["module2.func2"], "_": []}, "/test/module2.py": {"func2": [], "_": []}} + + with tempfile.NamedTemporaryFile(suffix=".html", delete=False) as f: output_path = f.name try: vizualyzer.draw_graph(test_data, output_path=output_path) assert os.path.exists(output_path) - with open(output_path, 'r') as f: + with open(output_path, "r") as f: content = f.read() - assert 'graphData' in content - assert 'd3.js' in content or 'd3.v7' in content - assert 'module1.py' in content - assert 'module2.py' in content + assert "graphData" in content + assert "d3.js" in content or "d3.v7" in content + assert "module1.py" in content + assert "module2.py" in content finally: if os.path.exists(output_path): os.unlink(output_path) @@ -62,7 +56,7 @@ def test_codegraph_on_itself_without_matplotlib(): from argparse import Namespace codegraph_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - codegraph_src = os.path.join(codegraph_path, 'codegraph') + codegraph_src = os.path.join(codegraph_path, "codegraph") args = Namespace(paths=[codegraph_src]) cg = CodeGraph(args) @@ -80,6 +74,6 @@ def test_cli_help(): from codegraph.main import cli runner = CliRunner() - result = runner.invoke(cli, ['--help']) + result = runner.invoke(cli, ["--help"]) assert result.exit_code == 0 - assert 'codegraph' in result.output.lower() or 'PATHS' in result.output + assert "codegraph" in result.output.lower() or "PATHS" in result.output diff --git a/tests/test_python_parser_ast.py b/tests/test_python_parser_ast.py index fd01290..320b7c6 100644 --- a/tests/test_python_parser_ast.py +++ b/tests/test_python_parser_ast.py @@ -1,7 +1,5 @@ from argparse import Namespace -import pytest - from codegraph.core import CodeGraph from codegraph.parsers.python_parser import PythonParser @@ -55,7 +53,7 @@ def test_usage_graph_resolves_alias_calls(tmp_path): module_a_path = (tmp_path / "module_a.py").as_posix() deps = set(usage_graph[module_a_path]["call_all"]) assert "module_b.foo" in deps - assert f"module_b._" in usage_graph[module_a_path]["_"] + assert "module_b._" in usage_graph[module_a_path]["_"] def test_class_inheritance_dependency(tmp_path): @@ -96,7 +94,7 @@ def test_get_lines_numbers(tmp_path): _write( tmp_path, "module_a.py", - "\"\"\"module\"\"\"\n\ndef foo():\n return 1\n\n\ndef bar():\n return 2\n", + '"""module"""\n\ndef foo():\n return 1\n\n\ndef bar():\n return 2\n', ) args = Namespace(paths=[tmp_path.as_posix()], language="python") code_graph = CodeGraph(args) diff --git a/tests/test_visualizer_helpers.py b/tests/test_visualizer_helpers.py index f6d40fa..3e00635 100644 --- a/tests/test_visualizer_helpers.py +++ b/tests/test_visualizer_helpers.py @@ -42,7 +42,7 @@ def test_get_d3_html_template_replaces_placeholders(): html = 
vizualyzer.get_d3_html_template({"nodes": [], "links": [], "unlinkedModules": []})
     assert "STYLES_PLACEHOLDER" not in html
     assert "GRAPH_DATA_PLACEHOLDER" not in html
-    assert "\"nodes\": []" in html
+    assert '"nodes": []' in html
 
 
 def test_draw_graph_writes_file_and_opens_browser(tmp_path, monkeypatch):
diff --git a/tox.ini b/tox.ini
index b90a618..d9a30ba 100644
--- a/tox.ini
+++ b/tox.ini
@@ -17,11 +17,13 @@ commands = pytest tests/ -v --cov=codegraph --cov-report=term-missing {posargs}
 
 [testenv:lint]
-description = Run linting with flake8
+description = Run formatting and linting with black and ruff
 deps =
-    flake8>=5.0
+    black>=24.0
+    ruff>=0.6
 commands =
-    flake8 codegraph/ tests/
+    black --check .
+    ruff check .
 
 [testenv:package]
 description = Test package installation and usage without matplotlib
@@ -62,10 +64,3 @@ basepython = python3.12
 
 [testenv:py313]
 basepython = python3.13
-
-[flake8]
-max-line-length = 120
-exclude = .tox,dist,build,*.egg-info
-per-file-ignores =
-    __init__.py:F401
-    vizualyzer.py:E501
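For reference (not part of the diff), the updated lint environment can be exercised locally through tox, or the tools can be run directly as CONTRIBUTING.md now describes; this assumes tox and the dev dependencies are installed:

```
tox -e lint
# or directly:
black --check .
ruff check .
```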