Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 8 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,11 @@ exclude = [
'tests/assets', # These are sample packages for tests to run under - we don't want ruff to mess with them.
'docs/', # Documentation configuration scripts
]
lint.select = ["A", "C4", "E4", "E7", "E9", "F", "FLY", "FURB", "INP", "PLE", "PLR", "PT", "RUF", "SIM", "UP"]
lint.ignore = ["C408", "PLR2004", "PLR0913", "PT006", "PT007", "SIM108", "SIM118", "SIM300"]
lint.per-file-ignores."tests/**/*.py" = [ "RUF012", "RUF059" ]
lint.select = [ "ALL" ]
Comment thread
nathanjmcdougall marked this conversation as resolved.
Outdated
lint.ignore = [ "A005", "ANN401" ,"ARG002", "C408", "COM812", "D1", "D2", "E501", "FBT", "FIX", "G004", "N818", "PERF203", "PLR2004", "PLR0913", "PLW1641", "PTH", "PT006", "PT007", "RET504", "SIM108", "SIM118", "SIM300", "TD", "TID252" ]
Comment thread
nathanjmcdougall marked this conversation as resolved.
Outdated
lint.per-file-ignores."tests/**/*.py" = [ "ANN", "ARG", "B007", "B018", "B033", "D", "EM", "FBT", "N806", "RET505", "RUF012", "RUF059", "S", "TRY" ]
lint.pydocstyle.convention = "google"
lint.future-annotations = true
lint.flake8-builtins.strict-checking = true
lint.flake8-type-checking.quote-annotations = true
lint.typing-extensions = false
Comment thread
nathanjmcdougall marked this conversation as resolved.
Outdated
4 changes: 2 additions & 2 deletions src/grimp/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
__version__ = "3.14"

from .application.graph import DetailedImport, ImportGraph, Import
from .application.graph import DetailedImport, Import, ImportGraph
from .domain.analysis import PackageDependency, Route
from .domain.valueobjects import DirectImport, Module, Layer
from .domain.valueobjects import DirectImport, Layer, Module
from .main import build_graph

__all__ = [
Expand Down
30 changes: 18 additions & 12 deletions src/grimp/adaptors/caching.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,19 @@
import hashlib
from __future__ import annotations
Comment thread
nathanjmcdougall marked this conversation as resolved.

import hashlib
import json
import logging
from typing import TYPE_CHECKING, Any

from grimp.application.ports.filesystem import BasicFileSystem
from grimp.application.ports.modulefinder import FoundPackage, ModuleFile
from grimp.domain.valueobjects import DirectImport, Module
from grimp import _rustgrimp as rust # type: ignore[attr-defined]

from ..application.ports.caching import Cache as AbstractCache
from ..application.ports.caching import CacheMiss
from grimp import _rustgrimp as rust # type: ignore[attr-defined]

if TYPE_CHECKING:
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

On the fence on this one - I suppose there is hypothetically a slight runtime penalty, but for me it makes the code a little less readable. What rule is it?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The TC ruleset from flake8-type-checking, specifically codes TC001, TC002, and TC003.

https://docs.astral.sh/ruff/rules/#flake8-type-checking-tc

The runtime penalty is probably near-zero because import grimp will basically import all submodules, and grimp has no dependencies. So it would only be stdlib imports but I think it's not worth being concerned about those.

I've gone ahead and reverted this from the PR. We can always add it back in later if there's a desire for it, it's easy to migrate with autofix.

from grimp.application.ports.filesystem import BasicFileSystem
from grimp.application.ports.modulefinder import FoundPackage, ModuleFile
from grimp.domain.valueobjects import DirectImport, Module

logger = logging.getLogger(__name__)
PrimitiveFormat = dict[str, list[tuple[str, int | None, str]]]
Expand Down Expand Up @@ -64,7 +68,7 @@ def make_data_file_unique_string(
class Cache(AbstractCache):
DEFAULT_CACHE_DIR = ".grimp_cache"

def __init__(self, *args, namer: type[CacheFileNamer], **kwargs) -> None:
def __init__(self, *args: Any, namer: type[CacheFileNamer], **kwargs: Any) -> None:
"""
Don't instantiate Cache directly; use Cache.setup().
"""
Expand All @@ -82,7 +86,7 @@ def setup(
exclude_type_checking_imports: bool = False,
cache_dir: str | None = None,
namer: type[CacheFileNamer] = CacheFileNamer,
) -> "Cache":
) -> Cache:
cache = cls(
file_system=file_system,
found_packages=found_packages,
Expand All @@ -93,7 +97,8 @@ def setup(
)
cache._build_mtime_map()
cache._build_data_map()
assert cache.cache_dir
if not cache.cache_dir:
Comment thread
nathanjmcdougall marked this conversation as resolved.
Outdated
raise AssertionError
return cache

@classmethod
Expand All @@ -104,7 +109,7 @@ def read_imports(self, module_file: ModuleFile) -> set[DirectImport]:
try:
cached_mtime = self._mtime_map[module_file.module.name]
except KeyError:
raise CacheMiss
raise CacheMiss from None
Comment thread
nathanjmcdougall marked this conversation as resolved.
Outdated
if cached_mtime != module_file.mtime:
raise CacheMiss

Expand All @@ -113,7 +118,7 @@ def read_imports(self, module_file: ModuleFile) -> set[DirectImport]:
except KeyError:
# While we would expect the module to be in here,
# there's no point in crashing if, for some reason, it's not.
raise CacheMiss
raise CacheMiss from None
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As above


def write(
self,
Expand Down Expand Up @@ -170,12 +175,13 @@ def _read_mtime_map_file(self, found_package: FoundPackage) -> dict[str, float]:
return {}
try:
deserialized = json.loads(serialized)
logger.info(f"Used cache meta file {meta_cache_filename}.")
return deserialized
except json.JSONDecodeError:
logger.warning(f"Could not use corrupt cache file {meta_cache_filename}.")
return {}

logger.info(f"Used cache meta file {meta_cache_filename}.")
return deserialized

def _build_data_map(self) -> None:
self._data_map = self._read_data_map_file()

Expand Down
11 changes: 9 additions & 2 deletions src/grimp/adaptors/filesystem.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,16 @@
from __future__ import annotations

import os
import tokenize
from collections.abc import Iterator
from typing import TYPE_CHECKING

from grimp.application.ports.filesystem import AbstractFileSystem, BasicFileSystem
from grimp import _rustgrimp as rust # type: ignore[attr-defined]
from grimp.application.ports.filesystem import AbstractFileSystem

if TYPE_CHECKING:
from collections.abc import Iterator

from grimp.application.ports.filesystem import BasicFileSystem


class FileSystem(AbstractFileSystem):
Expand Down
49 changes: 31 additions & 18 deletions src/grimp/adaptors/modulefinder.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,17 @@
from __future__ import annotations

import logging
from collections.abc import Iterable, Set
from typing import TYPE_CHECKING

from grimp.application.ports import modulefinder
from grimp.application.ports.filesystem import AbstractFileSystem
from grimp.domain.valueobjects import Module

if TYPE_CHECKING:
from collections.abc import Iterable
from collections.abc import Set as AbstractSet
Comment thread
nathanjmcdougall marked this conversation as resolved.
Outdated

from grimp.application.ports.filesystem import AbstractFileSystem

logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -44,7 +51,7 @@ def find_package(

def _get_python_files_and_namespace_dirs_inside_package(
self, directory: str
) -> tuple[Iterable[str], Set[str]]:
) -> tuple[Iterable[str], AbstractSet[str]]:
"""
Search the supplied package directory for Python files and namespaces.

Expand Down Expand Up @@ -79,14 +86,16 @@ def _get_python_files_and_namespace_dirs_inside_package(
for d in dirs_to_remove:
dirs.remove(d)

for filename in files:
if self._is_python_file(filename, dirpath):
python_files.append(self.file_system.join(dirpath, filename))
python_files.extend(
self.file_system.join(dirpath, filename)
for filename in files
if self._is_python_file(filename, dirpath)
)

namespace_dirs = self._determine_namespace_dirs(candidate_namespace_dirs, python_files)
return python_files, namespace_dirs

def _is_in_portion(self, directory: str, portions: Set[str]) -> bool:
def _is_in_portion(self, directory: str, portions: AbstractSet[str]) -> bool:
return any(directory.startswith(portion) for portion in portions)

def _should_ignore_dir(self, directory: str) -> bool:
Expand All @@ -112,9 +121,11 @@ def _is_python_file(self, filename: str, dirpath: str) -> bool:
Files with extra dots in the name won't be treated as Python files.

Args:
filename (str): the filename, excluding the path.
filename: the filename, excluding the path.
dirpath: the directory path containing the file.

Returns:
bool: whether it's a Python file.
Whether it's a Python file.
"""
# Ignore hidden files.
if filename.startswith("."):
Expand All @@ -138,12 +149,13 @@ def _module_name_from_filename(
) -> str:
"""
Args:
package_name (string) - the importable name of the top level package. Could
package_name: the importable name of the top level package. Could
be namespaced.
filename_and_path (string) - the full name of the Python file.
package_directory (string) - the full path of the top level Python package directory.
Returns:
Absolute module name for importing (string).
filename_and_path: the full name of the Python file.
package_directory: the full path of the top level Python package directory.

Returns:
Absolute module name for importing.
"""
internal_filename_and_path = filename_and_path[len(package_directory) :]
internal_filename_and_path_without_extension = internal_filename_and_path[1:-3]
Expand All @@ -160,11 +172,12 @@ def _namespace_from_dir(
) -> str:
"""
Args:
package_name (string) - the importable name of the top level package. Could
package_name: the importable name of the top level package. Could
be namespaced.
namespace_dir (string) - the full name of the namespace directory.
package_directory (string) - the full path of the top level Python package directory.
Returns:
namespace_dir: the full name of the namespace directory.
package_directory: the full path of the top level Python package directory.

Returns:
Absolute module name for importing (string).
"""
parent_of_package_directory = package_directory[: -len(package_name)]
Expand Down
23 changes: 17 additions & 6 deletions src/grimp/adaptors/packagefinder.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,19 @@
from __future__ import annotations

import importlib.util
import logging
import sys
from importlib.machinery import ModuleSpec
from typing import TYPE_CHECKING

from grimp import exceptions
from grimp.application.ports.filesystem import AbstractFileSystem
from grimp.application.ports.packagefinder import AbstractPackageFinder
from grimp.domain.valueobjects import Module

if TYPE_CHECKING:
from importlib.machinery import ModuleSpec

from grimp.application.ports.filesystem import AbstractFileSystem

logger = logging.getLogger(__name__)


Expand All @@ -19,7 +25,8 @@ def determine_package_directories(
spec = importlib.util.find_spec(package_name)
if not spec:
logger.debug(f"sys.path: {sys.path}")
raise ValueError(f"Could not find package '{package_name}' in your Python path.")
msg = f"Could not find package '{package_name}' in your Python path."
Comment thread
nathanjmcdougall marked this conversation as resolved.
raise ValueError(msg)

if (
spec.has_location
Expand All @@ -30,11 +37,14 @@ def determine_package_directories(
):
raise exceptions.NotATopLevelModule

assert spec.submodule_search_locations # This should be the case if spec.has_location.
if not spec.submodule_search_locations:
# This should never be the case if spec.has_location.
raise AssertionError
return set(spec.submodule_search_locations)

def _is_a_package(self, spec: ModuleSpec, file_system: AbstractFileSystem) -> bool:
assert spec.origin
if not spec.origin:
raise AssertionError
filename = file_system.split(spec.origin)[1]
return filename == "__init__.py"

Expand All @@ -46,5 +56,6 @@ def _has_a_non_namespace_parent(self, spec: ModuleSpec) -> bool:
return False

root_spec = importlib.util.find_spec(module.parent.name)
assert root_spec
if not root_spec:
raise AssertionError
return root_spec.has_location
14 changes: 8 additions & 6 deletions src/grimp/application/config.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,21 @@
from __future__ import annotations

from typing import Any


class Settings:
def __init__(self):
self._config = {}
def __init__(self) -> None:
self._config: dict[str, Any] = {}

def configure(self, **config_dict: Any):
def configure(self, **config_dict: Any) -> None:
self._config.update(config_dict)

def __getattr__(self, name):
def __getattr__(self, name: str) -> Any:
if name[:2] != "__":
return self._config[name]
return super().__getattr__(name)
return super().__getattribute__(name)
Comment thread
nathanjmcdougall marked this conversation as resolved.

def copy(self) -> "Settings":
def copy(self) -> Settings:
new_instance = self.__class__()
new_instance.configure(**self._config)
return new_instance
Expand Down
Loading