File Systems API

Pluggable storage backends for simulation I/O.
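
Every backend on this page satisfies the same FileSystem protocol, so callers can swap local disk for cloud storage without changing their code. A minimal sketch (the commented `simulate(...)` call mirrors the S3FileSystem example further down this page):

```python
from idfkit.simulation.fs import LocalFileSystem, S3FileSystem

fs = LocalFileSystem()  # default: plain local disk
# fs = S3FileSystem(bucket="my-bucket", prefix="simulations/")  # cloud storage instead

# Same call either way; taken from the S3FileSystem example later on this page:
# result = simulate(model, weather, output_dir="run-001", fs=fs)
```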

FileSystem Protocol

idfkit.simulation.fs.FileSystem

Bases: Protocol

Protocol for file system operations used by the simulation module.

All methods accept str | Path for path arguments.

Source code in src/idfkit/simulation/fs.py
@runtime_checkable
class FileSystem(Protocol):
    """Protocol for file system operations used by the simulation module.

    All methods accept ``str | Path`` for path arguments.
    """

    def read_bytes(self, path: str | Path) -> bytes:
        """Read a file as raw bytes.

        Args:
            path: Path to the file.

        Returns:
            The file contents as bytes.
        """
        ...

    def write_bytes(self, path: str | Path, data: bytes) -> None:
        """Write raw bytes to a file.

        Args:
            path: Path to the file.
            data: Bytes to write.
        """
        ...

    def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
        """Read a file as text.

        Args:
            path: Path to the file.
            encoding: Text encoding (default ``"utf-8"``).

        Returns:
            The file contents as a string.
        """
        ...

    def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
        """Write text to a file.

        Args:
            path: Path to the file.
            text: Text to write.
            encoding: Text encoding (default ``"utf-8"``).
        """
        ...

    def exists(self, path: str | Path) -> bool:
        """Check whether a file exists.

        Args:
            path: Path to check.

        Returns:
            True if the file exists.
        """
        ...

    def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
        """Create directories recursively.

        Args:
            path: Directory path to create.
            exist_ok: If True, do not raise if the directory already exists.
        """
        ...

    def copy(self, src: str | Path, dst: str | Path) -> None:
        """Copy a file from *src* to *dst*.

        Args:
            src: Source file path.
            dst: Destination file path.
        """
        ...

    def glob(self, path: str | Path, pattern: str) -> list[str]:
        """List files matching a glob pattern under *path*.

        Args:
            path: Base directory.
            pattern: Glob pattern (e.g. ``"*.sql"``).

        Returns:
            List of matching file paths as strings.
        """
        ...

    def remove(self, path: str | Path) -> None:
        """Remove a file.

        Args:
            path: Path to the file to remove.
        """
        ...
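
Because the protocol is decorated with `@runtime_checkable`, any object providing these methods passes an `isinstance` check (the check verifies method presence only, not signatures). A hypothetical in-memory backend, handy for tests and not part of idfkit, might look like this:

```python
import fnmatch
from pathlib import Path

from idfkit.simulation.fs import FileSystem


class MemoryFileSystem:
    """Hypothetical in-memory backend for tests; not part of idfkit."""

    def __init__(self) -> None:
        self._files: dict[str, bytes] = {}

    def read_bytes(self, path: str | Path) -> bytes:
        return self._files[str(path)]

    def write_bytes(self, path: str | Path, data: bytes) -> None:
        self._files[str(path)] = data

    def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
        return self.read_bytes(path).decode(encoding)

    def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
        self.write_bytes(path, text.encode(encoding))

    def exists(self, path: str | Path) -> bool:
        return str(path) in self._files

    def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
        pass  # directories are implicit in a flat key space

    def copy(self, src: str | Path, dst: str | Path) -> None:
        self._files[str(dst)] = self._files[str(src)]

    def glob(self, path: str | Path, pattern: str) -> list[str]:
        base = str(path).rstrip("/") + "/"
        return [
            key for key in self._files
            if key.startswith(base) and fnmatch.fnmatch(key[len(base):], pattern)
        ]

    def remove(self, path: str | Path) -> None:
        del self._files[str(path)]


fs = MemoryFileSystem()
assert isinstance(fs, FileSystem)  # structural check via @runtime_checkable
```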

read_bytes(path)

Read a file as raw bytes.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |

Returns:

| Type | Description |
| --- | --- |
| `bytes` | The file contents as bytes. |

Source code in src/idfkit/simulation/fs.py
def read_bytes(self, path: str | Path) -> bytes:
    """Read a file as raw bytes.

    Args:
        path: Path to the file.

    Returns:
        The file contents as bytes.
    """
    ...

write_bytes(path, data)

Write raw bytes to a file.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |
| `data` | `bytes` | Bytes to write. | *required* |

Source code in src/idfkit/simulation/fs.py
def write_bytes(self, path: str | Path, data: bytes) -> None:
    """Write raw bytes to a file.

    Args:
        path: Path to the file.
        data: Bytes to write.
    """
    ...

read_text(path, encoding='utf-8')

Read a file as text.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |
| `encoding` | `str` | Text encoding (default `"utf-8"`). | `'utf-8'` |

Returns:

| Type | Description |
| --- | --- |
| `str` | The file contents as a string. |

Source code in src/idfkit/simulation/fs.py
def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
    """Read a file as text.

    Args:
        path: Path to the file.
        encoding: Text encoding (default ``"utf-8"``).

    Returns:
        The file contents as a string.
    """
    ...

write_text(path, text, encoding='utf-8')

Write text to a file.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |
| `text` | `str` | Text to write. | *required* |
| `encoding` | `str` | Text encoding (default `"utf-8"`). | `'utf-8'` |

Source code in src/idfkit/simulation/fs.py
def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
    """Write text to a file.

    Args:
        path: Path to the file.
        text: Text to write.
        encoding: Text encoding (default ``"utf-8"``).
    """
    ...

exists(path)

Check whether a file exists.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to check. | *required* |

Returns:

| Type | Description |
| --- | --- |
| `bool` | True if the file exists. |

Source code in src/idfkit/simulation/fs.py
def exists(self, path: str | Path) -> bool:
    """Check whether a file exists.

    Args:
        path: Path to check.

    Returns:
        True if the file exists.
    """
    ...

makedirs(path, *, exist_ok=False)

Create directories recursively.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Directory path to create. | *required* |
| `exist_ok` | `bool` | If True, do not raise if the directory already exists. | `False` |

Source code in src/idfkit/simulation/fs.py
def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
    """Create directories recursively.

    Args:
        path: Directory path to create.
        exist_ok: If True, do not raise if the directory already exists.
    """
    ...

copy(src, dst)

Copy a file from src to dst.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `src` | `str \| Path` | Source file path. | *required* |
| `dst` | `str \| Path` | Destination file path. | *required* |

Source code in src/idfkit/simulation/fs.py
def copy(self, src: str | Path, dst: str | Path) -> None:
    """Copy a file from *src* to *dst*.

    Args:
        src: Source file path.
        dst: Destination file path.
    """
    ...

glob(path, pattern)

List files matching a glob pattern under path.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Base directory. | *required* |
| `pattern` | `str` | Glob pattern (e.g. `"*.sql"`). | *required* |

Returns:

| Type | Description |
| --- | --- |
| `list[str]` | List of matching file paths as strings. |

Source code in src/idfkit/simulation/fs.py
def glob(self, path: str | Path, pattern: str) -> list[str]:
    """List files matching a glob pattern under *path*.

    Args:
        path: Base directory.
        pattern: Glob pattern (e.g. ``"*.sql"``).

    Returns:
        List of matching file paths as strings.
    """
    ...

remove(path)

Remove a file.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file to remove. | *required* |

Source code in src/idfkit/simulation/fs.py
def remove(self, path: str | Path) -> None:
    """Remove a file.

    Args:
        path: Path to the file to remove.
    """
    ...

AsyncFileSystem Protocol

idfkit.simulation.fs.AsyncFileSystem

Bases: Protocol

Protocol for async file system operations used by the async simulation module.

This is the async counterpart to FileSystem. Use this with async_simulate and the async batch functions to avoid blocking the event loop during file I/O — especially important for network-backed storage like S3.

All methods accept str | Path for path arguments.

Source code in src/idfkit/simulation/fs.py
@runtime_checkable
class AsyncFileSystem(Protocol):
    """Protocol for async file system operations used by the async simulation module.

    This is the async counterpart to [FileSystem][idfkit.simulation.fs.FileSystem].  Use this with
    [async_simulate][idfkit.simulation.async_runner.async_simulate] and the async batch
    functions to avoid blocking the event loop during file I/O — especially
    important for network-backed storage like S3.

    All methods accept ``str | Path`` for path arguments.
    """

    async def read_bytes(self, path: str | Path) -> bytes:
        """Read a file as raw bytes.

        Args:
            path: Path to the file.

        Returns:
            The file contents as bytes.
        """
        ...

    async def write_bytes(self, path: str | Path, data: bytes) -> None:
        """Write raw bytes to a file.

        Args:
            path: Path to the file.
            data: Bytes to write.
        """
        ...

    async def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
        """Read a file as text.

        Args:
            path: Path to the file.
            encoding: Text encoding (default ``"utf-8"``).

        Returns:
            The file contents as a string.
        """
        ...

    async def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
        """Write text to a file.

        Args:
            path: Path to the file.
            text: Text to write.
            encoding: Text encoding (default ``"utf-8"``).
        """
        ...

    async def exists(self, path: str | Path) -> bool:
        """Check whether a file exists.

        Args:
            path: Path to check.

        Returns:
            True if the file exists.
        """
        ...

    async def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
        """Create directories recursively.

        Args:
            path: Directory path to create.
            exist_ok: If True, do not raise if the directory already exists.
        """
        ...

    async def copy(self, src: str | Path, dst: str | Path) -> None:
        """Copy a file from *src* to *dst*.

        Args:
            src: Source file path.
            dst: Destination file path.
        """
        ...

    async def glob(self, path: str | Path, pattern: str) -> list[str]:
        """List files matching a glob pattern under *path*.

        Args:
            path: Base directory.
            pattern: Glob pattern (e.g. ``"*.sql"``).

        Returns:
            List of matching file paths as strings.
        """
        ...

    async def remove(self, path: str | Path) -> None:
        """Remove a file.

        Args:
            path: Path to the file to remove.
        """
        ...
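
Any object with matching `async` methods conforms. As an illustration of how a custom async backend could be built over a blocking client, the sketch below (hypothetical names, only two methods shown) delegates work to threads via `asyncio.to_thread`; AsyncLocalFileSystem, documented further down, applies the same idea to local disk:

```python
import asyncio
import tempfile
from pathlib import Path

from idfkit.simulation.fs import FileSystem, LocalFileSystem


class ThreadedAsyncAdapter:
    """Illustrative only: run any blocking FileSystem in worker threads."""

    def __init__(self, sync_fs: FileSystem) -> None:
        self._fs = sync_fs

    async def read_bytes(self, path: str | Path) -> bytes:
        return await asyncio.to_thread(self._fs.read_bytes, path)

    async def write_bytes(self, path: str | Path, data: bytes) -> None:
        await asyncio.to_thread(self._fs.write_bytes, path, data)

    # read_text, write_text, exists, makedirs, copy, glob and remove
    # would follow the same delegation pattern.


async def main() -> None:
    fs = ThreadedAsyncAdapter(LocalFileSystem())
    with tempfile.TemporaryDirectory() as tmp:
        target = Path(tmp) / "example.bin"
        await fs.write_bytes(target, b"\x00\x01")
        print(await fs.read_bytes(target))


asyncio.run(main())
```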

read_bytes(path) async

Read a file as raw bytes.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |

Returns:

| Type | Description |
| --- | --- |
| `bytes` | The file contents as bytes. |

Source code in src/idfkit/simulation/fs.py
async def read_bytes(self, path: str | Path) -> bytes:
    """Read a file as raw bytes.

    Args:
        path: Path to the file.

    Returns:
        The file contents as bytes.
    """
    ...

write_bytes(path, data) async

Write raw bytes to a file.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |
| `data` | `bytes` | Bytes to write. | *required* |

Source code in src/idfkit/simulation/fs.py
async def write_bytes(self, path: str | Path, data: bytes) -> None:
    """Write raw bytes to a file.

    Args:
        path: Path to the file.
        data: Bytes to write.
    """
    ...

read_text(path, encoding='utf-8') async

Read a file as text.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |
| `encoding` | `str` | Text encoding (default `"utf-8"`). | `'utf-8'` |

Returns:

| Type | Description |
| --- | --- |
| `str` | The file contents as a string. |

Source code in src/idfkit/simulation/fs.py
async def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
    """Read a file as text.

    Args:
        path: Path to the file.
        encoding: Text encoding (default ``"utf-8"``).

    Returns:
        The file contents as a string.
    """
    ...

write_text(path, text, encoding='utf-8') async

Write text to a file.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file. | *required* |
| `text` | `str` | Text to write. | *required* |
| `encoding` | `str` | Text encoding (default `"utf-8"`). | `'utf-8'` |

Source code in src/idfkit/simulation/fs.py
async def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
    """Write text to a file.

    Args:
        path: Path to the file.
        text: Text to write.
        encoding: Text encoding (default ``"utf-8"``).
    """
    ...

exists(path) async

Check whether a file exists.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to check. | *required* |

Returns:

| Type | Description |
| --- | --- |
| `bool` | True if the file exists. |

Source code in src/idfkit/simulation/fs.py
async def exists(self, path: str | Path) -> bool:
    """Check whether a file exists.

    Args:
        path: Path to check.

    Returns:
        True if the file exists.
    """
    ...

makedirs(path, *, exist_ok=False) async

Create directories recursively.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Directory path to create. | *required* |
| `exist_ok` | `bool` | If True, do not raise if the directory already exists. | `False` |

Source code in src/idfkit/simulation/fs.py
async def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
    """Create directories recursively.

    Args:
        path: Directory path to create.
        exist_ok: If True, do not raise if the directory already exists.
    """
    ...

copy(src, dst) async

Copy a file from src to dst.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `src` | `str \| Path` | Source file path. | *required* |
| `dst` | `str \| Path` | Destination file path. | *required* |

Source code in src/idfkit/simulation/fs.py
async def copy(self, src: str | Path, dst: str | Path) -> None:
    """Copy a file from *src* to *dst*.

    Args:
        src: Source file path.
        dst: Destination file path.
    """
    ...

glob(path, pattern) async

List files matching a glob pattern under path.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Base directory. | *required* |
| `pattern` | `str` | Glob pattern (e.g. `"*.sql"`). | *required* |

Returns:

| Type | Description |
| --- | --- |
| `list[str]` | List of matching file paths as strings. |

Source code in src/idfkit/simulation/fs.py
async def glob(self, path: str | Path, pattern: str) -> list[str]:
    """List files matching a glob pattern under *path*.

    Args:
        path: Base directory.
        pattern: Glob pattern (e.g. ``"*.sql"``).

    Returns:
        List of matching file paths as strings.
    """
    ...

remove(path) async

Remove a file.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `path` | `str \| Path` | Path to the file to remove. | *required* |

Source code in src/idfkit/simulation/fs.py
async def remove(self, path: str | Path) -> None:
    """Remove a file.

    Args:
        path: Path to the file to remove.
    """
    ...

LocalFileSystem

idfkit.simulation.fs.LocalFileSystem

File system implementation backed by pathlib and shutil.

Source code in src/idfkit/simulation/fs.py
class LocalFileSystem:
    """File system implementation backed by [pathlib][] and [shutil][]."""

    def read_bytes(self, path: str | Path) -> bytes:
        """Read a file as raw bytes."""
        return Path(path).read_bytes()

    def write_bytes(self, path: str | Path, data: bytes) -> None:
        """Write raw bytes to a file."""
        Path(path).write_bytes(data)

    def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
        """Read a file as text."""
        return Path(path).read_text(encoding=encoding)

    def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
        """Write text to a file."""
        Path(path).write_text(text, encoding=encoding)

    def exists(self, path: str | Path) -> bool:
        """Check whether a file exists."""
        return Path(path).exists()

    def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
        """Create directories recursively."""
        Path(path).mkdir(parents=True, exist_ok=exist_ok)

    def copy(self, src: str | Path, dst: str | Path) -> None:
        """Copy a file from *src* to *dst*."""
        shutil.copy2(str(src), str(dst))

    def glob(self, path: str | Path, pattern: str) -> list[str]:
        """List files matching a glob pattern under *path*."""
        return [str(p) for p in Path(path).glob(pattern)]

    def remove(self, path: str | Path) -> None:
        """Remove a file."""
        Path(path).unlink()
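
A quick round trip in a temporary directory (file names and contents below are placeholders):

```python
import tempfile
from pathlib import Path

from idfkit.simulation.fs import LocalFileSystem

fs = LocalFileSystem()
with tempfile.TemporaryDirectory() as tmp:
    run_dir = Path(tmp) / "run-001"
    fs.makedirs(run_dir, exist_ok=True)
    fs.write_text(run_dir / "model.idf", "Version,24.1;")   # placeholder content
    fs.copy(run_dir / "model.idf", run_dir / "backup.idf")
    print(fs.glob(run_dir, "*.idf"))                        # two matching paths
    print(fs.read_text(run_dir / "model.idf"))
```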

read_bytes(path)

Read a file as raw bytes.

Source code in src/idfkit/simulation/fs.py
def read_bytes(self, path: str | Path) -> bytes:
    """Read a file as raw bytes."""
    return Path(path).read_bytes()

write_bytes(path, data)

Write raw bytes to a file.

Source code in src/idfkit/simulation/fs.py
def write_bytes(self, path: str | Path, data: bytes) -> None:
    """Write raw bytes to a file."""
    Path(path).write_bytes(data)

read_text(path, encoding='utf-8')

Read a file as text.

Source code in src/idfkit/simulation/fs.py
def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
    """Read a file as text."""
    return Path(path).read_text(encoding=encoding)

write_text(path, text, encoding='utf-8')

Write text to a file.

Source code in src/idfkit/simulation/fs.py
def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
    """Write text to a file."""
    Path(path).write_text(text, encoding=encoding)

exists(path)

Check whether a file exists.

Source code in src/idfkit/simulation/fs.py
def exists(self, path: str | Path) -> bool:
    """Check whether a file exists."""
    return Path(path).exists()

makedirs(path, *, exist_ok=False)

Create directories recursively.

Source code in src/idfkit/simulation/fs.py
def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
    """Create directories recursively."""
    Path(path).mkdir(parents=True, exist_ok=exist_ok)

copy(src, dst)

Copy a file from src to dst.

Source code in src/idfkit/simulation/fs.py
def copy(self, src: str | Path, dst: str | Path) -> None:
    """Copy a file from *src* to *dst*."""
    shutil.copy2(str(src), str(dst))

glob(path, pattern)

List files matching a glob pattern under path.

Source code in src/idfkit/simulation/fs.py
def glob(self, path: str | Path, pattern: str) -> list[str]:
    """List files matching a glob pattern under *path*."""
    return [str(p) for p in Path(path).glob(pattern)]

remove(path)

Remove a file.

Source code in src/idfkit/simulation/fs.py
def remove(self, path: str | Path) -> None:
    """Remove a file."""
    Path(path).unlink()

AsyncLocalFileSystem

idfkit.simulation.fs.AsyncLocalFileSystem

Non-blocking local file system using asyncio.to_thread.

Wraps LocalFileSystem so that each blocking I/O call runs in the default executor, keeping the event loop free.

Source code in src/idfkit/simulation/fs.py
class AsyncLocalFileSystem:
    """Non-blocking local file system using [asyncio.to_thread][].

    Wraps [LocalFileSystem][idfkit.simulation.fs.LocalFileSystem] so that each blocking I/O call runs in
    the default executor, keeping the event loop free.
    """

    def __init__(self) -> None:
        self._sync = LocalFileSystem()

    async def read_bytes(self, path: str | Path) -> bytes:
        """Read a file as raw bytes without blocking the event loop."""
        return await asyncio.to_thread(self._sync.read_bytes, path)

    async def write_bytes(self, path: str | Path, data: bytes) -> None:
        """Write raw bytes to a file without blocking the event loop."""
        await asyncio.to_thread(self._sync.write_bytes, path, data)

    async def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
        """Read a file as text without blocking the event loop."""
        return await asyncio.to_thread(self._sync.read_text, path, encoding)

    async def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
        """Write text to a file without blocking the event loop."""
        await asyncio.to_thread(self._sync.write_text, path, text, encoding)

    async def exists(self, path: str | Path) -> bool:
        """Check whether a file exists without blocking the event loop."""
        return await asyncio.to_thread(self._sync.exists, path)

    async def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
        """Create directories recursively without blocking the event loop."""
        await asyncio.to_thread(self._sync.makedirs, path, exist_ok=exist_ok)

    async def copy(self, src: str | Path, dst: str | Path) -> None:
        """Copy a file without blocking the event loop."""
        await asyncio.to_thread(self._sync.copy, src, dst)

    async def glob(self, path: str | Path, pattern: str) -> list[str]:
        """List files matching a glob pattern without blocking the event loop."""
        return await asyncio.to_thread(self._sync.glob, path, pattern)

    async def remove(self, path: str | Path) -> None:
        """Remove a file without blocking the event loop."""
        await asyncio.to_thread(self._sync.remove, path)
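
Because each call runs in a worker thread, independent operations can be overlapped with `asyncio.gather` while the event loop stays responsive. A small sketch with placeholder file names:

```python
import asyncio
import tempfile
from pathlib import Path

from idfkit.simulation.fs import AsyncLocalFileSystem


async def main() -> None:
    fs = AsyncLocalFileSystem()
    with tempfile.TemporaryDirectory() as tmp:
        base = Path(tmp) / "batch"
        await fs.makedirs(base, exist_ok=True)
        # Writes run in worker threads, so the event loop is never blocked.
        await asyncio.gather(
            *(fs.write_text(base / f"case-{i}.txt", f"case {i}") for i in range(3))
        )
        print(await fs.glob(base, "*.txt"))


asyncio.run(main())
```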

read_bytes(path) async

Read a file as raw bytes without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def read_bytes(self, path: str | Path) -> bytes:
    """Read a file as raw bytes without blocking the event loop."""
    return await asyncio.to_thread(self._sync.read_bytes, path)

write_bytes(path, data) async

Write raw bytes to a file without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def write_bytes(self, path: str | Path, data: bytes) -> None:
    """Write raw bytes to a file without blocking the event loop."""
    await asyncio.to_thread(self._sync.write_bytes, path, data)

read_text(path, encoding='utf-8') async

Read a file as text without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
    """Read a file as text without blocking the event loop."""
    return await asyncio.to_thread(self._sync.read_text, path, encoding)

write_text(path, text, encoding='utf-8') async

Write text to a file without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
    """Write text to a file without blocking the event loop."""
    await asyncio.to_thread(self._sync.write_text, path, text, encoding)

exists(path) async

Check whether a file exists without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def exists(self, path: str | Path) -> bool:
    """Check whether a file exists without blocking the event loop."""
    return await asyncio.to_thread(self._sync.exists, path)

makedirs(path, *, exist_ok=False) async

Create directories recursively without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
    """Create directories recursively without blocking the event loop."""
    await asyncio.to_thread(self._sync.makedirs, path, exist_ok=exist_ok)

copy(src, dst) async

Copy a file without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def copy(self, src: str | Path, dst: str | Path) -> None:
    """Copy a file without blocking the event loop."""
    await asyncio.to_thread(self._sync.copy, src, dst)

glob(path, pattern) async

List files matching a glob pattern without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def glob(self, path: str | Path, pattern: str) -> list[str]:
    """List files matching a glob pattern without blocking the event loop."""
    return await asyncio.to_thread(self._sync.glob, path, pattern)

remove(path) async

Remove a file without blocking the event loop.

Source code in src/idfkit/simulation/fs.py
async def remove(self, path: str | Path) -> None:
    """Remove a file without blocking the event loop."""
    await asyncio.to_thread(self._sync.remove, path)

S3FileSystem

idfkit.simulation.fs.S3FileSystem

File system implementation backed by Amazon S3.

Requires the boto3 package (install via pip install idfkit[s3]).

This backend enables cloud-native simulation workflows where results are stored directly in S3 for later retrieval. EnergyPlus runs locally in a temporary directory, then results are uploaded to S3 after completion.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `bucket` | `str` | S3 bucket name. | *required* |
| `prefix` | `str` | Optional key prefix prepended to all paths. Use this to namespace simulations (e.g., `"project-x/batch-42/"`). | `''` |
| `**boto_kwargs` | `Any` | Additional keyword arguments passed to `boto3.client("s3", ...)`. Common options: `region_name` (AWS region, e.g. `"us-east-1"`); `endpoint_url` (custom endpoint for S3-compatible services such as MinIO or LocalStack); `aws_access_key_id` / `aws_secret_access_key` (explicit credentials; normally use IAM roles or environment variables). | `{}` |

Examples:

# Basic usage
fs = S3FileSystem(bucket="my-bucket", prefix="simulations/")

# With MinIO (S3-compatible)
fs = S3FileSystem(
    bucket="local-bucket",
    endpoint_url="http://localhost:9000",
    aws_access_key_id="minioadmin",
    aws_secret_access_key="minioadmin",
)

# Use with simulate()
result = simulate(model, weather, output_dir="run-001", fs=fs)
Source code in src/idfkit/simulation/fs.py
class S3FileSystem:
    """File system implementation backed by Amazon S3.

    Requires the ``boto3`` package (install via ``pip install idfkit[s3]``).

    This backend enables cloud-native simulation workflows where results are
    stored directly in S3 for later retrieval. EnergyPlus runs locally in a
    temporary directory, then results are uploaded to S3 after completion.

    Args:
        bucket: S3 bucket name.
        prefix: Optional key prefix prepended to all paths. Use this to
            namespace simulations (e.g., ``"project-x/batch-42/"``).
        **boto_kwargs: Additional keyword arguments passed to
            ``boto3.client("s3", ...)``. Common options include:

            - ``region_name``: AWS region (e.g., ``"us-east-1"``)
            - ``endpoint_url``: Custom endpoint for S3-compatible services
              (MinIO, LocalStack, etc.)
            - ``aws_access_key_id``, ``aws_secret_access_key``: Explicit
              credentials (normally use IAM roles or environment variables)

    Examples:
        ```python
        # Basic usage
        fs = S3FileSystem(bucket="my-bucket", prefix="simulations/")

        # With MinIO (S3-compatible)
        fs = S3FileSystem(
            bucket="local-bucket",
            endpoint_url="http://localhost:9000",
            aws_access_key_id="minioadmin",
            aws_secret_access_key="minioadmin",
        )

        # Use with simulate()
        result = simulate(model, weather, output_dir="run-001", fs=fs)
        ```
    """

    def __init__(self, bucket: str, prefix: str = "", **boto_kwargs: Any) -> None:
        try:
            import boto3  # type: ignore[import-not-found]
        except ImportError:
            msg = "boto3 is required for S3FileSystem. Install it with: pip install idfkit[s3]"
            raise ImportError(msg) from None
        _boto3: Any = boto3
        self._bucket = bucket
        self._prefix = prefix.strip("/")
        self._client: S3Client = _boto3.client("s3", **boto_kwargs)

    def _key(self, path: str | Path) -> str:
        """Build an S3 key by prepending the configured prefix.

        Args:
            path: Logical file path.

        Returns:
            The full S3 object key.
        """
        raw = str(path).lstrip("/")
        if self._prefix:
            return f"{self._prefix}/{raw}"
        return raw

    def read_bytes(self, path: str | Path) -> bytes:
        """Read a file as raw bytes from S3."""
        resp = self._client.get_object(Bucket=self._bucket, Key=self._key(path))
        return resp["Body"].read()  # type: ignore[no-any-return]

    def write_bytes(self, path: str | Path, data: bytes) -> None:
        """Write raw bytes to S3."""
        self._client.put_object(Bucket=self._bucket, Key=self._key(path), Body=data)

    def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
        """Read a file as text from S3."""
        return self.read_bytes(path).decode(encoding)

    def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
        """Write text to S3."""
        self.write_bytes(path, text.encode(encoding))

    def exists(self, path: str | Path) -> bool:
        """Check whether an object exists in S3."""
        try:
            self._client.head_object(Bucket=self._bucket, Key=self._key(path))
        except Exception:
            return False
        return True

    def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
        """No-op — S3 has no directory concept."""

    def copy(self, src: str | Path, dst: str | Path) -> None:
        """Copy an object within the same bucket."""
        self._client.copy_object(
            Bucket=self._bucket,
            CopySource={"Bucket": self._bucket, "Key": self._key(src)},
            Key=self._key(dst),
        )

    def glob(self, path: str | Path, pattern: str) -> list[str]:
        """List objects matching a glob pattern under *path*.

        Returns logical paths (without the configured S3 prefix) so that
        they can be passed back to other ``S3FileSystem`` methods which
        prepend the prefix automatically via ``_key()``.
        """
        prefix = self._key(path).rstrip("/") + "/"
        paginator = self._client.get_paginator("list_objects_v2")
        matches: list[str] = []
        # Compute how much of the key is the S3 prefix, so we can strip it.
        logical_base = str(path).strip("/")
        for page in paginator.paginate(Bucket=self._bucket, Prefix=prefix):
            for obj in page.get("Contents", []):
                key = obj.get("Key", "")
                if not key:
                    continue
                # Match only the filename portion against the pattern
                name = key[len(prefix) :]
                if fnmatch.fnmatch(name, pattern):
                    # Return logical path (base/name) that _key() can prefix
                    matches.append(f"{logical_base}/{name}" if logical_base else name)
        return matches

    def remove(self, path: str | Path) -> None:
        """Delete an object from S3."""
        self._client.delete_object(Bucket=self._bucket, Key=self._key(path))
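
A sketch of a direct round trip against a bucket (the bucket name and keys are placeholders; boto3 and credentials with access to the bucket are required). Note that `glob()` returns logical paths with the configured prefix stripped, so its results can be passed straight back to the other methods:

```python
from idfkit.simulation.fs import S3FileSystem

# Placeholder bucket/prefix; requires boto3 and valid AWS credentials.
fs = S3FileSystem(bucket="my-bucket", prefix="simulations/")

fs.write_text("run-001/eplusout.err", "no severe errors")   # placeholder content
print(fs.exists("run-001/eplusout.err"))                    # True

# Logical paths (prefix stripped) feed directly into read_text()/read_bytes():
for logical_path in fs.glob("run-001", "*.err"):
    print(logical_path, "->", len(fs.read_text(logical_path)), "chars")

fs.remove("run-001/eplusout.err")
```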

read_bytes(path)

Read a file as raw bytes from S3.

Source code in src/idfkit/simulation/fs.py
def read_bytes(self, path: str | Path) -> bytes:
    """Read a file as raw bytes from S3."""
    resp = self._client.get_object(Bucket=self._bucket, Key=self._key(path))
    return resp["Body"].read()  # type: ignore[no-any-return]

write_bytes(path, data)

Write raw bytes to S3.

Source code in src/idfkit/simulation/fs.py
def write_bytes(self, path: str | Path, data: bytes) -> None:
    """Write raw bytes to S3."""
    self._client.put_object(Bucket=self._bucket, Key=self._key(path), Body=data)

read_text(path, encoding='utf-8')

Read a file as text from S3.

Source code in src/idfkit/simulation/fs.py
def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
    """Read a file as text from S3."""
    return self.read_bytes(path).decode(encoding)

write_text(path, text, encoding='utf-8')

Write text to S3.

Source code in src/idfkit/simulation/fs.py
def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
    """Write text to S3."""
    self.write_bytes(path, text.encode(encoding))

exists(path)

Check whether an object exists in S3.

Source code in src/idfkit/simulation/fs.py
def exists(self, path: str | Path) -> bool:
    """Check whether an object exists in S3."""
    try:
        self._client.head_object(Bucket=self._bucket, Key=self._key(path))
    except Exception:
        return False
    return True

makedirs(path, *, exist_ok=False)

No-op — S3 has no directory concept.

Source code in src/idfkit/simulation/fs.py
def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
    """No-op — S3 has no directory concept."""

copy(src, dst)

Copy an object within the same bucket.

Source code in src/idfkit/simulation/fs.py
def copy(self, src: str | Path, dst: str | Path) -> None:
    """Copy an object within the same bucket."""
    self._client.copy_object(
        Bucket=self._bucket,
        CopySource={"Bucket": self._bucket, "Key": self._key(src)},
        Key=self._key(dst),
    )

glob(path, pattern)

List objects matching a glob pattern under path.

Returns logical paths (without the configured S3 prefix) so that they can be passed back to other S3FileSystem methods which prepend the prefix automatically via _key().

Source code in src/idfkit/simulation/fs.py
def glob(self, path: str | Path, pattern: str) -> list[str]:
    """List objects matching a glob pattern under *path*.

    Returns logical paths (without the configured S3 prefix) so that
    they can be passed back to other ``S3FileSystem`` methods which
    prepend the prefix automatically via ``_key()``.
    """
    prefix = self._key(path).rstrip("/") + "/"
    paginator = self._client.get_paginator("list_objects_v2")
    matches: list[str] = []
    # Compute how much of the key is the S3 prefix, so we can strip it.
    logical_base = str(path).strip("/")
    for page in paginator.paginate(Bucket=self._bucket, Prefix=prefix):
        for obj in page.get("Contents", []):
            key = obj.get("Key", "")
            if not key:
                continue
            # Match only the filename portion against the pattern
            name = key[len(prefix) :]
            if fnmatch.fnmatch(name, pattern):
                # Return logical path (base/name) that _key() can prefix
                matches.append(f"{logical_base}/{name}" if logical_base else name)
    return matches

remove(path)

Delete an object from S3.

Source code in src/idfkit/simulation/fs.py
def remove(self, path: str | Path) -> None:
    """Delete an object from S3."""
    self._client.delete_object(Bucket=self._bucket, Key=self._key(path))

AsyncS3FileSystem

idfkit.simulation.fs.AsyncS3FileSystem

Async file system implementation backed by Amazon S3 via aiobotocore.

Requires the aiobotocore package (install via pip install idfkit[async-s3]).

This is the non-blocking counterpart to S3FileSystem. Use it with async_simulate and the async batch functions to avoid blocking the event loop during S3 I/O.

The client must be initialised via the async context manager protocol:

```python
async with AsyncS3FileSystem(bucket="my-bucket") as fs:
    result = await async_simulate(model, weather, output_dir="run-001", fs=fs)
```

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `bucket` | `str` | S3 bucket name. | *required* |
| `prefix` | `str` | Optional key prefix prepended to all paths. Use this to namespace simulations (e.g., `"project-x/batch-42/"`). | `''` |
| `**boto_kwargs` | `Any` | Additional keyword arguments passed to `session.create_client("s3", ...)`. Common options: `region_name` (AWS region, e.g. `"us-east-1"`); `endpoint_url` (custom endpoint for S3-compatible services such as MinIO or LocalStack); `aws_access_key_id` / `aws_secret_access_key` (explicit credentials; normally use IAM roles or environment variables). | `{}` |

Examples:

from idfkit.simulation import AsyncS3FileSystem, async_simulate

async with AsyncS3FileSystem(bucket="my-bucket", prefix="sims/") as fs:
    result = await async_simulate(model, weather, output_dir="run-001", fs=fs)
    errors = await result.async_errors()
Source code in src/idfkit/simulation/fs.py
class AsyncS3FileSystem:
    """Async file system implementation backed by Amazon S3 via ``aiobotocore``.

    Requires the ``aiobotocore`` package
    (install via ``pip install idfkit[async-s3]``).

    This is the non-blocking counterpart to [S3FileSystem][idfkit.simulation.fs.S3FileSystem].  Use it
    with [async_simulate][idfkit.simulation.async_runner.async_simulate] and the
    async batch functions to avoid blocking the event loop during S3 I/O.

    The client must be initialised via the async context manager protocol:

        ```python
        async with AsyncS3FileSystem(bucket="my-bucket") as fs:
            result = await async_simulate(model, weather, output_dir="run-001", fs=fs)
        ```

    Args:
        bucket: S3 bucket name.
        prefix: Optional key prefix prepended to all paths. Use this to
            namespace simulations (e.g., ``"project-x/batch-42/"``).
        **boto_kwargs: Additional keyword arguments passed to
            ``session.create_client("s3", ...)``. Common options include:

            - ``region_name``: AWS region (e.g., ``"us-east-1"``)
            - ``endpoint_url``: Custom endpoint for S3-compatible services
              (MinIO, LocalStack, etc.)
            - ``aws_access_key_id``, ``aws_secret_access_key``: Explicit
              credentials (normally use IAM roles or environment variables)

    Examples:
        ```python
        from idfkit.simulation import AsyncS3FileSystem, async_simulate

        async with AsyncS3FileSystem(bucket="my-bucket", prefix="sims/") as fs:
            result = await async_simulate(model, weather, output_dir="run-001", fs=fs)
            errors = await result.async_errors()
        ```
    """

    def __init__(self, bucket: str, prefix: str = "", **boto_kwargs: Any) -> None:
        try:
            from aiobotocore.session import get_session  # type: ignore[import-not-found]
        except ImportError:
            msg = "aiobotocore is required for AsyncS3FileSystem. Install it with: pip install idfkit[async-s3]"
            raise ImportError(msg) from None
        self._bucket = bucket
        self._prefix = prefix.strip("/")
        self._session: Any = get_session()
        self._boto_kwargs = boto_kwargs
        self._client: AsyncS3Client | None = None
        self._client_ctx: Any = None

    async def __aenter__(self) -> AsyncS3FileSystem:
        """Create the aiobotocore S3 client."""
        self._client_ctx = self._session.create_client("s3", **self._boto_kwargs)
        self._client = await self._client_ctx.__aenter__()
        return self

    async def __aexit__(self, *args: Any) -> None:
        """Close the aiobotocore S3 client."""
        if self._client_ctx is not None:
            await self._client_ctx.__aexit__(*args)
            self._client = None
            self._client_ctx = None

    def _ensure_client(self) -> AsyncS3Client:
        if self._client is None:
            msg = (
                "AsyncS3FileSystem client is not initialised. "
                "Use 'async with AsyncS3FileSystem(...) as fs:' to create the client."
            )
            raise RuntimeError(msg)
        return self._client

    def _key(self, path: str | Path) -> str:
        """Build an S3 key by prepending the configured prefix.

        Args:
            path: Logical file path.

        Returns:
            The full S3 object key.
        """
        raw = str(path).lstrip("/")
        if self._prefix:
            return f"{self._prefix}/{raw}"
        return raw

    async def read_bytes(self, path: str | Path) -> bytes:
        """Read a file as raw bytes from S3."""
        client = self._ensure_client()
        resp = await client.get_object(Bucket=self._bucket, Key=self._key(path))
        async with resp["Body"] as stream:
            return await stream.read()  # type: ignore[no-any-return]

    async def write_bytes(self, path: str | Path, data: bytes) -> None:
        """Write raw bytes to S3."""
        client = self._ensure_client()
        await client.put_object(Bucket=self._bucket, Key=self._key(path), Body=data)

    async def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
        """Read a file as text from S3."""
        return (await self.read_bytes(path)).decode(encoding)

    async def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
        """Write text to S3."""
        await self.write_bytes(path, text.encode(encoding))

    async def exists(self, path: str | Path) -> bool:
        """Check whether an object exists in S3."""
        client = self._ensure_client()
        try:
            await client.head_object(Bucket=self._bucket, Key=self._key(path))
        except Exception:
            return False
        return True

    async def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
        """No-op — S3 has no directory concept."""

    async def copy(self, src: str | Path, dst: str | Path) -> None:
        """Copy an object within the same bucket."""
        client = self._ensure_client()
        await client.copy_object(
            Bucket=self._bucket,
            CopySource={"Bucket": self._bucket, "Key": self._key(src)},
            Key=self._key(dst),
        )

    async def glob(self, path: str | Path, pattern: str) -> list[str]:
        """List objects matching a glob pattern under *path*.

        Returns logical paths (without the configured S3 prefix) so that
        they can be passed back to other ``AsyncS3FileSystem`` methods which
        prepend the prefix automatically via ``_key()``.
        """
        client = self._ensure_client()
        prefix = self._key(path).rstrip("/") + "/"
        paginator = client.get_paginator("list_objects_v2")
        logical_base = str(path).strip("/")
        return await self._collect_glob_matches(paginator, prefix, pattern, logical_base)

    async def _collect_glob_matches(
        self,
        paginator: Any,
        prefix: str,
        pattern: str,
        logical_base: str,
    ) -> list[str]:
        """Iterate the paginator and collect matching keys.

        Separated to keep the ``glob`` method's return type fully known
        to the type checker (aiobotocore's paginator yields untyped pages).
        """
        matches: list[str] = []
        async for raw_page in paginator.paginate(Bucket=self._bucket, Prefix=prefix):
            page: dict[str, Any] = raw_page
            contents: list[dict[str, Any]] = page.get("Contents", [])
            for obj in contents:
                key: str = obj.get("Key", "")
                if not key:
                    continue
                name: str = key[len(prefix) :]
                if fnmatch.fnmatch(name, pattern):
                    matches.append(f"{logical_base}/{name}" if logical_base else name)
        return matches

    async def remove(self, path: str | Path) -> None:
        """Delete an object from S3."""
        client = self._ensure_client()
        await client.delete_object(Bucket=self._bucket, Key=self._key(path))
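
A sketch of direct use inside the required `async with` block (bucket name and keys are placeholders; aiobotocore and valid credentials are required):

```python
import asyncio

from idfkit.simulation.fs import AsyncS3FileSystem


async def main() -> None:
    # Placeholder bucket/prefix; requires aiobotocore and valid AWS credentials.
    async with AsyncS3FileSystem(bucket="my-bucket", prefix="sims/") as fs:
        await fs.write_text("run-001/eplusout.err", "no severe errors")
        print(await fs.glob("run-001", "*.err"))   # logical paths, prefix stripped
        await fs.remove("run-001/eplusout.err")


asyncio.run(main())
```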

read_bytes(path) async

Read a file as raw bytes from S3.

Source code in src/idfkit/simulation/fs.py
async def read_bytes(self, path: str | Path) -> bytes:
    """Read a file as raw bytes from S3."""
    client = self._ensure_client()
    resp = await client.get_object(Bucket=self._bucket, Key=self._key(path))
    async with resp["Body"] as stream:
        return await stream.read()  # type: ignore[no-any-return]

write_bytes(path, data) async

Write raw bytes to S3.

Source code in src/idfkit/simulation/fs.py
async def write_bytes(self, path: str | Path, data: bytes) -> None:
    """Write raw bytes to S3."""
    client = self._ensure_client()
    await client.put_object(Bucket=self._bucket, Key=self._key(path), Body=data)

read_text(path, encoding='utf-8') async

Read a file as text from S3.

Source code in src/idfkit/simulation/fs.py
async def read_text(self, path: str | Path, encoding: str = "utf-8") -> str:
    """Read a file as text from S3."""
    return (await self.read_bytes(path)).decode(encoding)

write_text(path, text, encoding='utf-8') async

Write text to S3.

Source code in src/idfkit/simulation/fs.py
async def write_text(self, path: str | Path, text: str, encoding: str = "utf-8") -> None:
    """Write text to S3."""
    await self.write_bytes(path, text.encode(encoding))

exists(path) async

Check whether an object exists in S3.

Source code in src/idfkit/simulation/fs.py
async def exists(self, path: str | Path) -> bool:
    """Check whether an object exists in S3."""
    client = self._ensure_client()
    try:
        await client.head_object(Bucket=self._bucket, Key=self._key(path))
    except Exception:
        return False
    return True

makedirs(path, *, exist_ok=False) async

No-op — S3 has no directory concept.

Source code in src/idfkit/simulation/fs.py
async def makedirs(self, path: str | Path, *, exist_ok: bool = False) -> None:
    """No-op — S3 has no directory concept."""

copy(src, dst) async

Copy an object within the same bucket.

Source code in src/idfkit/simulation/fs.py
async def copy(self, src: str | Path, dst: str | Path) -> None:
    """Copy an object within the same bucket."""
    client = self._ensure_client()
    await client.copy_object(
        Bucket=self._bucket,
        CopySource={"Bucket": self._bucket, "Key": self._key(src)},
        Key=self._key(dst),
    )

glob(path, pattern) async

List objects matching a glob pattern under path.

Returns logical paths (without the configured S3 prefix) so that they can be passed back to other AsyncS3FileSystem methods which prepend the prefix automatically via _key().

Source code in src/idfkit/simulation/fs.py
async def glob(self, path: str | Path, pattern: str) -> list[str]:
    """List objects matching a glob pattern under *path*.

    Returns logical paths (without the configured S3 prefix) so that
    they can be passed back to other ``AsyncS3FileSystem`` methods which
    prepend the prefix automatically via ``_key()``.
    """
    client = self._ensure_client()
    prefix = self._key(path).rstrip("/") + "/"
    paginator = client.get_paginator("list_objects_v2")
    logical_base = str(path).strip("/")
    return await self._collect_glob_matches(paginator, prefix, pattern, logical_base)

remove(path) async

Delete an object from S3.

Source code in src/idfkit/simulation/fs.py
async def remove(self, path: str | Path) -> None:
    """Delete an object from S3."""
    client = self._ensure_client()
    await client.delete_object(Bucket=self._bucket, Key=self._key(path))