Skip to content

anypinn.core.config

Configuration dataclasses for PINN models.

AdamConfig dataclass

Configuration for the Adam optimizer.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class AdamConfig:
    """
    Configuration for the Adam optimizer.
    """

    lr: float = 1e-3
    betas: tuple[float, float] = (0.9, 0.999)
    weight_decay: float = 0.0

    def __post_init__(self) -> None:
        if self.lr <= 0:
            raise ValueError(f"lr must be positive, got {self.lr}.")
        if self.weight_decay < 0:
            raise ValueError(f"weight_decay must be non-negative, got {self.weight_decay}.")
        if not (0 < self.betas[0] < 1):
            raise ValueError(f"betas[0] must be in (0, 1), got {self.betas[0]}.")
        if not (0 < self.betas[1] < 1):
            raise ValueError(f"betas[1] must be in (0, 1), got {self.betas[1]}.")

betas: tuple[float, float] = (0.9, 0.999) class-attribute instance-attribute

lr: float = 0.001 class-attribute instance-attribute

weight_decay: float = 0.0 class-attribute instance-attribute

__init__(*, lr: float = 0.001, betas: tuple[float, float] = (0.9, 0.999), weight_decay: float = 0.0) -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    # Validate optimizer hyperparameters eagerly at construction time.
    if self.lr <= 0:
        raise ValueError(f"lr must be positive, got {self.lr}.")
    if self.weight_decay < 0:
        raise ValueError(f"weight_decay must be non-negative, got {self.weight_decay}.")
    # Both Adam momentum coefficients must lie strictly inside (0, 1).
    if not (0 < self.betas[0] < 1):
        raise ValueError(f"betas[0] must be in (0, 1), got {self.betas[0]}.")
    if not (0 < self.betas[1] < 1):
        raise ValueError(f"betas[1] must be in (0, 1), got {self.betas[1]}.")

CosineAnnealingConfig dataclass

Configuration for Cosine Annealing LR Scheduler.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class CosineAnnealingConfig:
    """
    Configuration for Cosine Annealing LR Scheduler.
    """

    T_max: int
    eta_min: float = 0.0

    def __post_init__(self) -> None:
        if self.T_max <= 0:
            raise ValueError(f"T_max must be positive, got {self.T_max}.")

T_max: int instance-attribute

eta_min: float = 0.0 class-attribute instance-attribute

__init__(*, T_max: int, eta_min: float = 0.0) -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    # The annealing horizon must be a positive number of steps.
    if self.T_max <= 0:
        raise ValueError(f"T_max must be positive, got {self.T_max}.")

EarlyStoppingConfig dataclass

Configuration for Early Stopping callback.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class EarlyStoppingConfig:
    """
    Configuration for Early Stopping callback.
    """

    patience: int
    mode: Literal["min", "max"]

    def __post_init__(self) -> None:
        if self.patience <= 0:
            raise ValueError(f"patience must be positive, got {self.patience}.")

mode: Literal['min', 'max'] instance-attribute

patience: int instance-attribute

__init__(*, patience: int, mode: Literal['min', 'max']) -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    # A non-positive patience is meaningless for early stopping.
    if self.patience <= 0:
        raise ValueError(f"patience must be positive, got {self.patience}.")

GenerationConfig dataclass

Bases: TrainingDataConfig

Configuration for data generation.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class GenerationConfig(TrainingDataConfig):
    """
    Configuration for data generation.

    Attributes:
        x: Input coordinates at which synthetic data are generated.
        noise_level: Magnitude of noise applied to generated data;
            must be non-negative.
        args_to_train: Registry of arguments to train.

    Raises:
        ValueError: If ``noise_level`` is negative, or any base-class
            constraint is violated.
    """

    x: Tensor
    noise_level: float
    args_to_train: ArgsRegistry

    def __post_init__(self) -> None:
        # Keep the batch/collocation validation from TrainingDataConfig.
        super().__post_init__()
        if self.noise_level < 0:
            raise ValueError(f"noise_level must be non-negative, got {self.noise_level}.")

args_to_train: ArgsRegistry instance-attribute

noise_level: float instance-attribute

x: Tensor instance-attribute

__init__(*, batch_size: int, data_ratio: int | float, collocations: int, collocation_sampler: CollocationStrategies = 'random', collocation_seed: int | None = None, x: Tensor, noise_level: float, args_to_train: ArgsRegistry) -> None

IngestionConfig dataclass

Bases: TrainingDataConfig

Configuration for data ingestion from files. If x_column is None, the data is assumed to be evenly spaced.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class IngestionConfig(TrainingDataConfig):
    """
    Configuration for data ingestion from files.
    If x_column is None, the data is assumed to be evenly spaced.
    """

    # Path to the data file to load ("df" suggests a dataframe —
    # confirm the expected format against the ingestion code.
    df_path: Path
    # Optional transform applied to x values (e.g. parsing/scaling) — TODO confirm usage.
    x_transform: Callable[[Any], Any] | None = None
    # Column holding the x coordinate; None means evenly spaced data (see docstring).
    x_column: str | None = None
    # Columns holding the observed outputs.
    y_columns: list[str]

df_path: Path instance-attribute

x_column: str | None = None class-attribute instance-attribute

x_transform: Callable[[Any], Any] | None = None class-attribute instance-attribute

y_columns: list[str] instance-attribute

__init__(*, batch_size: int, data_ratio: int | float, collocations: int, collocation_sampler: CollocationStrategies = 'random', collocation_seed: int | None = None, df_path: Path, x_transform: Callable[[Any], Any] | None = None, x_column: str | None = None, y_columns: list[str]) -> None

LBFGSConfig dataclass

Configuration for the L-BFGS optimizer.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class LBFGSConfig:
    """
    Configuration for the L-BFGS optimizer.
    """

    lr: float = 1.0
    max_iter: int = 20
    max_eval: int | None = None
    history_size: int = 100
    line_search_fn: str | None = "strong_wolfe"

    def __post_init__(self) -> None:
        if self.lr <= 0:
            raise ValueError(f"lr must be positive, got {self.lr}.")
        if self.max_iter <= 0:
            raise ValueError(f"max_iter must be positive, got {self.max_iter}.")
        if self.history_size <= 0:
            raise ValueError(f"history_size must be positive, got {self.history_size}.")

history_size: int = 100 class-attribute instance-attribute

line_search_fn: str | None = 'strong_wolfe' class-attribute instance-attribute

lr: float = 1.0 class-attribute instance-attribute

max_eval: int | None = None class-attribute instance-attribute

max_iter: int = 20 class-attribute instance-attribute

__init__(*, lr: float = 1.0, max_iter: int = 20, max_eval: int | None = None, history_size: int = 100, line_search_fn: str | None = 'strong_wolfe') -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    # Sanity-check the L-BFGS hyperparameters at construction time.
    if self.lr <= 0:
        raise ValueError(f"lr must be positive, got {self.lr}.")
    if self.max_iter <= 0:
        raise ValueError(f"max_iter must be positive, got {self.max_iter}.")
    # NOTE(review): max_eval is not validated here; a non-positive value
    # slips through — confirm whether that is intentional.
    if self.history_size <= 0:
        raise ValueError(f"history_size must be positive, got {self.history_size}.")

MLPConfig dataclass

Configuration for a Multi-Layer Perceptron (MLP).

Attributes:

Name Type Description
in_dim int

Dimension of input layer.

out_dim int

Dimension of output layer.

hidden_layers list[int]

List of dimensions for hidden layers.

activation Activations

Activation function to use between layers.

output_activation Activations | None

Optional activation function for the output layer.

encode Callable[[Tensor], Tensor] | None

Optional function to encode inputs before passing to MLP.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class MLPConfig:
    """
    Configuration for a Multi-Layer Perceptron (MLP).

    Attributes:
        in_dim: Dimension of input layer; must be positive.
        out_dim: Dimension of output layer; must be positive.
        hidden_layers: List of dimensions for hidden layers; each must be positive.
        activation: Activation function to use between layers.
        output_activation: Optional activation function for the output layer.
        encode: Optional function to encode inputs before passing to MLP.

    Raises:
        ValueError: If any layer dimension is not positive.
    """

    in_dim: int
    out_dim: int
    hidden_layers: list[int]
    activation: Activations
    output_activation: Activations | None = None
    encode: Callable[[Tensor], Tensor] | None = None

    def __post_init__(self) -> None:
        # Layer widths must be positive for the network to be constructible;
        # validate here, consistent with the other config classes.
        if self.in_dim <= 0:
            raise ValueError(f"in_dim must be positive, got {self.in_dim}.")
        if self.out_dim <= 0:
            raise ValueError(f"out_dim must be positive, got {self.out_dim}.")
        for i, width in enumerate(self.hidden_layers):
            if width <= 0:
                raise ValueError(f"hidden_layers[{i}] must be positive, got {width}.")

activation: Activations instance-attribute

encode: Callable[[Tensor], Tensor] | None = None class-attribute instance-attribute

hidden_layers: list[int] instance-attribute

in_dim: int instance-attribute

out_dim: int instance-attribute

output_activation: Activations | None = None class-attribute instance-attribute

__init__(*, in_dim: int, out_dim: int, hidden_layers: list[int], activation: Activations, output_activation: Activations | None = None, encode: Callable[[Tensor], Tensor] | None = None) -> None

PINNHyperparameters dataclass

Aggregated hyperparameters for the PINN model.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class PINNHyperparameters:
    """
    Aggregated hyperparameters for the PINN model.

    Attributes:
        lr: Base learning rate; must be positive.
        training_data: Data-source configuration (ingestion or generation).
        fields_config: MLP configuration for the fields network.
        params_config: Configuration for the trained parameters.
        max_epochs: Optional epoch cap; must be positive when provided.
        gradient_clip_val: Optional gradient-clipping value; must be
            positive when provided.
        criterion: Loss criterion identifier.
        optimizer: Optional optimizer configuration.
        scheduler: Optional LR-scheduler configuration.
        early_stopping: Optional early-stopping configuration.
        smma_stopping: Optional SMMA-stopping configuration.

    Raises:
        ValueError: If ``lr``, ``max_epochs`` or ``gradient_clip_val``
            violate their constraints.
    """

    lr: float
    training_data: IngestionConfig | GenerationConfig
    fields_config: MLPConfig
    params_config: MLPConfig | ScalarConfig
    max_epochs: int | None = None
    gradient_clip_val: float | None = None
    criterion: Criteria = "mse"
    optimizer: AdamConfig | LBFGSConfig | None = None
    scheduler: ReduceLROnPlateauConfig | CosineAnnealingConfig | None = None
    early_stopping: EarlyStoppingConfig | None = None
    smma_stopping: SMMAStoppingConfig | None = None

    def __post_init__(self) -> None:
        if self.lr <= 0:
            raise ValueError(f"lr must be positive, got {self.lr}.")
        # Optional knobs are only validated when explicitly provided.
        if self.max_epochs is not None and self.max_epochs <= 0:
            raise ValueError(f"max_epochs must be positive, got {self.max_epochs}.")
        if self.gradient_clip_val is not None and self.gradient_clip_val <= 0:
            raise ValueError(
                f"gradient_clip_val must be positive, got {self.gradient_clip_val}."
            )

criterion: Criteria = 'mse' class-attribute instance-attribute

early_stopping: EarlyStoppingConfig | None = None class-attribute instance-attribute

fields_config: MLPConfig instance-attribute

gradient_clip_val: float | None = None class-attribute instance-attribute

lr: float instance-attribute

max_epochs: int | None = None class-attribute instance-attribute

optimizer: AdamConfig | LBFGSConfig | None = None class-attribute instance-attribute

params_config: MLPConfig | ScalarConfig instance-attribute

scheduler: ReduceLROnPlateauConfig | CosineAnnealingConfig | None = None class-attribute instance-attribute

smma_stopping: SMMAStoppingConfig | None = None class-attribute instance-attribute

training_data: IngestionConfig | GenerationConfig instance-attribute

__init__(*, lr: float, training_data: IngestionConfig | GenerationConfig, fields_config: MLPConfig, params_config: MLPConfig | ScalarConfig, max_epochs: int | None = None, gradient_clip_val: float | None = None, criterion: Criteria = 'mse', optimizer: AdamConfig | LBFGSConfig | None = None, scheduler: ReduceLROnPlateauConfig | CosineAnnealingConfig | None = None, early_stopping: EarlyStoppingConfig | None = None, smma_stopping: SMMAStoppingConfig | None = None) -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    # The top-level learning rate must be strictly positive.
    if self.lr <= 0:
        raise ValueError(f"lr must be positive, got {self.lr}.")

ReduceLROnPlateauConfig dataclass

Configuration for Learning Rate Scheduler (ReduceLROnPlateau).

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class ReduceLROnPlateauConfig:
    """
    Configuration for Learning Rate Scheduler (ReduceLROnPlateau).
    """

    mode: Literal["min", "max"]
    factor: float
    patience: int
    threshold: float
    min_lr: float

    def __post_init__(self) -> None:
        if not (0 < self.factor < 1):
            raise ValueError(f"factor must be in (0, 1), got {self.factor}.")
        if self.patience <= 0:
            raise ValueError(f"patience must be positive, got {self.patience}.")

factor: float instance-attribute

min_lr: float instance-attribute

mode: Literal['min', 'max'] instance-attribute

patience: int instance-attribute

threshold: float instance-attribute

__init__(*, mode: Literal['min', 'max'], factor: float, patience: int, threshold: float, min_lr: float) -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    # factor multiplies the LR on plateau, so it must lie strictly in (0, 1).
    if not (0 < self.factor < 1):
        raise ValueError(f"factor must be in (0, 1), got {self.factor}.")
    if self.patience <= 0:
        raise ValueError(f"patience must be positive, got {self.patience}.")

SMMAStoppingConfig dataclass

Configuration for Simple Moving Average Stopping callback.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class SMMAStoppingConfig:
    """
    Configuration for Simple Moving Average Stopping callback.
    """

    window: int
    threshold: float
    lookback: int

    def __post_init__(self) -> None:
        if self.window <= 0:
            raise ValueError(f"window must be positive, got {self.window}.")
        if self.lookback <= 0:
            raise ValueError(f"lookback must be positive, got {self.lookback}.")
        if self.threshold <= 0:
            raise ValueError(f"threshold must be positive, got {self.threshold}.")

lookback: int instance-attribute

threshold: float instance-attribute

window: int instance-attribute

__init__(*, window: int, threshold: float, lookback: int) -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    # All stopping hyperparameters must be strictly positive.
    if self.window <= 0:
        raise ValueError(f"window must be positive, got {self.window}.")
    if self.lookback <= 0:
        raise ValueError(f"lookback must be positive, got {self.lookback}.")
    if self.threshold <= 0:
        raise ValueError(f"threshold must be positive, got {self.threshold}.")

ScalarConfig dataclass

Configuration for a scalar parameter.

Attributes:

Name Type Description
init_value float

Initial value for the parameter.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class ScalarConfig:
    """
    Configuration for a scalar parameter.

    Attributes:
        init_value: Initial value for the parameter.
    """

    # Starting value for the scalar parameter; no range constraint is enforced here.
    init_value: float

init_value: float instance-attribute

__init__(*, init_value: float) -> None

TrainingDataConfig dataclass

Configuration for data loading and batching.

Attributes:

Name Type Description
batch_size int

Number of points per training batch.

data_ratio int | float

Ratio of data to collocation points per batch.

collocations int

Total number of collocation points to generate.

collocation_sampler CollocationStrategies

Sampling strategy for collocation points.

collocation_seed int | None

Optional seed for reproducible collocation sampling.

Source code in src/anypinn/core/config.py
@dataclass(kw_only=True)
class TrainingDataConfig:
    """
    Configuration for data loading and batching.

    Attributes:
        batch_size: Number of points per training batch.
        data_ratio: Ratio of data to collocation points per batch.
        collocations: Total number of collocation points to generate.
        collocation_sampler: Sampling strategy for collocation points.
        collocation_seed: Optional seed for reproducible collocation sampling.
    """

    batch_size: int
    data_ratio: int | float
    collocations: int
    collocation_sampler: CollocationStrategies = "random"
    collocation_seed: int | None = None

    def __post_init__(self) -> None:
        if self.batch_size <= 0:
            raise ValueError(f"batch_size must be positive, got {self.batch_size}.")
        if self.collocations < 0:
            raise ValueError(f"collocations must be non-negative, got {self.collocations}.")
        # data_ratio is interpreted by type: a float is a fraction of the
        # batch, while an int is an absolute count of data points per batch.
        ratio = self.data_ratio
        if isinstance(ratio, float):
            if not 0.0 <= ratio <= 1.0:
                raise ValueError(f"Float data_ratio must be in [0.0, 1.0], got {ratio}.")
        elif not 0 <= ratio <= self.batch_size:
            raise ValueError(
                f"Integer data_ratio must be in [0, {self.batch_size}], got {ratio}."
            )

batch_size: int instance-attribute

collocation_sampler: CollocationStrategies = 'random' class-attribute instance-attribute

collocation_seed: int | None = None class-attribute instance-attribute

collocations: int instance-attribute

data_ratio: int | float instance-attribute

__init__(*, batch_size: int, data_ratio: int | float, collocations: int, collocation_sampler: CollocationStrategies = 'random', collocation_seed: int | None = None) -> None

__post_init__() -> None

Source code in src/anypinn/core/config.py
def __post_init__(self) -> None:
    if self.batch_size <= 0:
        raise ValueError(f"batch_size must be positive, got {self.batch_size}.")
    if self.collocations < 0:
        raise ValueError(f"collocations must be non-negative, got {self.collocations}.")
    # data_ratio is interpreted by type: a float is a fraction of the batch,
    # an int is an absolute number of data points per batch.
    if isinstance(self.data_ratio, float):
        if not (0.0 <= self.data_ratio <= 1.0):
            raise ValueError(f"Float data_ratio must be in [0.0, 1.0], got {self.data_ratio}.")
    else:
        if not (0 <= self.data_ratio <= self.batch_size):
            raise ValueError(
                f"Integer data_ratio must be in [0, {self.batch_size}], got {self.data_ratio}."
            )