diff --git a/docs/source/_rst/_code.rst b/docs/source/_rst/_code.rst index e4a5f8a61..02e8e1242 100644 --- a/docs/source/_rst/_code.rst +++ b/docs/source/_rst/_code.rst @@ -208,7 +208,8 @@ Problems .. toctree:: :titlesonly: - AbstractProblem + ProblemInterface + BaseProblem InverseProblem ParametricProblem SpatialProblem @@ -220,13 +221,13 @@ Problems Zoo .. toctree:: :titlesonly: - AcousticWaveProblem - AdvectionProblem - AllenCahnProblem - DiffusionReactionProblem - HelmholtzProblem - InversePoisson2DSquareProblem - Poisson2DSquareProblem + AcousticWaveProblem + AdvectionProblem + AllenCahnProblem + DiffusionReactionProblem + HelmholtzProblem + InversePoisson2DSquareProblem + Poisson2DSquareProblem SupervisedProblem diff --git a/docs/source/_rst/problem/abstract_problem.rst b/docs/source/_rst/problem/abstract_problem.rst deleted file mode 100644 index ae5e5f26e..000000000 --- a/docs/source/_rst/problem/abstract_problem.rst +++ /dev/null @@ -1,9 +0,0 @@ -AbstractProblem -=============== -.. currentmodule:: pina.problem.abstract_problem - -.. automodule:: pina._src.problem.abstract_problem - -.. autoclass:: pina._src.problem.abstract_problem.AbstractProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/base_problem.rst b/docs/source/_rst/problem/base_problem.rst new file mode 100644 index 000000000..2261a90f7 --- /dev/null +++ b/docs/source/_rst/problem/base_problem.rst @@ -0,0 +1,9 @@ +Base Problem +=============== +.. currentmodule:: pina.problem.base_problem + +.. automodule:: pina._src.problem.base_problem + +.. autoclass:: pina._src.problem.base_problem.BaseProblem + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/problem_interface.rst b/docs/source/_rst/problem/problem_interface.rst new file mode 100644 index 000000000..08136e23c --- /dev/null +++ b/docs/source/_rst/problem/problem_interface.rst @@ -0,0 +1,9 @@ +ProblemInterface +=================== +.. currentmodule:: pina.problem.problem_interface + +.. 
automodule:: pina._src.problem.problem_interface + +.. autoclass:: pina._src.problem.problem_interface.ProblemInterface + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/acoustic_wave.rst b/docs/source/_rst/problem/zoo/acoustic_wave.rst deleted file mode 100644 index 34fd46895..000000000 --- a/docs/source/_rst/problem/zoo/acoustic_wave.rst +++ /dev/null @@ -1,9 +0,0 @@ -AcousticWaveProblem -===================== -.. currentmodule:: pina.problem.zoo.acoustic_wave - -.. automodule:: pina._src.problem.zoo.acoustic_wave - -.. autoclass:: pina._src.problem.zoo.acoustic_wave.AcousticWaveProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/acoustic_wave_problem.rst b/docs/source/_rst/problem/zoo/acoustic_wave_problem.rst new file mode 100644 index 000000000..c6acb93f1 --- /dev/null +++ b/docs/source/_rst/problem/zoo/acoustic_wave_problem.rst @@ -0,0 +1,9 @@ +AcousticWaveProblem +===================== +.. currentmodule:: pina.problem.zoo.acoustic_wave_problem + +.. automodule:: pina._src.problem.zoo.acoustic_wave_problem + +.. autoclass:: pina._src.problem.zoo.acoustic_wave_problem.AcousticWaveProblem + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/advection.rst b/docs/source/_rst/problem/zoo/advection.rst deleted file mode 100644 index 07d0cd45d..000000000 --- a/docs/source/_rst/problem/zoo/advection.rst +++ /dev/null @@ -1,9 +0,0 @@ -AdvectionProblem -================== -.. currentmodule:: pina.problem.zoo.advection - -.. automodule:: pina._src.problem.zoo.advection - -.. autoclass:: pina._src.problem.zoo.advection.AdvectionProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/advection_problem.rst b/docs/source/_rst/problem/zoo/advection_problem.rst new file mode 100644 index 000000000..df37679cb --- /dev/null +++ b/docs/source/_rst/problem/zoo/advection_problem.rst @@ -0,0 +1,9 @@ +AdvectionProblem +================== +.. 
currentmodule:: pina.problem.zoo.advection_problem + +.. automodule:: pina._src.problem.zoo.advection_problem + +.. autoclass:: pina._src.problem.zoo.advection_problem.AdvectionProblem + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/allen_cahn.rst b/docs/source/_rst/problem/zoo/allen_cahn.rst deleted file mode 100644 index 7be2104bf..000000000 --- a/docs/source/_rst/problem/zoo/allen_cahn.rst +++ /dev/null @@ -1,9 +0,0 @@ -AllenCahnProblem -================== -.. currentmodule:: pina.problem.zoo.allen_cahn - -.. automodule:: pina._src.problem.zoo.allen_cahn - -.. autoclass:: pina._src.problem.zoo.allen_cahn.AllenCahnProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/allen_cahn_problem.rst b/docs/source/_rst/problem/zoo/allen_cahn_problem.rst new file mode 100644 index 000000000..463be3a55 --- /dev/null +++ b/docs/source/_rst/problem/zoo/allen_cahn_problem.rst @@ -0,0 +1,9 @@ +AllenCahnProblem +================== +.. currentmodule:: pina.problem.zoo.allen_cahn_problem + +.. automodule:: pina._src.problem.zoo.allen_cahn_problem + +.. autoclass:: pina._src.problem.zoo.allen_cahn_problem.AllenCahnProblem + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/diffusion_reaction.rst b/docs/source/_rst/problem/zoo/diffusion_reaction.rst deleted file mode 100644 index d5269edd7..000000000 --- a/docs/source/_rst/problem/zoo/diffusion_reaction.rst +++ /dev/null @@ -1,9 +0,0 @@ -DiffusionReactionProblem -========================= -.. currentmodule:: pina.problem.zoo.diffusion_reaction - -.. automodule:: pina._src.problem.zoo.diffusion_reaction - -.. 
autoclass:: pina._src.problem.zoo.diffusion_reaction.DiffusionReactionProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/diffusion_reaction_problem.rst b/docs/source/_rst/problem/zoo/diffusion_reaction_problem.rst new file mode 100644 index 000000000..307a56c52 --- /dev/null +++ b/docs/source/_rst/problem/zoo/diffusion_reaction_problem.rst @@ -0,0 +1,9 @@ +DiffusionReactionProblem +========================= +.. currentmodule:: pina.problem.zoo.diffusion_reaction_problem + +.. automodule:: pina._src.problem.zoo.diffusion_reaction_problem + +.. autoclass:: pina._src.problem.zoo.diffusion_reaction_problem.DiffusionReactionProblem + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/helmholtz.rst b/docs/source/_rst/problem/zoo/helmholtz.rst deleted file mode 100644 index 06724f83b..000000000 --- a/docs/source/_rst/problem/zoo/helmholtz.rst +++ /dev/null @@ -1,9 +0,0 @@ -HelmholtzProblem -================== -.. currentmodule:: pina.problem.zoo.helmholtz - -.. automodule:: pina._src.problem.zoo.helmholtz - -.. autoclass:: pina._src.problem.zoo.helmholtz.HelmholtzProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/helmholtz_problem.rst b/docs/source/_rst/problem/zoo/helmholtz_problem.rst new file mode 100644 index 000000000..952578a2b --- /dev/null +++ b/docs/source/_rst/problem/zoo/helmholtz_problem.rst @@ -0,0 +1,9 @@ +HelmholtzProblem +================== +.. currentmodule:: pina.problem.zoo.helmholtz_problem + +.. automodule:: pina._src.problem.zoo.helmholtz_problem + +.. 
autoclass:: pina._src.problem.zoo.helmholtz_problem.HelmholtzProblem + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/inverse_poisson_2d_square.rst b/docs/source/_rst/problem/zoo/inverse_poisson_2d_square.rst deleted file mode 100644 index d4885ff0c..000000000 --- a/docs/source/_rst/problem/zoo/inverse_poisson_2d_square.rst +++ /dev/null @@ -1,9 +0,0 @@ -InversePoisson2DSquareProblem -============================== -.. currentmodule:: pina.problem.zoo.inverse_poisson_2d_square - -.. automodule:: pina._src.problem.zoo.inverse_poisson_2d_square - -.. autoclass:: pina._src.problem.zoo.inverse_poisson_2d_square.InversePoisson2DSquareProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/inverse_poisson_problem.rst b/docs/source/_rst/problem/zoo/inverse_poisson_problem.rst new file mode 100644 index 000000000..503eb21bf --- /dev/null +++ b/docs/source/_rst/problem/zoo/inverse_poisson_problem.rst @@ -0,0 +1,9 @@ +InversePoisson2DSquareProblem +============================== +.. currentmodule:: pina.problem.zoo.inverse_poisson_problem + +.. automodule:: pina._src.problem.zoo.inverse_poisson_problem + +.. autoclass:: pina._src.problem.zoo.inverse_poisson_problem.InversePoisson2DSquareProblem + :members: + :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/poisson_2d_square.rst b/docs/source/_rst/problem/zoo/poisson_2d_square.rst deleted file mode 100644 index 96b5e4397..000000000 --- a/docs/source/_rst/problem/zoo/poisson_2d_square.rst +++ /dev/null @@ -1,9 +0,0 @@ -Poisson2DSquareProblem -======================== -.. currentmodule:: pina.problem.zoo.poisson_2d_square - -.. automodule:: pina._src.problem.zoo.poisson_2d_square - -.. 
autoclass:: pina._src.problem.zoo.poisson_2d_square.Poisson2DSquareProblem - :members: - :show-inheritance: diff --git a/docs/source/_rst/problem/zoo/poisson_problem.rst b/docs/source/_rst/problem/zoo/poisson_problem.rst new file mode 100644 index 000000000..a480a8953 --- /dev/null +++ b/docs/source/_rst/problem/zoo/poisson_problem.rst @@ -0,0 +1,9 @@ +Poisson2DSquareProblem +======================== +.. currentmodule:: pina.problem.zoo.poisson_problem + +.. automodule:: pina._src.problem.zoo.poisson_problem + +.. autoclass:: pina._src.problem.zoo.poisson_problem.Poisson2DSquareProblem + :members: + :show-inheritance: diff --git a/pina/_src/condition/condition.py b/pina/_src/condition/condition.py index 8b2c814ba..71cb80e2f 100644 --- a/pina/_src/condition/condition.py +++ b/pina/_src/condition/condition.py @@ -12,7 +12,7 @@ class Condition: """ The :class:`Condition` class is a core component of the PINA framework that provides a unified interface to define heterogeneous constraints that must - be satisfied by a :class:`~pina.problem.abstract_problem.AbstractProblem`. + be satisfied by a :class:`~pina.problem.base_problem.BaseProblem`. It encapsulates all types of constraints - physical, boundary, initial, or data-driven - that the solver must satisfy during training. The specific diff --git a/pina/_src/condition/condition_base.py b/pina/_src/condition/condition_base.py index 0d1a8cb15..4a7c8c1c8 100644 --- a/pina/_src/condition/condition_base.py +++ b/pina/_src/condition/condition_base.py @@ -42,7 +42,7 @@ def problem(self): Return the problem associated with this condition. :return: Problem associated with this condition. - :rtype: ~pina.problem.abstract_problem.AbstractProblem + :rtype: ~pina.problem.base_problem.BaseProblem """ return self._problem @@ -51,8 +51,8 @@ def problem(self, value): """ Set the problem associated with this condition. 
- :param pina.problem.abstract_problem.AbstractProblem value: The problem - to associate with this condition + :param pina.problem.base_problem.BaseProblem value: The problem to + associate with this condition. """ self._problem = value diff --git a/pina/_src/condition/condition_interface.py b/pina/_src/condition/condition_interface.py index 229b9a025..68898b082 100644 --- a/pina/_src/condition/condition_interface.py +++ b/pina/_src/condition/condition_interface.py @@ -25,7 +25,7 @@ def problem(self): Return the problem associated with this condition. :return: Problem associated with this condition. - :rtype: ~pina.problem.abstract_problem.AbstractProblem + :rtype: ~pina.problem.base_problem.BaseProblem """ @problem.setter @@ -34,7 +34,7 @@ def problem(self, value): """ Set the problem associated with this condition. - :param pina.problem.abstract_problem.AbstractProblem value: The problem + :param pina.problem.base_problem.BaseProblem value: The problem to associate with this condition """ diff --git a/pina/_src/core/trainer.py b/pina/_src/core/trainer.py index d18350d14..f4a3a4f5a 100644 --- a/pina/_src/core/trainer.py +++ b/pina/_src/core/trainer.py @@ -48,7 +48,7 @@ def __init__( :param SolverInterface solver: A :class:`~pina.solver.solver.SolverInterface` solver used to solve a - :class:`~pina.problem.abstract_problem.AbstractProblem`. + :class:`~pina.problem.base_problem.BaseProblem`. :param int batch_size: The number of samples per batch to load. If ``None``, all samples are loaded and data is not batched. Default is ``None``. @@ -184,7 +184,7 @@ def __init__( def _move_to_device(self): """ Moves the ``unknown_parameters`` of an instance of - :class:`~pina.problem.abstract_problem.AbstractProblem` to the + :class:`~pina.problem.base_problem.BaseProblem` to the :class:`Trainer` device. 
""" device = self._accelerator_connector._parallel_devices[0] diff --git a/pina/_src/core/utils.py b/pina/_src/core/utils.py index ea70ed944..d0226ea83 100644 --- a/pina/_src/core/utils.py +++ b/pina/_src/core/utils.py @@ -93,9 +93,9 @@ def labelize_forward(forward, input_variables, output_variables): :param Callable forward: The forward function of a :class:`torch.nn.Module`. :param list[str] input_variables: The names of the input variables of a - :class:`~pina.problem.abstract_problem.AbstractProblem`. + :class:`~pina.problem.base_problem.BaseProblem`. :param list[str] output_variables: The names of the output variables of a - :class:`~pina.problem.abstract_problem.AbstractProblem`. + :class:`~pina.problem.base_problem.BaseProblem`. :return: The decorated forward function. :rtype: Callable """ diff --git a/pina/_src/data/data_module.py b/pina/_src/data/data_module.py index d0fb5989a..4c7ab70c4 100644 --- a/pina/_src/data/data_module.py +++ b/pina/_src/data/data_module.py @@ -84,7 +84,7 @@ def __init__( """ Initialize the object and creating datasets based on the input problem. - :param AbstractProblem problem: The problem containing the data on which + :param BaseProblem problem: The problem containing the data on which to create the datasets and dataloaders. :param float train_size: Fraction of elements in the training split. It must be in the range [0, 1]. 
diff --git a/pina/_src/problem/abstract_problem.py b/pina/_src/problem/abstract_problem.py deleted file mode 100644 index 28bccf089..000000000 --- a/pina/_src/problem/abstract_problem.py +++ /dev/null @@ -1,312 +0,0 @@ -"""Module for the AbstractProblem class.""" - -from abc import ABCMeta, abstractmethod -import warnings -from copy import deepcopy -from pina._src.core.utils import check_consistency -from pina._src.domain.domain_interface import DomainInterface -from pina._src.domain.cartesian_domain import CartesianDomain -from pina._src.condition.domain_equation_condition import ( - DomainEquationCondition, -) -from pina._src.core.label_tensor import LabelTensor -from pina._src.core.utils import merge_tensors, custom_warning_format -from pina._src.condition.condition import Condition - - -class AbstractProblem(metaclass=ABCMeta): - """ - Abstract base class for PINA problems. All specific problem types should - inherit from this class. - - A PINA problem is defined by key components, which typically include output - variables, conditions, and domains over which the conditions are applied. - """ - - def __init__(self): - """ - Initialization of the :class:`AbstractProblem` class. - """ - self._discretised_domains = {} - - # create hook conditions <-> problems - for condition_name in self.conditions: - self.conditions[condition_name].problem = self - - # Store in domains dict all the domains object directly passed to - # ConditionInterface. Done for back compatibility with PINA <0.2 - if not hasattr(self, "domains"): - self.domains = {} - for cond_name, cond in self.conditions.items(): - if isinstance(cond, DomainEquationCondition): - if isinstance(cond.domain, DomainInterface): - self.domains[cond_name] = cond.domain - cond.domain = cond_name - - # # back compatibility 0.1 - # @property - # def input_pts(self): - # """ - # Return a dictionary mapping condition names to their corresponding - # input points. 
If some domains are not sampled, they will not be returned - # and the corresponding condition will be empty. - - # :return: The input points of the problem. - # :rtype: dict - # """ - # to_return = {} - # for cond_name, data in self.collected_data.items(): - # to_return[cond_name] = data["input"] - # return to_return - - @property - def discretised_domains(self): - """ - Return a dictionary mapping domains to their corresponding sampled - points. - - :return: The discretised domains. - :rtype: dict - """ - return self._discretised_domains - - def __deepcopy__(self, memo): - """ - Perform a deep copy of the :class:`AbstractProblem` instance. - - :param dict memo: A dictionary used to track objects already copied - during the deep copy process to prevent redundant copies. - :return: A deep copy of the :class:`AbstractProblem` instance. - :rtype: AbstractProblem - """ - cls = self.__class__ - result = cls.__new__(cls) - memo[id(self)] = result - for k, v in self.__dict__.items(): - setattr(result, k, deepcopy(v, memo)) - return result - - @property - def are_all_domains_discretised(self): - """ - Check if all the domains are discretised. - - :return: ``True`` if all domains are discretised, ``False`` otherwise. - :rtype: bool - """ - return all( - domain in self.discretised_domains for domain in self.domains - ) - - @property - def input_variables(self): - """ - Get the input variables of the problem. - - :return: The input variables of the problem. - :rtype: list[str] - """ - variables = [] - - if hasattr(self, "spatial_variables"): - variables += self.spatial_variables - if hasattr(self, "temporal_variable"): - variables += self.temporal_variable - if hasattr(self, "parameters"): - variables += self.parameters - - return variables - - @input_variables.setter - def input_variables(self, variables): - """ - Set the input variables of the AbstractProblem. - - :param list[str] variables: The input variables of the problem. - :raises RuntimeError: Not implemented. 
- """ - raise RuntimeError - - @property - @abstractmethod - def output_variables(self): - """ - Get the output variables of the problem. - """ - - @property - @abstractmethod - def conditions(self): - """ - Get the conditions of the problem. - - :return: The conditions of the problem. - :rtype: dict - """ - return self.conditions - - def discretise_domain( - self, n=None, mode="random", domains="all", sample_rules=None - ): - """ - Discretize the problem's domains by sampling a specified number of - points according to the selected sampling mode. - - :param int n: The number of points to sample. - :param mode: The sampling method. Default is ``random``. - Available modes include: random sampling, ``random``; - latin hypercube sampling, ``latin`` or ``lh``; - chebyshev sampling, ``chebyshev``; grid sampling ``grid``. - :param domains: The domains from which to sample. Default is ``all``. - :type domains: str | list[str] - :param dict sample_rules: A dictionary defining custom sampling rules - for input variables. If provided, it must contain a dictionary - specifying the sampling rule for each variable, overriding the - ``n`` and ``mode`` arguments. Each key must correspond to the - input variables from - :meth:~pina.problem.AbstractProblem.input_variables, and its value - should be another dictionary with - two keys: ``n`` (number of points to sample) and ``mode`` - (sampling method). Defaults to None. - :raises RuntimeError: If both ``n`` and ``sample_rules`` are specified. - :raises RuntimeError: If neither ``n`` nor ``sample_rules`` are set. - - :Example: - >>> problem.discretise_domain(n=10, mode='grid') - >>> problem.discretise_domain(n=10, mode='grid', domains=['gamma1']) - >>> problem.discretise_domain( - ... sample_rules={ - ... 'x': {'n': 10, 'mode': 'grid'}, - ... 'y': {'n': 100, 'mode': 'grid'} - ... }, - ... domains=['D'] - ... ) - - .. warning:: - ``random`` is currently the only implemented ``mode`` for all - geometries, i.e. 
:class:`~pina.domain.ellipsoid.EllipsoidDomain`, - :class:`~pina.domain.cartesian.CartesianDomain`, - :class:`~pina.domain.simplex.SimplexDomain`, and geometry - compositions :class:`~pina.domain.union_domain.Union`, - :class:`~pina.domain.difference_domain.Difference`, - :class:`~pina.domain.exclusion_domain.Exclusion`, and - :class:`~pina.domain.intersection_domain.Intersection`. - The modes ``latin`` or ``lh``, ``chebyshev``, ``grid`` are only - implemented for :class:`~pina.domain.cartesian.CartesianDomain`. - - .. warning:: - If custom discretisation is applied by setting ``sample_rules`` not - to ``None``, then the discretised domain must be of class - :class:`~pina.domain.cartesian.CartesianDomain` - """ - - # check consistecy n, mode, variables, locations - if sample_rules is not None: - check_consistency(sample_rules, dict) - if mode is not None: - check_consistency(mode, str) - check_consistency(domains, (list, str)) - - # check correct location - if domains == "all": - domains = self.domains.keys() - elif not isinstance(domains, (list)): - domains = [domains] - if n is not None and sample_rules is None: - self._apply_default_discretization(n, mode, domains) - if n is None and sample_rules is not None: - self._apply_custom_discretization(sample_rules, domains) - elif n is not None and sample_rules is not None: - raise RuntimeError( - "You can't specify both n and sample_rules at the same time." - ) - elif n is None and sample_rules is None: - raise RuntimeError("You have to specify either n or sample_rules.") - - def _apply_default_discretization(self, n, mode, domains): - """ - Apply default discretization to the problem's domains. - - :param int n: The number of points to sample. - :param mode: The sampling method. - :param domains: The domains from which to sample. 
- :type domains: str | list[str] - """ - for domain in domains: - self.discretised_domains[domain] = ( - self.domains[domain].sample(n, mode).sort_labels() - ) - - def _apply_custom_discretization(self, sample_rules, domains): - """ - Apply custom discretization to the problem's domains. - - :param dict sample_rules: A dictionary of custom sampling rules. - :param domains: The domains from which to sample. - :type domains: str | list[str] - :raises RuntimeError: If the keys of the sample_rules dictionary are not - the same as the input variables. - :raises RuntimeError: If custom discretisation is applied on a domain - that is not a CartesianDomain. - """ - if sorted(list(sample_rules.keys())) != sorted(self.input_variables): - raise RuntimeError( - "The keys of the sample_rules dictionary must be the same as " - "the input variables." - ) - for domain in domains: - if not isinstance(self.domains[domain], CartesianDomain): - raise RuntimeError( - "Custom discretisation can be applied only on Cartesian " - "domains" - ) - discretised_tensor = [] - for var, rules in sample_rules.items(): - n, mode = rules["n"], rules["mode"] - points = self.domains[domain].sample(n, mode, var) - discretised_tensor.append(points) - - self.discretised_domains[domain] = merge_tensors( - discretised_tensor - ).sort_labels() - - def add_points(self, new_points_dict): - """ - Add new points to an already sampled domain. - - :param dict new_points_dict: The dictionary mapping new points to their - corresponding domain. - """ - for k, v in new_points_dict.items(): - self.discretised_domains[k] = LabelTensor.vstack( - [self.discretised_domains[k], v] - ) - - def move_discretisation_into_conditions(self): - """ - Move the discretised domains into their corresponding conditions. 
- """ - if not self.are_all_domains_discretised: - warnings.formatwarning = custom_warning_format - warnings.filterwarnings("always", category=RuntimeWarning) - warning_message = "\n".join([f"""{" " * 13} ---> Domain {key} { - "sampled" if key in self.discretised_domains - else - "not sampled"}""" for key in self.domains]) - warnings.warn( - "Some of the domains are still not sampled. Consider calling " - "problem.discretise_domain function for all domains before " - "accessing the collected data:\n" - f"{warning_message}", - RuntimeWarning, - ) - - for name, cond in self.conditions.items(): - if hasattr(cond, "domain"): - domain = cond.domain - self.conditions[name] = Condition( - input=self.discretised_domains[cond.domain], - equation=cond.equation, - ) - self.conditions[name].domain = domain - self.conditions[name].problem = self diff --git a/pina/_src/problem/base_problem.py b/pina/_src/problem/base_problem.py new file mode 100644 index 000000000..dc02b20ae --- /dev/null +++ b/pina/_src/problem/base_problem.py @@ -0,0 +1,308 @@ +"""Module for the BaseProblem class.""" + +import warnings +from copy import deepcopy +from pina._src.problem.problem_interface import ProblemInterface +from pina._src.domain.domain_interface import DomainInterface +from pina._src.core.label_tensor import LabelTensor +from pina._src.condition.condition import Condition +from pina._src.condition.domain_equation_condition import ( + DomainEquationCondition, +) +from pina._src.core.utils import ( + check_consistency, + check_positive_integer, + merge_tensors, +) + + +class BaseProblem(ProblemInterface): + """ + Base class for all problems, implementing common functionality. + + A problem is defined by core components, including input and output + variables, a set of conditions to be satisfied, and optionally the domains + on which these conditions are defined. 
+ + All problems must inherit from this class and implement abstract methods + defined in :class:`~pina.problem.problem_interface.ProblemInterface`. + + This class is not meant to be instantiated directly. + """ + + def __init__(self): + """ + Initialization of the :class:`BaseProblem` class. + """ + self._discretised_domains = {} + + # Create a correspondence between the problem and the conditions + for condition_name in self.conditions: + self.conditions[condition_name].problem = self + + # Create a dictionary to store the domains of the problem + if not hasattr(self, "domains"): + self.domains = {} + + # Store all the domains object passed to the problem's conditions + for name, cond in self.conditions.items(): + if isinstance(cond, DomainEquationCondition): + if isinstance(cond.domain, DomainInterface): + self.domains[name] = cond.domain + cond.domain = name + + def __deepcopy__(self, memo): + """ + Create a deep copy of the problem instance. + + :param dict memo: The memorization dictionary used by the deepcopy + function. + :return: A deep copy of the problem instance. + :rtype: ProblemInterface + """ + # Create a new instance of the same class and store it in a dictionary + result = self.__class__.__new__(self.__class__) + memo[id(self)] = result + + # Set the attributes of the new instance to deep copies of the original + for k, v in self.__dict__.items(): + setattr(result, k, deepcopy(v, memo)) + + return result + + def discretise_domain( + self, n=None, mode="random", domains=None, sample_rules=None + ): + """ + Discretise the problem's domains by sampling a specified number of + points according to the selected sampling mode. + + :param int n: The number of points to sample. This is ignored if + ``sample_rules`` is provided. Default is ``None``. + :param str mode: The sampling method. 
Available modes include: + ``"random"`` for random sampling, ``"latin"`` or ``"lh"`` for latin + hypercube sampling, ``"chebyshev"`` for Chebyshev sampling, and + ``"grid"`` for grid sampling. Default is ``"random"``. + :param domains: The domains from which to sample. If ``None``, all + domains are considered for sampling. Default is ``None``. + :type domains: str | list[str] + :param dict sample_rules: The dictionary specifying custom sampling + rules for each input variable. When provided, it overrides the + global ``n`` and ``mode`` arguments. Each key in the dictionary must + match one of the variables defined in :meth:`input_variables`, and + each value must be a dictionary containing two keys: ``n`` for the + number of points to sample for that variable, and ``mode`` for the + sampling method to use. If ``None``, the global ``n`` and ``mode`` + parameters are used for all variables. Default is ``None``. + :raises ValueError: If ``sample_rules`` is provided but it is not a + dictionary. + :raises ValueError: If ``sample_rules`` is provided but its keys do not + match the input variables of the problem. + :raises ValueError: If ``sample_rules`` is provided but any of its rules + is not a dictionary containing both ``n`` and ``mode`` keys, with + ``n`` being a positive integer and ``mode`` being a string. + :raises AssertionError: If ``n`` is not a positive integer. + :raises ValueError: If ``mode`` is not a string. + :raises ValueError: If ``domains`` is provided but it is neither a string + nor a list of strings. + + .. warning:: + ``"random"`` is the only supported ``mode`` across all geometries: + :class:`~pina.domain.cartesian_domain.CartesianDomain`, + :class:`~pina.domain.ellipsoid_domain.EllipsoidDomain`, and + :class:`~pina.domain.simplex_domain.SimplexDomain`. + Sampling modes such as ``"latin"``, ``"chebyshev"``, and ``"grid"`` + are only implemented for + :class:`~pina.domain.cartesian_domain.CartesianDomain`. 
+ When custom discretisation is specified via ``sample_rules``, the + domain to be discretised must be an instance of + :class:`~pina.domain.cartesian_domain.CartesianDomain`. + + :Example: + >>> problem.discretise_domain(n=10, mode="random") + >>> problem.discretise_domain(n=10, mode="lh", domains=["boundary"]) + >>> problem.discretise_domain( + ... sample_rules={ + ... 'x': {'n': 10, 'mode': 'grid'}, + ... 'y': {'n': 100, 'mode': 'grid'} + ... }, + ... ) + """ + # Initialize the domains to be discretised + if domains is None: + domains = list(self.domains) + if not isinstance(domains, (list)): + domains = [domains] + + # Check sampling rules + if sample_rules is not None: + check_consistency(sample_rules, dict) + + # Check that the keys of sample_rules match the input variables + if sorted(list(sample_rules.keys())) != sorted( + self.input_variables + ): + raise ValueError( + "The keys of the sample_rules dictionary must match the " + "input variables." + ) + + # Check that the rules for each variable are valid + for var, rules in sample_rules.items(): + check_consistency(rules, dict) + if "n" not in rules or "mode" not in rules: + raise ValueError( + f"Sampling rules for variable {var} must contain 'n' " + "and 'mode' keys." 
+ ) + check_positive_integer(rules["n"], strict=True) + check_consistency(rules["mode"], str) + + # Check n only if sample_rules is not provided + else: + check_positive_integer(n, strict=True) + + # Check consistency + check_consistency(mode, str) + check_consistency(domains, str) + + # If sample_rules is provided, apply custom discretisation + if sample_rules is not None: + for d in domains: + + # Discretise each variable according to its custom rules + discretised_tensor = [ + self.domains[d].sample(rules["n"], rules["mode"], var) + for var, rules in sample_rules.items() + ] + + # Merge the discretised tensors into a single one for the domain + self.discretised_domains[d] = merge_tensors(discretised_tensor) + + # Otherwise, apply the same n and mode to all specified domains + else: + for d in domains: + self.discretised_domains[d] = self.domains[d].sample(n, mode) + + def add_points(self, new_points_dict): + """ + Append additional points to an already discretised domain. + + :param dict new_points_dict: The dictionary mapping each domain to the + corresponding set of new points to be added. Each key in the + dictionary must match one of the domains defined in :attr:`domains`, + and each value must be a :class:`~pina.tensor.LabelTensor` + containing the new points to be added to that domain. The labels of + the points to be added must correspond to those of the domain to + which they are being added. + :raises ValueError: If ``new_points_dict`` is not a dictionary. + :raises ValueError: If any of the values in ``new_points_dict`` is not + a :class:`~pina.tensor.LabelTensor`. + :raises ValueError: If any of the keys in ``new_points_dict`` does not + match any of the domains defined in :attr:`domains`. + :raises ValueError: If any of the domains in ``new_points_dict`` has not + been discretised yet. + + :Example: + >>> additional_points = { + ... "boundary": LabelTensor(torch.rand(5, 2), labels=["x", "y"]) + ... 
} + >>> problem.add_points(additional_points) + """ + # Check consistency + check_consistency(new_points_dict, dict) + + # Check the keys and values of the dictionary + for key, value in new_points_dict.items(): + check_consistency(value, LabelTensor) + if key not in self.domains: + raise ValueError( + f"Key {key} does not match any domain of the problem." + ) + if key not in self.discretised_domains: + raise ValueError(f"Domain {key} has not been discretised yet.") + + # Append the new points to the corresponding discretised domains + for key, value in new_points_dict.items(): + self.discretised_domains[key] = LabelTensor.vstack( + [self.discretised_domains[key], value] + ) + + def move_discretisation_into_conditions(self): + """ + Move the sampled points from the discretised domains into their + corresponding conditions. This ensures that the conditions are evaluated + on the correct set of points after discretisation. + """ + # Move the discretised domains into their corresponding conditions + for name, cond in self.conditions.items(): + if hasattr(cond, "domain"): + + # Create a new condition with the discretised domain as input + new_condition = Condition( + input=self.discretised_domains[cond.domain], + equation=cond.equation, + ) + + # Set the domain and problem attributes of the new condition + new_condition.domain = cond.domain + new_condition.problem = self + + # Replace the old condition in the conditions dictionary + self.conditions[name] = new_condition + + @property + def input_variables(self): + """ + The input variables of the problem. + + :return: The input variables of the problem. 
+ :rtype: list[str] + """ + # Define a helper function to convert a string to a list if needed + _as_list = lambda x: [x] if isinstance(x, str) else x + + # Collect the spatial, temporal, and parametric variables + variables = [] + if hasattr(self, "spatial_variables"): + variables += _as_list(self.spatial_variables) + if hasattr(self, "temporal_variables"): + variables += _as_list(self.temporal_variables) + if hasattr(self, "parameters"): + variables += _as_list(self.parameters) + + return variables + + @property + def discretised_domains(self): + """ + The dictionary containing the discretised domains of the problem. Each + key corresponds to a domain defined in :attr:`domains`, and each value + is a :class:`~pina.tensor.LabelTensor` containing the sampled points for + that domain. + + :return: The discretised domains. + :rtype: dict + """ + return self._discretised_domains + + @property + def are_all_domains_discretised(self): + """ + Whether all domains of the problem have been discretised. + + :return: ``True`` if all domains are discretised, ``False`` otherwise. 
+ :rtype: bool + """ + return all(d in self.discretised_domains for d in self.domains) + + +# Back-compatibility with version 0.2, to be removed soon +class AbstractProblem(BaseProblem): + def __init__(self, *args, **kwargs): + warnings.warn( + "AbstractProblem is deprecated, use BaseProblem instead", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) diff --git a/pina/_src/problem/inverse_problem.py b/pina/_src/problem/inverse_problem.py index fa2f3d57f..7ee28bb96 100644 --- a/pina/_src/problem/inverse_problem.py +++ b/pina/_src/problem/inverse_problem.py @@ -2,13 +2,18 @@ from abc import abstractmethod import torch -from pina._src.problem.abstract_problem import AbstractProblem +from pina._src.problem.base_problem import BaseProblem -class InverseProblem(AbstractProblem): +class InverseProblem(BaseProblem): """ - Class for defining inverse problems, where the objective is to determine - unknown parameters through training, based on given data. + Base class for all inverse problems, extending the standard problem + definition with unknown parameters to be determined through training. + + An inverse problem is defined by a set of unknown parameters that need to be + estimated from observed data. + + This class is not meant to be instantiated directly. """ def __init__(self): @@ -16,15 +21,15 @@ def __init__(self): Initialization of the :class:`InverseProblem` class. 
""" super().__init__() - # storing unknown_parameters for optimization + + # Set the unknown parameters as trainable parameters self.unknown_parameters = {} for var in self.unknown_variables: - range_var = self.unknown_parameter_domain._range[var] - tensor_var = ( - torch.rand(1, requires_grad=True) * range_var[1] + range_var[0] - ) + low, high = self.unknown_parameter_domain._range[var] + tensor_var = low + (high - low) * torch.rand(1) self.unknown_parameters[var] = torch.nn.Parameter(tensor_var) + @property @abstractmethod def unknown_parameter_domain(self): """ @@ -34,7 +39,7 @@ def unknown_parameter_domain(self): @property def unknown_variables(self): """ - Get the unknown variables of the problem. + The unknown variables of the problem. :return: The unknown variables of the problem. :rtype: list[str] @@ -44,7 +49,7 @@ def unknown_variables(self): @property def unknown_parameters(self): """ - Get the unknown parameters of the problem. + The unknown parameters of the problem. :return: The unknown parameters of the problem. :rtype: torch.nn.Parameter diff --git a/pina/_src/problem/parametric_problem.py b/pina/_src/problem/parametric_problem.py index e361074b3..12a9cd089 100644 --- a/pina/_src/problem/parametric_problem.py +++ b/pina/_src/problem/parametric_problem.py @@ -1,17 +1,23 @@ """Module for the ParametricProblem class.""" from abc import abstractmethod +from pina._src.problem.base_problem import BaseProblem -from .abstract_problem import AbstractProblem - -class ParametricProblem(AbstractProblem): +class ParametricProblem(BaseProblem): """ - Class for defining parametric problems, where certain input variables are - treated as parameters that can vary, allowing the model to adapt to - different scenarios based on the chosen parameters. + Base class for all parametric problems, extending the standard problem + definition with parameter-dependent inputs. 
+ + A parametric problem includes additional input variables, defined over a + dedicated parameter domain, which represent external quantities + (e.g., physical coefficients or control variables) that can vary across + different evaluations and influence the solution. + + This class is not meant to be instantiated directly. """ + @property @abstractmethod def parameter_domain(self): """ @@ -21,7 +27,7 @@ def parameter_domain(self): @property def parameters(self): """ - Get the parameters of the problem. + The parameters of the problem. :return: The parameters of the problem. :rtype: list[str] diff --git a/pina/_src/problem/problem_interface.py b/pina/_src/problem/problem_interface.py new file mode 100644 index 000000000..d64130d61 --- /dev/null +++ b/pina/_src/problem/problem_interface.py @@ -0,0 +1,150 @@ +"""Module for the Problem Interface.""" + +from abc import ABCMeta, abstractmethod + + +class ProblemInterface(metaclass=ABCMeta): + """ + Abstract interface for all problems. + """ + + @abstractmethod + def __deepcopy__(self, memo): + """ + Create a deep copy of the problem instance. + + :param dict memo: The memorization dictionary used by the deepcopy + function. + :return: A deep copy of the problem instance. + :rtype: ProblemInterface + """ + + @abstractmethod + def discretise_domain( + self, n=None, mode="random", domains=None, sample_rules=None + ): + """ + Discretise the problem's domains by sampling a specified number of + points according to the selected sampling mode. + + :param int n: The number of points to sample. This is ignored if + ``sample_rules`` is provided. Default is ``None``. + :param str mode: The sampling method. Available modes include: + ``"random"`` for random sampling, ``"latin"`` or ``"lh"`` for latin + hypercube sampling, ``"chebyshev"`` for Chebyshev sampling, and + ``"grid"`` for grid sampling. Default is ``"random"``. + :param domains: The domains from which to sample. If ``None``, all + domains are considered for sampling. 
Default is ``None``. + :type domains: str | list[str] + :param dict sample_rules: The dictionary specifying custom sampling + rules for each input variable. When provided, it overrides the + global ``n`` and ``mode`` arguments. Each key in the dictionary must + match one of the variables defined in :meth:`input_variables`, and + each value must be a dictionary containing two keys: ``n`` for the + number of points to sample for that variable, and ``mode`` for the + sampling method to use. If ``None``, the global ``n`` and ``mode`` + parameters are used for all variables. Default is ``None``. + + .. warning:: + ``"random"`` is the only supported ``mode`` across all geometries: + :class:`~pina.domain.cartesian_domain.CartesianDomain`, + :class:`~pina.domain.ellipsoid_domain.EllipsoidDomain`, and + :class:`~pina.domain.simplex_domain.SimplexDomain`. + Sampling modes such as ``"latin"``, ``"chebyshev"``, and ``"grid"`` + are only implemented for + :class:`~pina.domain.cartesian_domain.CartesianDomain`. + When custom discretisation is specified via ``sample_rules``, the + domain to be discretised must be an instance of + :class:`~pina.domain.cartesian_domain.CartesianDomain`. + + :Example: + >>> problem.discretise_domain(n=10, mode="random") + >>> problem.discretise_domain(n=10, mode="lh", domains=["boundary"]) + >>> problem.discretise_domain( + ... sample_rules={ + ... 'x': {'n': 10, 'mode': 'grid'}, + ... 'y': {'n': 100, 'mode': 'grid'} + ... }, + ... ) + """ + + @abstractmethod + def add_points(self, new_points_dict): + """ + Append additional points to an already discretised domain. + + :param dict new_points_dict: The dictionary mapping each domain to the + corresponding set of new points to be added. Each key in the + dictionary must match one of the domains defined in :attr:`domains`, + and each value must be a :class:`~pina.tensor.LabelTensor` + containing the new points to be added to that domain. 
The labels of + the points to be added must correspond to those of the domain to + which they are being added. + + :Example: + >>> additional_points = { + ... "boundary": LabelTensor(torch.rand(5, 2), labels=["x", "y"]) + ... } + >>> problem.add_points(additional_points) + """ + + @abstractmethod + def move_discretisation_into_conditions(self): + """ + Move the sampled points from the discretised domains into their + corresponding conditions. This ensures that the conditions are evaluated + on the correct set of points after discretisation. + """ + + @property + @abstractmethod + def input_variables(self): + """ + The input variables of the problem. + + :return: The input variables of the problem. + :rtype: list[str] + """ + + @property + @abstractmethod + def output_variables(self): + """ + The output variables of the problem. + + :return: The output variables of the problem. + :rtype: list[str] + """ + + @property + @abstractmethod + def conditions(self): + """ + The conditions associated with the problem. + + :return: The conditions associated with the problem. + :rtype: dict + """ + + @property + @abstractmethod + def discretised_domains(self): + """ + The dictionary containing the discretised domains of the problem. Each + key corresponds to a domain defined in :attr:`domains`, and each value + is a :class:`~pina.tensor.LabelTensor` containing the sampled points for + that domain. + + :return: The discretised domains. + :rtype: dict + """ + + @property + @abstractmethod + def are_all_domains_discretised(self): + """ + Whether all domains of the problem have been discretised. + + :return: ``True`` if all domains are discretised, ``False`` otherwise. 
+ :rtype: bool + """ diff --git a/pina/_src/problem/spatial_problem.py b/pina/_src/problem/spatial_problem.py index 608e31691..16ea9365b 100644 --- a/pina/_src/problem/spatial_problem.py +++ b/pina/_src/problem/spatial_problem.py @@ -1,26 +1,32 @@ """Module for the SpatialProblem class.""" from abc import abstractmethod +from pina._src.problem.base_problem import BaseProblem -from .abstract_problem import AbstractProblem - -class SpatialProblem(AbstractProblem): +class SpatialProblem(BaseProblem): """ - Class for defining spatial problems, where the problem domain is defined in - terms of spatial variables. + Base class for all spatial problems, extending the standard problem + definition with spatial-dependent inputs. + + A spatial problem is defined over a spatial domain, where input variables + represent the coordinates of the system (e.g., positions in one or more + dimensions) on which the solution is evaluated. + + This class is not meant to be instantiated directly. """ + @property @abstractmethod def spatial_domain(self): """ - The spatial domain of the problem. + The domain of spatial variables of the problem. """ @property def spatial_variables(self): """ - Get the spatial input variables of the problem. + The spatial input variables of the problem. :return: The spatial input variables of the problem. :rtype: list[str] diff --git a/pina/_src/problem/time_dependent_problem.py b/pina/_src/problem/time_dependent_problem.py index ea2ad7d54..b81ab4778 100644 --- a/pina/_src/problem/time_dependent_problem.py +++ b/pina/_src/problem/time_dependent_problem.py @@ -1,28 +1,33 @@ """Module for the TimeDependentProblem class.""" from abc import abstractmethod +from pina._src.problem.base_problem import BaseProblem -from .abstract_problem import AbstractProblem - -class TimeDependentProblem(AbstractProblem): +class TimeDependentProblem(BaseProblem): """ - Class for defining time-dependent problems, where the system's behavior - changes with respect to time. 
+ Base class for all time-dependent problems, extending the standard problem + definition with time-dependent inputs. + + A time-dependent problem is defined over a temporal domain, where input + variables represent the time at which the solution is evaluated. + + This class is not meant to be instantiated directly. """ + @property @abstractmethod def temporal_domain(self): """ - The temporal domain of the problem. + The domain of temporal variables of the problem. """ @property - def temporal_variable(self): + def temporal_variables(self): """ - Get the time variable of the problem. + The temporal variables of the problem. - :return: The time variable of the problem. + :return: The temporal variables of the problem. :rtype: list[str] """ return self.temporal_domain.variables diff --git a/pina/_src/problem/zoo/acoustic_wave.py b/pina/_src/problem/zoo/acoustic_wave_problem.py similarity index 95% rename from pina/_src/problem/zoo/acoustic_wave.py rename to pina/_src/problem/zoo/acoustic_wave_problem.py index 44db8eb96..e4e241e8a 100644 --- a/pina/_src/problem/zoo/acoustic_wave.py +++ b/pina/_src/problem/zoo/acoustic_wave_problem.py @@ -1,13 +1,13 @@ """Formulation of the acoustic wave problem.""" import torch -from pina._src.condition.condition import Condition -from pina._src.problem.spatial_problem import SpatialProblem from pina._src.problem.time_dependent_problem import TimeDependentProblem -from pina._src.core.utils import check_consistency from pina._src.domain.cartesian_domain import CartesianDomain -from pina._src.equation.equation import Equation from pina._src.equation.system_equation import SystemEquation +from pina._src.problem.spatial_problem import SpatialProblem +from pina._src.condition.condition import Condition +from pina._src.core.utils import check_consistency +from pina._src.equation.equation import Equation from pina._src.equation.equation_factory import ( FixedValue, FixedGradient, @@ -70,7 +70,7 @@ def __init__(self, c=2.0): """ 
Initialization of the :class:`AcousticWaveProblem` class. - :param c: The wave propagation speed. Default is 2.0. + :param c: The wave propagation speed. Default is ``2.0``. :type c: float | int """ super().__init__() @@ -93,4 +93,7 @@ def solution(self, pts): arg_t = self.c * torch.pi * pts["t"] term1 = torch.sin(arg_x) * torch.cos(arg_t) term2 = 0.5 * torch.sin(4 * arg_x) * torch.cos(4 * arg_t) - return term1 + term2 + + sol = term1 + term2 + sol.labels = self.output_variables + return sol diff --git a/pina/_src/problem/zoo/advection.py b/pina/_src/problem/zoo/advection_problem.py similarity index 93% rename from pina/_src/problem/zoo/advection.py rename to pina/_src/problem/zoo/advection_problem.py index 3067ce8bf..c1cfa85f6 100644 --- a/pina/_src/problem/zoo/advection.py +++ b/pina/_src/problem/zoo/advection_problem.py @@ -1,13 +1,13 @@ """Formulation of the advection problem.""" import torch -from pina._src.condition.condition import Condition -from pina._src.problem.spatial_problem import SpatialProblem from pina._src.problem.time_dependent_problem import TimeDependentProblem -from pina._src.equation.equation import Equation +from pina._src.domain.cartesian_domain import CartesianDomain +from pina._src.problem.spatial_problem import SpatialProblem from pina._src.equation.equation_factory import Advection +from pina._src.condition.condition import Condition from pina._src.core.utils import check_consistency -from pina._src.domain.cartesian_domain import CartesianDomain +from pina._src.equation.equation import Equation def initial_condition(input_, output_): @@ -25,7 +25,8 @@ def initial_condition(input_, output_): class AdvectionProblem(SpatialProblem, TimeDependentProblem): r""" Implementation of the advection problem in the spatial interval - :math:`[0, 2 \pi]` and temporal interval :math:`[0, 1]`. + :math:`[0, 2 \pi]` and temporal interval :math:`[0, 1]` with periodic + boundary conditions. .. 
seealso:: @@ -56,7 +57,7 @@ def __init__(self, c=1.0): """ Initialization of the :class:`AdvectionProblem`. - :param c: The advection velocity parameter. Default is 1.0. + :param c: The advection velocity parameter. Default is ``1.0``. :type c: float | int """ super().__init__() diff --git a/pina/_src/problem/zoo/allen_cahn.py b/pina/_src/problem/zoo/allen_cahn_problem.py similarity index 91% rename from pina/_src/problem/zoo/allen_cahn.py rename to pina/_src/problem/zoo/allen_cahn_problem.py index 125a10304..b46713d9d 100644 --- a/pina/_src/problem/zoo/allen_cahn.py +++ b/pina/_src/problem/zoo/allen_cahn_problem.py @@ -28,7 +28,8 @@ def initial_condition(input_, output_): class AllenCahnProblem(TimeDependentProblem, SpatialProblem): r""" Implementation of the Allen Cahn problem in the spatial interval - :math:`[-1, 1]` and temporal interval :math:`[0, 1]`. + :math:`[-1, 1]` and temporal interval :math:`[0, 1]` with periodic + boundary conditions. .. seealso:: @@ -62,9 +63,9 @@ def __init__(self, alpha=1e-4, beta=5): """ Initialization of the :class:`AllenCahnProblem`. - :param alpha: The diffusion coefficient. Default is 1e-4. + :param alpha: The diffusion coefficient. Default is ``1e-4``. :type alpha: float | int - :param beta: The reaction coefficient. Default is 5.0. + :param beta: The reaction coefficient. Default is ``5.0``. :type beta: float | int """ super().__init__() diff --git a/pina/_src/problem/zoo/diffusion_reaction.py b/pina/_src/problem/zoo/diffusion_reaction_problem.py similarity index 90% rename from pina/_src/problem/zoo/diffusion_reaction.py rename to pina/_src/problem/zoo/diffusion_reaction_problem.py index 443ff49c5..5f05efedc 100644 --- a/pina/_src/problem/zoo/diffusion_reaction.py +++ b/pina/_src/problem/zoo/diffusion_reaction_problem.py @@ -65,7 +65,7 @@ def __init__(self, alpha=1e-4): """ Initialization of the :class:`DiffusionReactionProblem`. - :param alpha: The diffusion coefficient. Default is 1e-4. 
+ :param alpha: The diffusion coefficient. Default is ``1e-4``. :type alpha: float | int """ super().__init__() @@ -82,10 +82,11 @@ def forcing_term(input_): t = input_.extract("t") return torch.exp(-t) * ( - 1.5 * torch.sin(2 * x) - + (8 / 3) * torch.sin(3 * x) - + (15 / 4) * torch.sin(4 * x) - + (63 / 8) * torch.sin(8 * x) + (self.alpha - 1) * torch.sin(x) + + ((4 * self.alpha - 1) / 2) * torch.sin(2 * x) + + ((9 * self.alpha - 1) / 3) * torch.sin(3 * x) + + ((16 * self.alpha - 1) / 4) * torch.sin(4 * x) + + ((64 * self.alpha - 1) / 8) * torch.sin(8 * x) ) self.conditions["D"] = Condition( diff --git a/pina/_src/problem/zoo/helmholtz.py b/pina/_src/problem/zoo/helmholtz_problem.py similarity index 96% rename from pina/_src/problem/zoo/helmholtz.py rename to pina/_src/problem/zoo/helmholtz_problem.py index 992dda638..9e07d0c59 100644 --- a/pina/_src/problem/zoo/helmholtz.py +++ b/pina/_src/problem/zoo/helmholtz_problem.py @@ -41,10 +41,10 @@ def __init__(self, k=1.0, alpha_x=1, alpha_y=4): """ Initialization of the :class:`HelmholtzProblem` class. - :param k: The squared wavenumber. Default is 1.0. + :param k: The squared wavenumber. Default is ``1.0``. :type k: float | int - :param int alpha_x: The frequency in the x-direction. Default is 1. - :param int alpha_y: The frequency in the y-direction. Default is 4. + :param int alpha_x: The frequency in the x-direction. Default is ``1``. + :param int alpha_y: The frequency in the y-direction. Default is ``4``. 
""" super().__init__() check_consistency(k, (int, float)) diff --git a/pina/_src/problem/zoo/inverse_poisson_2d_square.py b/pina/_src/problem/zoo/inverse_poisson_problem.py similarity index 97% rename from pina/_src/problem/zoo/inverse_poisson_2d_square.py rename to pina/_src/problem/zoo/inverse_poisson_problem.py index 19628cae0..f0865d4cb 100644 --- a/pina/_src/problem/zoo/inverse_poisson_2d_square.py +++ b/pina/_src/problem/zoo/inverse_poisson_problem.py @@ -5,16 +5,15 @@ import torch from io import BytesIO - -from pina._src.condition.condition import Condition -from pina._src.equation.equation import Equation -from pina._src.equation.equation_factory import FixedValue -from pina._src.problem.spatial_problem import SpatialProblem -from pina._src.problem.inverse_problem import InverseProblem +from pina._src.core.utils import custom_warning_format, check_consistency from pina._src.domain.cartesian_domain import CartesianDomain +from pina._src.problem.inverse_problem import InverseProblem +from pina._src.problem.spatial_problem import SpatialProblem +from pina._src.equation.equation_factory import FixedValue +from pina._src.condition.condition import Condition from pina._src.core.label_tensor import LabelTensor +from pina._src.equation.equation import Equation from pina._src.core.operator import laplacian -from pina._src.core.utils import custom_warning_format, check_consistency warnings.formatwarning = custom_warning_format warnings.filterwarnings("always", category=ResourceWarning) @@ -32,7 +31,7 @@ def _load_tensor_from_url(url, labels, timeout=10): :param str url: URL to the remote `.pth` tensor file. :param labels: Labels for the resulting LabelTensor. :type labels: list[str] | tuple[str] - :param int timeout: Timeout for the request in seconds. Default is 10s. + :param int timeout: Timeout for the request in seconds. Default is ``10`` s. :return: A LabelTensor object if successful, otherwise None. 
:rtype: LabelTensor | None """ @@ -109,10 +108,10 @@ def __init__(self, load=True, data_size=1.0): :param bool load: If True, it attempts to load data from remote URLs. Set to False to skip data loading (e.g., if no internet connection). - Default is True. + Default is ``True``. :param float data_size: The fraction of the total data to use for the "data" condition. If set to 1.0, all available data is used. - If set to 0.0, no data is used. Default is 1.0. + If set to 0.0, no data is used. Default is ``1.0``. :raises ValueError: If `data_size` is not in the range [0.0, 1.0]. :raises ValueError: If `data_size` is not a float. """ diff --git a/pina/_src/problem/zoo/poisson_2d_square.py b/pina/_src/problem/zoo/poisson_problem.py similarity index 100% rename from pina/_src/problem/zoo/poisson_2d_square.py rename to pina/_src/problem/zoo/poisson_problem.py index 12b365666..6abe69967 100644 --- a/pina/_src/problem/zoo/poisson_2d_square.py +++ b/pina/_src/problem/zoo/poisson_problem.py @@ -2,10 +2,10 @@ import torch -from pina._src.condition.condition import Condition from pina._src.equation.equation_factory import FixedValue, Poisson -from pina._src.problem.spatial_problem import SpatialProblem from pina._src.domain.cartesian_domain import CartesianDomain +from pina._src.problem.spatial_problem import SpatialProblem +from pina._src.condition.condition import Condition def forcing_term(input_): diff --git a/pina/_src/problem/zoo/supervised_problem.py b/pina/_src/problem/zoo/supervised_problem.py index 81fb18a44..fea7f80a3 100644 --- a/pina/_src/problem/zoo/supervised_problem.py +++ b/pina/_src/problem/zoo/supervised_problem.py @@ -1,15 +1,14 @@ """Formulation of a Supervised Problem in PINA.""" -from pina._src.problem.abstract_problem import AbstractProblem +from pina._src.problem.base_problem import BaseProblem from pina._src.condition.condition import Condition -class SupervisedProblem(AbstractProblem): +class SupervisedProblem(BaseProblem): """ Definition of a 
supervised-learning problem. - This class provides a simple way to define a supervised problem - using a single condition of type + This class provides a simple way to define a supervised problem using the :class:`~pina.condition.input_target_condition.InputTargetCondition`. :Example: @@ -20,6 +19,9 @@ class SupervisedProblem(AbstractProblem): >>> problem = SupervisedProblem(input_data, output_data) """ + # TODO: This is necessary to override the abstract properties of + # BaseProblem, but it is not an ideal solution. We should consider + # a different design to manage input and output variables. conditions = {} output_variables = None input_variables = None @@ -36,10 +38,10 @@ def __init__( :type output_: torch.Tensor | LabelTensor | Graph | Data :param list[str] input_variables: List of names of the input variables. If None, the input variables are inferred from `input_`. - Default is None. + Default is ``None``. :param list[str] output_variables: List of names of the output variables. If None, the output variables are inferred from - `output_`. Default is None. + `output_`. Default is ``None``. """ # Set input and output variables self.input_variables = input_variables diff --git a/pina/_src/solver/autoregressive_solver/autoregressive_solver.py b/pina/_src/solver/autoregressive_solver/autoregressive_solver.py index e0b92af3d..f0b151c63 100644 --- a/pina/_src/solver/autoregressive_solver/autoregressive_solver.py +++ b/pina/_src/solver/autoregressive_solver/autoregressive_solver.py @@ -48,7 +48,7 @@ def __init__( """ Initialization of the :class:`AutoregressiveSolver` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. 
diff --git a/pina/_src/solver/ensemble_solver/ensemble_pinn.py b/pina/_src/solver/ensemble_solver/ensemble_pinn.py index f010753ec..6d50ddd05 100644 --- a/pina/_src/solver/ensemble_solver/ensemble_pinn.py +++ b/pina/_src/solver/ensemble_solver/ensemble_pinn.py @@ -87,7 +87,7 @@ def __init__( """ Initialization of the :class:`DeepEnsemblePINN` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module models: The neural network models to be used. :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. diff --git a/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py b/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py index 7b87e28f1..ed0fc2d29 100644 --- a/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py +++ b/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py @@ -13,7 +13,7 @@ class DeepEnsembleSolverInterface(MultiSolverInterface): The ensemble dimension can be customized to control how outputs are stacked. By default, it is compatible with problems defined by - :class:`~pina.problem.abstract_problem.AbstractProblem`, + :class:`~pina.problem.base_problem.BaseProblem`, and users can choose the problem type the solver is meant to address. An ensemble model is constructed by combining multiple models that solve @@ -59,7 +59,7 @@ def __init__( """ Initialization of the :class:`DeepEnsembleSolverInterface` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module models: The neural network models to be used. :param Optimizer optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. 
diff --git a/pina/_src/solver/ensemble_solver/ensemble_supervised.py b/pina/_src/solver/ensemble_solver/ensemble_supervised.py index ea6f7edde..e98ab7ed1 100644 --- a/pina/_src/solver/ensemble_solver/ensemble_supervised.py +++ b/pina/_src/solver/ensemble_solver/ensemble_supervised.py @@ -76,7 +76,7 @@ def __init__( """ Initialization of the :class:`DeepEnsembleSupervisedSolver` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module models: The neural network models to be used. :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. diff --git a/pina/_src/solver/garom.py b/pina/_src/solver/garom.py index 3f499abd1..29b1c67ac 100644 --- a/pina/_src/solver/garom.py +++ b/pina/_src/solver/garom.py @@ -42,7 +42,7 @@ def __init__( """ Initialization of the :class:`GAROM` class. - :param AbstractProblem problem: The formulation of the problem. + :param BaseProblem problem: The formulation of the problem. :param torch.nn.Module generator: The generator model. :param torch.nn.Module discriminator: The discriminator model. :param torch.nn.Module loss: The loss function to be minimized. diff --git a/pina/_src/solver/physics_informed_solver/causal_pinn.py b/pina/_src/solver/physics_informed_solver/causal_pinn.py index e7e97392b..0539af339 100644 --- a/pina/_src/solver/physics_informed_solver/causal_pinn.py +++ b/pina/_src/solver/physics_informed_solver/causal_pinn.py @@ -78,7 +78,7 @@ def __init__( """ Initialization of the :class:`CausalPINN` class. - :param AbstractProblem problem: The problem to be solved. It must + :param BaseProblem problem: The problem to be solved. It must inherit from at least :class:`~pina.problem.time_dependent_problem.TimeDependentProblem`. :param torch.nn.Module model: The neural network model to be used. 
diff --git a/pina/_src/solver/physics_informed_solver/competitive_pinn.py b/pina/_src/solver/physics_informed_solver/competitive_pinn.py index 287e0fd8d..cd80d5b2d 100644 --- a/pina/_src/solver/physics_informed_solver/competitive_pinn.py +++ b/pina/_src/solver/physics_informed_solver/competitive_pinn.py @@ -68,7 +68,7 @@ def __init__( """ Initialization of the :class:`CompetitivePINN` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. :param torch.nn.Module discriminator: The discriminator to be used. If ``None``, the discriminator is a deepcopy of the ``model``. diff --git a/pina/_src/solver/physics_informed_solver/gradient_pinn.py b/pina/_src/solver/physics_informed_solver/gradient_pinn.py index 9583c3025..be31d51e8 100644 --- a/pina/_src/solver/physics_informed_solver/gradient_pinn.py +++ b/pina/_src/solver/physics_informed_solver/gradient_pinn.py @@ -69,7 +69,7 @@ def __init__( """ Initialization of the :class:`GradientPINN` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. It must inherit from at least :class:`~pina.problem.spatial_problem.SpatialProblem` to compute the gradient of the loss. diff --git a/pina/_src/solver/physics_informed_solver/pinn.py b/pina/_src/solver/physics_informed_solver/pinn.py index dbea8cbe3..dc6243b50 100644 --- a/pina/_src/solver/physics_informed_solver/pinn.py +++ b/pina/_src/solver/physics_informed_solver/pinn.py @@ -61,7 +61,7 @@ def __init__( """ Initialization of the :class:`PINN` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. :param Optimizer optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. 
diff --git a/pina/_src/solver/physics_informed_solver/pinn_interface.py b/pina/_src/solver/physics_informed_solver/pinn_interface.py index 517b48082..b435cb77c 100644 --- a/pina/_src/solver/physics_informed_solver/pinn_interface.py +++ b/pina/_src/solver/physics_informed_solver/pinn_interface.py @@ -26,7 +26,7 @@ class PINNInterface(SupervisedSolverInterface, metaclass=ABCMeta): The `PINNInterface` class can be used to define PINNs that work with one or multiple optimizers and/or models. By default, it is compatible with - problems defined by :class:`~pina.problem.abstract_problem.AbstractProblem`, + problems defined by :class:`~pina.problem.base_problem.BaseProblem`, and users can choose the problem type the solver is meant to address. """ @@ -40,7 +40,7 @@ def __init__(self, **kwargs): """ Initialization of the :class:`PINNInterface` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. Default is `None`. diff --git a/pina/_src/solver/physics_informed_solver/rba_pinn.py b/pina/_src/solver/physics_informed_solver/rba_pinn.py index 7e7deda0a..5c7821120 100644 --- a/pina/_src/solver/physics_informed_solver/rba_pinn.py +++ b/pina/_src/solver/physics_informed_solver/rba_pinn.py @@ -79,7 +79,7 @@ def __init__( """ Initialization of the :class:`RBAPINN` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. :param Optimizer optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. 
diff --git a/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py b/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py index ee7f281e6..03ab795c2 100644 --- a/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py +++ b/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py @@ -121,7 +121,7 @@ def __init__( """ Initialization of the :class:`SelfAdaptivePINN` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The model to be used. :param torch.nn.Module weight_function: The Self-Adaptive mask model. Default is ``torch.nn.Sigmoid()``. diff --git a/pina/_src/solver/solver.py b/pina/_src/solver/solver.py index d6abd493b..d9d91c577 100644 --- a/pina/_src/solver/solver.py +++ b/pina/_src/solver/solver.py @@ -5,7 +5,7 @@ import torch from torch._dynamo import OptimizedModule -from pina._src.problem.abstract_problem import AbstractProblem +from pina._src.problem.base_problem import BaseProblem from pina._src.problem.inverse_problem import InverseProblem from pina._src.optim.optimizer_interface import Optimizer from pina._src.optim.scheduler_interface import Scheduler @@ -31,7 +31,7 @@ def __init__(self, problem, weighting, use_lt): """ Initialization of the :class:`SolverInterface` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param WeightingInterface weighting: The weighting schema to be used. If ``None``, no weighting schema is used. Default is ``None``. :param bool use_lt: If ``True``, the solver uses LabelTensors as input. 
@@ -39,7 +39,7 @@ def __init__(self, problem, weighting, use_lt): super().__init__() # check consistency of the problem - check_consistency(problem, AbstractProblem) + check_consistency(problem, BaseProblem) self._check_solver_consistency(problem) self._pina_problem = problem @@ -224,7 +224,7 @@ def _check_solver_consistency(self, problem): """ Check the consistency of the solver with the problem formulation. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. """ for condition in problem.conditions.values(): check_consistency(condition, self.accepted_conditions_types) @@ -337,7 +337,7 @@ def problem(self): The problem instance. :return: The problem instance. - :rtype: :class:`~pina.problem.abstract_problem.AbstractProblem` + :rtype: :class:`~pina.problem.base_problem.BaseProblem` """ return self._pina_problem @@ -379,7 +379,7 @@ def __init__( """ Initialization of the :class:`SingleSolverInterface` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. :param Optimizer optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is @@ -490,7 +490,7 @@ def __init__( """ Initialization of the :class:`MultiSolverInterface` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param models: The neural network models to be used. :type model: list[torch.nn.Module] | tuple[torch.nn.Module] :param list[Optimizer] optimizers: The optimizers to be used. 
diff --git a/pina/_src/solver/supervised_solver/reduced_order_model.py b/pina/_src/solver/supervised_solver/reduced_order_model.py index d9830d766..3687a3e2b 100644 --- a/pina/_src/solver/supervised_solver/reduced_order_model.py +++ b/pina/_src/solver/supervised_solver/reduced_order_model.py @@ -95,7 +95,7 @@ """ Initialization of the :class:`ReducedOrderModelSolver` class. - :param AbstractProblem problem: The formualation of the problem. + :param BaseProblem problem: The formulation of the problem. :param torch.nn.Module reduction_network: The reduction network used for reducing the input space. It must contain two methods, namely ``encode`` for input encoding, and ``decode`` for decoding the diff --git a/pina/_src/solver/supervised_solver/supervised.py b/pina/_src/solver/supervised_solver/supervised.py index 65d438c01..cdbddffca 100644 --- a/pina/_src/solver/supervised_solver/supervised.py +++ b/pina/_src/solver/supervised_solver/supervised.py @@ -45,7 +45,7 @@ """ Initialization of the :class:`SupervisedSolver` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. diff --git a/pina/_src/solver/supervised_solver/supervised_solver_interface.py b/pina/_src/solver/supervised_solver/supervised_solver_interface.py index 030fc3f82..e8cf9eeb6 100644 --- a/pina/_src/solver/supervised_solver/supervised_solver_interface.py +++ b/pina/_src/solver/supervised_solver/supervised_solver_interface.py @@ -19,7 +19,7 @@ class SupervisedSolverInterface(SolverInterface): The ``SupervisedSolverInterface`` class can be used to define Supervised solvers that work with one or multiple optimizers and/or models. 
By default, it is compatible with problems defined by - :class:`~pina.problem.abstract_problem.AbstractProblem`, + :class:`~pina.problem.base_problem.BaseProblem`, and users can choose the problem type the solver is meant to address. """ @@ -29,7 +29,7 @@ def __init__(self, loss=None, **kwargs): """ Initialization of the :class:`SupervisedSolver` class. - :param AbstractProblem problem: The problem to be solved. + :param BaseProblem problem: The problem to be solved. :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. Default is `None`. diff --git a/pina/problem/__init__.py b/pina/problem/__init__.py index b170bec21..dd8ae0950 100644 --- a/pina/problem/__init__.py +++ b/pina/problem/__init__.py @@ -1,15 +1,21 @@ """Module for the Problems.""" __all__ = [ - "AbstractProblem", + "AbstractProblem", # back-compatibility with version 0.2, to be removed soon + "ProblemInterface", + "BaseProblem", "SpatialProblem", "TimeDependentProblem", "ParametricProblem", "InverseProblem", ] -from pina._src.problem.abstract_problem import AbstractProblem +from pina._src.problem.problem_interface import ProblemInterface +from pina._src.problem.base_problem import BaseProblem from pina._src.problem.spatial_problem import SpatialProblem from pina._src.problem.time_dependent_problem import TimeDependentProblem from pina._src.problem.parametric_problem import ParametricProblem from pina._src.problem.inverse_problem import InverseProblem + +# Back-compatibility with version 0.2, to be removed soon +from pina._src.problem.base_problem import AbstractProblem diff --git a/pina/problem/zoo.py b/pina/problem/zoo.py index e5c23ae81..6c027ed54 100644 --- a/pina/problem/zoo.py +++ b/pina/problem/zoo.py @@ -11,13 +11,15 @@ "AcousticWaveProblem", ] +from pina._src.problem.zoo.acoustic_wave_problem import AcousticWaveProblem from pina._src.problem.zoo.supervised_problem import SupervisedProblem -from pina._src.problem.zoo.helmholtz 
import HelmholtzProblem -from pina._src.problem.zoo.allen_cahn import AllenCahnProblem -from pina._src.problem.zoo.advection import AdvectionProblem -from pina._src.problem.zoo.poisson_2d_square import Poisson2DSquareProblem -from pina._src.problem.zoo.diffusion_reaction import DiffusionReactionProblem -from pina._src.problem.zoo.inverse_poisson_2d_square import ( +from pina._src.problem.zoo.allen_cahn_problem import AllenCahnProblem +from pina._src.problem.zoo.advection_problem import AdvectionProblem +from pina._src.problem.zoo.helmholtz_problem import HelmholtzProblem +from pina._src.problem.zoo.poisson_problem import Poisson2DSquareProblem +from pina._src.problem.zoo.diffusion_reaction_problem import ( + DiffusionReactionProblem, +) +from pina._src.problem.zoo.inverse_poisson_problem import ( InversePoisson2DSquareProblem, ) -from pina._src.problem.zoo.acoustic_wave import AcousticWaveProblem diff --git a/tests/test_callback/test_normalizer_data_callback.py b/tests/test_callback/test_normalizer_data_callback.py index 7cdcc9510..431171bd7 100644 --- a/tests/test_callback/test_normalizer_data_callback.py +++ b/tests/test_callback/test_normalizer_data_callback.py @@ -6,7 +6,7 @@ from pina.solver import SupervisedSolver from pina.model import FeedForward from pina.callback import NormalizerDataCallback -from pina.problem import AbstractProblem +from pina.problem import BaseProblem from pina.problem.zoo import Poisson2DSquareProblem as Poisson from pina.solver import PINN from pina.graph import RadiusGraph @@ -25,7 +25,7 @@ target_2 = torch.rand(20, 1) * 5 -class LabelTensorProblem(AbstractProblem): +class LabelTensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { @@ -40,7 +40,7 @@ class LabelTensorProblem(AbstractProblem): } -class TensorProblem(AbstractProblem): +class TensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { @@ -53,7 +53,7 @@ class 
TensorProblem(AbstractProblem): output_graph = torch.rand(5, 1) -class GraphProblem(AbstractProblem): +class GraphProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { diff --git a/tests/test_problem.py b/tests/test_problem.py deleted file mode 100644 index 53ee3bc57..000000000 --- a/tests/test_problem.py +++ /dev/null @@ -1,94 +0,0 @@ -import torch -import pytest -from pina.problem.zoo import Poisson2DSquareProblem as Poisson -from pina import LabelTensor -from pina.domain import Union, CartesianDomain, EllipsoidDomain -from pina.condition import ( - Condition, - InputTargetCondition, - DomainEquationCondition, -) - - -def test_discretise_domain(): - n = 10 - poisson_problem = Poisson() - - poisson_problem.discretise_domain(n, "grid", domains="boundary") - assert poisson_problem.discretised_domains["boundary"].shape[0] == n - - poisson_problem.discretise_domain(n, "random", domains="boundary") - assert poisson_problem.discretised_domains["boundary"].shape[0] == n - - poisson_problem.discretise_domain(n, "grid", domains=["D"]) - assert poisson_problem.discretised_domains["D"].shape[0] == n**2 - - poisson_problem.discretise_domain(n, "random", domains=["D"]) - assert poisson_problem.discretised_domains["D"].shape[0] == n - - poisson_problem.discretise_domain(n, "latin", domains=["D"]) - assert poisson_problem.discretised_domains["D"].shape[0] == n - - poisson_problem.discretise_domain(n, "lh", domains=["D"]) - assert poisson_problem.discretised_domains["D"].shape[0] == n - - poisson_problem.discretise_domain(n) - - -def test_variables_correct_order_sampling(): - n = 10 - poisson_problem = Poisson() - poisson_problem.discretise_domain(n, "grid", domains=["D"]) - assert poisson_problem.discretised_domains["D"].labels == sorted( - poisson_problem.input_variables - ) - - poisson_problem.discretise_domain(n, "grid", domains=["D"]) - assert poisson_problem.discretised_domains["D"].labels == sorted( - poisson_problem.input_variables 
- ) - - -def test_add_points(): - poisson_problem = Poisson() - poisson_problem.discretise_domain(1, "random", domains=["D"]) - new_pts = LabelTensor(torch.tensor([[0.5, -0.5]]), labels=["x", "y"]) - poisson_problem.add_points({"D": new_pts}) - assert torch.allclose( - poisson_problem.discretised_domains["D"]["x"][-1], - new_pts["x"], - ) - assert torch.allclose( - poisson_problem.discretised_domains["D"]["y"][-1], - new_pts["y"], - ) - - -@pytest.mark.parametrize("mode", ["random", "grid"]) -def test_custom_sampling_logic(mode): - poisson_problem = Poisson() - sampling_rules = { - "x": {"n": 100, "mode": mode}, - "y": {"n": 50, "mode": mode}, - } - poisson_problem.discretise_domain(sample_rules=sampling_rules, domains="D") - assert poisson_problem.discretised_domains["D"].shape[0] == 100 * 50 - assert poisson_problem.discretised_domains["D"].labels == ["x", "y"] - - -@pytest.mark.parametrize("mode", ["random", "grid"]) -def test_wrong_custom_sampling_logic(mode): - d2 = CartesianDomain({"x": [1, 2], "y": [0, 1]}) - poisson_problem = Poisson() - poisson_problem.domains["D"] = Union([poisson_problem.domains["D"], d2]) - sampling_rules = { - "x": {"n": 100, "mode": mode}, - "y": {"n": 50, "mode": mode}, - } - with pytest.raises(RuntimeError): - poisson_problem.domains["new"] = EllipsoidDomain({"x": [0, 1]}) - poisson_problem.discretise_domain(sample_rules=sampling_rules) - - # Necessary cleanup - if "new" in poisson_problem.domains: - del poisson_problem.domains["new"] diff --git a/tests/test_problem/test_base_problem.py b/tests/test_problem/test_base_problem.py new file mode 100644 index 000000000..25acfcadd --- /dev/null +++ b/tests/test_problem/test_base_problem.py @@ -0,0 +1,158 @@ +import torch +import pytest +from pina import LabelTensor +from pina.problem.zoo import Poisson2DSquareProblem as Poisson + + +# Define sampling rules +rule1 = { + "x": {"n": 10, "mode": "random"}, + "y": {"n": 5, "mode": "grid"}, +} +rule2 = { + "x": {"n": 5, "mode": "lh"}, + "y": 
{"n": 10, "mode": "chebyshev"}, +} + + +@pytest.mark.parametrize("n", [2, 5]) +@pytest.mark.parametrize("mode", ["grid", "random", "latin", "chebyshev", "lh"]) +@pytest.mark.parametrize("domains", ["boundary", "D", ["boundary", "D"], None]) +@pytest.mark.parametrize("sample_rules", [None, rule1, rule2]) +def test_discretise_domain(n, mode, domains, sample_rules): + + # Define the problem + poisson_problem = Poisson() + + # Discretise domains + poisson_problem.discretise_domain( + n=n, mode=mode, domains=domains, sample_rules=sample_rules + ) + + # Transform domains to list for consistent processing + _as_list = lambda x: [x] if isinstance(x, str) else x + d_list = domains if domains is not None else ["boundary", "D"] + d_list = _as_list(d_list) + + # Check that the discretised domains have the expected number of points + for d in d_list: + + # Compute expected number of points if sample rules are provided + if sample_rules is not None: + n_tot = sample_rules["x"]["n"] * sample_rules["y"]["n"] + + # Otherwise, expect n pts or n^2 based on domain and mode + else: + n_tot = n**2 if mode in ["grid", "chebyshev"] and d == "D" else n + + # Check that the number of samples matches the expected number + assert poisson_problem.discretised_domains[d].shape[0] == n_tot + + # Check labels of the discretised domains + assert poisson_problem.discretised_domains[d].labels == sorted( + poisson_problem.input_variables + ) + + # Should fail if n is not a positive integer when sample rules not provided + if sample_rules is None: + with pytest.raises(AssertionError): + poisson_problem.discretise_domain( + n=-1, mode=mode, domains=domains, sample_rules=sample_rules + ) + + # Should fail if mode is not a string + with pytest.raises(ValueError): + poisson_problem.discretise_domain( + n=n, mode=123, domains=domains, sample_rules=sample_rules + ) + + # Should fail if domains is not a string or a list of strings + with pytest.raises(ValueError): + poisson_problem.discretise_domain( + n=n, 
mode=mode, domains=123, sample_rules=sample_rules + ) + + # Should fail if sample rules is not a dictionary + with pytest.raises(ValueError): + poisson_problem.discretise_domain( + n=n, mode=mode, domains=domains, sample_rules="not_a_dict" + ) + + # Should fail if the keys of sample rules do not match the input variables + with pytest.raises(ValueError): + wrong_sample_rules = {"wrong_var": {"n": 10, "mode": "random"}} + poisson_problem.discretise_domain( + n=n, mode=mode, domains=domains, sample_rules=wrong_sample_rules + ) + + # Should fail if the rules do not contain both 'n' and 'mode' keys + with pytest.raises(ValueError): + incomplete_sample_rules = {"x": {"n": 10}, "y": {"mode": "random"}} + poisson_problem.discretise_domain( + n=n, + mode=mode, + domains=domains, + sample_rules=incomplete_sample_rules, + ) + + +@pytest.mark.parametrize("domains", ["boundary", "D", ["boundary", "D"], None]) +def test_add_points(domains): + + # Store initial number of points in the domains and point to add + n_init, n_add = 5, 3 + n_tot = n_init + n_add + + # Define the problem and discretise the domain + poisson_problem = Poisson() + poisson_problem.discretise_domain(n=n_init, mode="random", domains=domains) + vars = poisson_problem.input_variables + + # Transform domains to list for consistent processing + _as_list = lambda x: [x] if isinstance(x, str) else x + d_list = domains if domains is not None else ["boundary", "D"] + d_list = _as_list(d_list) + + # Iterate over the domains and add points to each + for d in d_list: + + # Add new points to the domain + new_pts = LabelTensor(torch.rand(n_add, len(vars)), labels=vars) + poisson_problem.add_points({d: new_pts}) + + # Assert that the number of points in the domain is correct + assert poisson_problem.discretised_domains[d].shape[0] == n_tot + + # Assert that the new points are in the domain + assert torch.allclose( + poisson_problem.discretised_domains[d]["x"][-n_add:], new_pts["x"] + ) + assert torch.allclose( + 
poisson_problem.discretised_domains[d]["y"][-n_add:], new_pts["y"] + ) + + # Should fail if new points is not a dictionary + with pytest.raises(ValueError): + poisson_problem.add_points("not_a_dict") + + # Should fail if any of the values in new points is not a LabelTensor + with pytest.raises(ValueError): + poisson_problem.add_points({d_list[0]: torch.rand(n_add, len(vars))}) + + # Should fail if any of the keys does not match any of the existing domains + with pytest.raises(ValueError): + poisson_problem.add_points( + { + "not_a_domain": LabelTensor( + torch.rand(n_add, len(vars)), labels=vars + ) + } + ) + + # Should fail if any of the domains has not been discretised yet + with pytest.raises(ValueError): + poisson_problem = Poisson() + poisson_problem.discretise_domain(n=n_init, mode="random", domains="D") + poisson_problem.add_points( + {"boundary": LabelTensor(torch.rand(n_add, len(vars)), labels=vars)} + ) diff --git a/tests/test_problem/test_inverse_problem.py b/tests/test_problem/test_inverse_problem.py new file mode 100644 index 000000000..8a91cbac0 --- /dev/null +++ b/tests/test_problem/test_inverse_problem.py @@ -0,0 +1,27 @@ +import torch +from pina.problem import InverseProblem +from pina.domain import CartesianDomain + + +# Dummy inverse problem for testing +class DummyInverseProblem(InverseProblem): + + output_variables = ["u"] + conditions = {} + + # Define the unknown parameter domain + unknown_parameter_domain = CartesianDomain({"mu": [-1, 1]}) + + +def test_inverse_problem_initialization(): + + # Initialize the dummy inverse problem + problem = DummyInverseProblem() + + # Check that the inverse problem is initialized correctly + assert problem.unknown_variables == ["mu"] + assert isinstance(problem.unknown_parameters, dict) + for k, v in problem.unknown_parameters.items(): + assert isinstance(v, torch.nn.Parameter) + range_low, range_high = problem.unknown_parameter_domain._range[k] + assert range_low <= v.item() <= range_high diff --git 
a/tests/test_problem/test_parametric_problem.py b/tests/test_problem/test_parametric_problem.py new file mode 100644 index 000000000..00c4568e8 --- /dev/null +++ b/tests/test_problem/test_parametric_problem.py @@ -0,0 +1,22 @@ +from pina.problem import ParametricProblem +from pina.domain import CartesianDomain + + +# Dummy parametric problem for testing +class DummyParametricProblem(ParametricProblem): + + output_variables = ["u"] + conditions = {} + + # Define the parameter domain + parameter_domain = CartesianDomain({"mu": [-1, 1]}) + + +def test_parametric_problem_initialization(): + + # Initialize the dummy parametric problem + problem = DummyParametricProblem() + + # Check that the parametric problem is initialized correctly + assert problem.parameters == ["mu"] + assert problem.input_variables == problem.parameters diff --git a/tests/test_problem/test_spatial_problem.py b/tests/test_problem/test_spatial_problem.py new file mode 100644 index 000000000..4848db018 --- /dev/null +++ b/tests/test_problem/test_spatial_problem.py @@ -0,0 +1,22 @@ +from pina.problem import SpatialProblem +from pina.domain import CartesianDomain + + +# Dummy spatial problem for testing +class DummySpatialProblem(SpatialProblem): + + output_variables = ["u"] + conditions = {} + + # Define the spatial domain + spatial_domain = CartesianDomain({"x": [-1, 1]}) + + +def test_spatial_problem_initialization(): + + # Initialize the dummy spatial problem + problem = DummySpatialProblem() + + # Check that the spatial problem is initialized correctly + assert problem.spatial_variables == ["x"] + assert problem.input_variables == problem.spatial_variables diff --git a/tests/test_problem/test_time_dependent_problem.py b/tests/test_problem/test_time_dependent_problem.py new file mode 100644 index 000000000..e041f507b --- /dev/null +++ b/tests/test_problem/test_time_dependent_problem.py @@ -0,0 +1,22 @@ +from pina.problem import TimeDependentProblem +from pina.domain import CartesianDomain + + +# 
Dummy time-dependent problem for testing +class DummyTimeDependentProblem(TimeDependentProblem): + + output_variables = ["u"] + conditions = {} + + # Define the temporal domain + temporal_domain = CartesianDomain({"t": [0, 1]}) + + +def test_time_dependent_problem_initialization(): + + # Initialize the dummy time-dependent problem + problem = DummyTimeDependentProblem() + + # Check that the time-dependent problem is initialized correctly + assert problem.temporal_variables == ["t"] + assert problem.input_variables == problem.temporal_variables diff --git a/tests/test_problem_zoo/test_acoustic_wave.py b/tests/test_problem_zoo/test_acoustic_wave.py deleted file mode 100644 index 0cf794d18..000000000 --- a/tests/test_problem_zoo/test_acoustic_wave.py +++ /dev/null @@ -1,19 +0,0 @@ -import pytest -from pina.problem.zoo import AcousticWaveProblem -from pina.problem import SpatialProblem, TimeDependentProblem - - -@pytest.mark.parametrize("c", [0.1, 1]) -def test_constructor(c): - - problem = AcousticWaveProblem(c=c) - problem.discretise_domain(n=10, mode="random", domains="all") - assert problem.are_all_domains_discretised - assert isinstance(problem, SpatialProblem) - assert isinstance(problem, TimeDependentProblem) - assert hasattr(problem, "conditions") - assert isinstance(problem.conditions, dict) - - # Should fail if c is not a float or int - with pytest.raises(ValueError): - AcousticWaveProblem(c="invalid") diff --git a/tests/test_problem_zoo/test_acoustic_wave_problem.py b/tests/test_problem_zoo/test_acoustic_wave_problem.py new file mode 100644 index 000000000..a5102efae --- /dev/null +++ b/tests/test_problem_zoo/test_acoustic_wave_problem.py @@ -0,0 +1,36 @@ +import pytest +import torch +from pina.problem.zoo import AcousticWaveProblem +from pina.problem import SpatialProblem, TimeDependentProblem + + +@pytest.mark.parametrize("c", [0.1, 1]) +def test_constructor(c): + + problem = AcousticWaveProblem(c=c) + problem.discretise_domain(n=10, mode="random", 
domains=None) + assert problem.are_all_domains_discretised + assert isinstance(problem, SpatialProblem) + assert isinstance(problem, TimeDependentProblem) + assert hasattr(problem, "conditions") + assert isinstance(problem.conditions, dict) + + # Should fail if c is not a float or int + with pytest.raises(ValueError): + AcousticWaveProblem(c="invalid") + + +@pytest.mark.parametrize("c", [0.1, 1]) +def test_solution(c): + + # Find the solution to the problem + problem = AcousticWaveProblem(c=c) + problem.discretise_domain(n=10, mode="grid", domains=None) + pts = problem.discretised_domains["D"] + solution = problem.solution(pts.requires_grad_()) + + # Compute the residual + residual = problem.conditions["D"].equation.residual(pts, solution).tensor + + # Assert the residual of the PDE is close to zero + assert torch.allclose(residual, torch.zeros_like(residual), atol=5e-5) diff --git a/tests/test_problem_zoo/test_advection.py b/tests/test_problem_zoo/test_advection.py deleted file mode 100644 index e1a656a74..000000000 --- a/tests/test_problem_zoo/test_advection.py +++ /dev/null @@ -1,19 +0,0 @@ -import pytest -from pina.problem.zoo import AdvectionProblem -from pina.problem import SpatialProblem, TimeDependentProblem - - -@pytest.mark.parametrize("c", [1.5, 3]) -def test_constructor(c): - - problem = AdvectionProblem(c=c) - problem.discretise_domain(n=10, mode="random", domains="all") - assert problem.are_all_domains_discretised - assert isinstance(problem, SpatialProblem) - assert isinstance(problem, TimeDependentProblem) - assert hasattr(problem, "conditions") - assert isinstance(problem.conditions, dict) - - # Should fail if c is not a float or int - with pytest.raises(ValueError): - AdvectionProblem(c="invalid") diff --git a/tests/test_problem_zoo/test_advection_problem.py b/tests/test_problem_zoo/test_advection_problem.py new file mode 100644 index 000000000..0d7114771 --- /dev/null +++ b/tests/test_problem_zoo/test_advection_problem.py @@ -0,0 +1,36 @@ +import 
pytest +import torch +from pina.problem.zoo import AdvectionProblem +from pina.problem import SpatialProblem, TimeDependentProblem + + +@pytest.mark.parametrize("c", [1.5, 3]) +def test_constructor(c): + + problem = AdvectionProblem(c=c) + problem.discretise_domain(n=10, mode="random", domains=None) + assert problem.are_all_domains_discretised + assert isinstance(problem, SpatialProblem) + assert isinstance(problem, TimeDependentProblem) + assert hasattr(problem, "conditions") + assert isinstance(problem.conditions, dict) + + # Should fail if c is not a float or int + with pytest.raises(ValueError): + AdvectionProblem(c="invalid") + + +@pytest.mark.parametrize("c", [1.5, 3]) +def test_solution(c): + + # Find the solution to the problem + problem = AdvectionProblem(c=c) + problem.discretise_domain(n=10, mode="grid", domains=None) + pts = problem.discretised_domains["D"] + solution = problem.solution(pts.requires_grad_()) + + # Compute the residual + residual = problem.conditions["D"].equation.residual(pts, solution).tensor + + # Assert the residual of the PDE is close to zero + assert torch.allclose(residual, torch.zeros_like(residual), atol=5e-5) diff --git a/tests/test_problem_zoo/test_allen_cahn.py b/tests/test_problem_zoo/test_allen_cahn_problem.py similarity index 92% rename from tests/test_problem_zoo/test_allen_cahn.py rename to tests/test_problem_zoo/test_allen_cahn_problem.py index 80c11ce5c..2406e1f75 100644 --- a/tests/test_problem_zoo/test_allen_cahn.py +++ b/tests/test_problem_zoo/test_allen_cahn_problem.py @@ -8,7 +8,7 @@ def test_constructor(alpha, beta): problem = AllenCahnProblem(alpha=alpha, beta=beta) - problem.discretise_domain(n=10, mode="random", domains="all") + problem.discretise_domain(n=10, mode="random", domains=None) assert problem.are_all_domains_discretised assert isinstance(problem, SpatialProblem) assert isinstance(problem, TimeDependentProblem) diff --git a/tests/test_problem_zoo/test_diffusion_reaction.py 
b/tests/test_problem_zoo/test_diffusion_reaction_problem.py similarity index 50% rename from tests/test_problem_zoo/test_diffusion_reaction.py rename to tests/test_problem_zoo/test_diffusion_reaction_problem.py index 163d30f55..d8decf697 100644 --- a/tests/test_problem_zoo/test_diffusion_reaction.py +++ b/tests/test_problem_zoo/test_diffusion_reaction_problem.py @@ -1,4 +1,5 @@ import pytest +import torch from pina.problem.zoo import DiffusionReactionProblem from pina.problem import TimeDependentProblem, SpatialProblem @@ -7,7 +8,7 @@ def test_constructor(alpha): problem = DiffusionReactionProblem(alpha=alpha) - problem.discretise_domain(n=10, mode="random", domains="all") + problem.discretise_domain(n=10, mode="random", domains=None) assert problem.are_all_domains_discretised assert isinstance(problem, TimeDependentProblem) assert isinstance(problem, SpatialProblem) @@ -17,3 +18,19 @@ def test_constructor(alpha): # Should fail if alpha is not a float or int with pytest.raises(ValueError): problem = DiffusionReactionProblem(alpha="invalid") + + +@pytest.mark.parametrize("alpha", [0.1, 1]) +def test_solution(alpha): + + # Find the solution to the problem + problem = DiffusionReactionProblem(alpha=alpha) + problem.discretise_domain(n=10, mode="grid", domains=None) + pts = problem.discretised_domains["D"] + solution = problem.solution(pts.requires_grad_()) + + # Compute the residual + residual = problem.conditions["D"].equation.residual(pts, solution).tensor + + # Assert the residual of the PDE is close to zero + assert torch.allclose(residual, torch.zeros_like(residual), atol=5e-5) diff --git a/tests/test_problem_zoo/test_helmholtz.py b/tests/test_problem_zoo/test_helmholtz.py deleted file mode 100644 index 4668c6996..000000000 --- a/tests/test_problem_zoo/test_helmholtz.py +++ /dev/null @@ -1,19 +0,0 @@ -import pytest -from pina.problem.zoo import HelmholtzProblem -from pina.problem import SpatialProblem - - -@pytest.mark.parametrize("k", [1.5, 3]) 
-@pytest.mark.parametrize("alpha_x", [1, 3]) -@pytest.mark.parametrize("alpha_y", [1, 3]) -def test_constructor(k, alpha_x, alpha_y): - - problem = HelmholtzProblem(k=k, alpha_x=alpha_x, alpha_y=alpha_y) - problem.discretise_domain(n=10, mode="random", domains="all") - assert problem.are_all_domains_discretised - assert isinstance(problem, SpatialProblem) - assert hasattr(problem, "conditions") - assert isinstance(problem.conditions, dict) - - with pytest.raises(ValueError): - HelmholtzProblem(k=1, alpha_x=1.5, alpha_y=1) diff --git a/tests/test_problem_zoo/test_helmholtz_problem.py b/tests/test_problem_zoo/test_helmholtz_problem.py new file mode 100644 index 000000000..408e32a33 --- /dev/null +++ b/tests/test_problem_zoo/test_helmholtz_problem.py @@ -0,0 +1,38 @@ +import pytest +import torch +from pina.problem.zoo import HelmholtzProblem +from pina.problem import SpatialProblem + + +@pytest.mark.parametrize("k", [1.5, 3]) +@pytest.mark.parametrize("alpha_x", [1, 3]) +@pytest.mark.parametrize("alpha_y", [1, 3]) +def test_constructor(k, alpha_x, alpha_y): + + problem = HelmholtzProblem(k=k, alpha_x=alpha_x, alpha_y=alpha_y) + problem.discretise_domain(n=10, mode="random", domains=None) + assert problem.are_all_domains_discretised + assert isinstance(problem, SpatialProblem) + assert hasattr(problem, "conditions") + assert isinstance(problem.conditions, dict) + + with pytest.raises(ValueError): + HelmholtzProblem(k=1, alpha_x=1.5, alpha_y=1) + + +@pytest.mark.parametrize("k", [1.5, 3]) +@pytest.mark.parametrize("alpha_x", [1, 3]) +@pytest.mark.parametrize("alpha_y", [1, 3]) +def test_solution(k, alpha_x, alpha_y): + + # Find the solution to the problem + problem = HelmholtzProblem(k=k, alpha_x=alpha_x, alpha_y=alpha_y) + problem.discretise_domain(n=10, mode="grid", domains=None) + pts = problem.discretised_domains["D"] + solution = problem.solution(pts.requires_grad_()) + + # Compute the residual + residual = problem.conditions["D"].equation.residual(pts, 
solution).tensor + + # Assert the residual of the PDE is close to zero + assert torch.allclose(residual, torch.zeros_like(residual), atol=5e-5) diff --git a/tests/test_problem_zoo/test_inverse_poisson_2d_square.py b/tests/test_problem_zoo/test_inverse_poisson_problem.py similarity index 93% rename from tests/test_problem_zoo/test_inverse_poisson_2d_square.py rename to tests/test_problem_zoo/test_inverse_poisson_problem.py index 423d15d74..25af3ae9e 100644 --- a/tests/test_problem_zoo/test_inverse_poisson_2d_square.py +++ b/tests/test_problem_zoo/test_inverse_poisson_problem.py @@ -11,7 +11,7 @@ def test_constructor(load, data_size): problem = InversePoisson2DSquareProblem(load=load, data_size=data_size) # Discretise the domain - problem.discretise_domain(n=10, mode="random", domains="all") + problem.discretise_domain(n=10, mode="random", domains=None) # Check if the problem is correctly set up assert problem.are_all_domains_discretised diff --git a/tests/test_problem_zoo/test_poisson_2d_square.py b/tests/test_problem_zoo/test_poisson_2d_square.py deleted file mode 100644 index a9e6fa973..000000000 --- a/tests/test_problem_zoo/test_poisson_2d_square.py +++ /dev/null @@ -1,12 +0,0 @@ -from pina.problem.zoo import Poisson2DSquareProblem -from pina.problem import SpatialProblem - - -def test_constructor(): - - problem = Poisson2DSquareProblem() - problem.discretise_domain(n=10, mode="random", domains="all") - assert problem.are_all_domains_discretised - assert isinstance(problem, SpatialProblem) - assert hasattr(problem, "conditions") - assert isinstance(problem.conditions, dict) diff --git a/tests/test_problem_zoo/test_poisson_problem.py b/tests/test_problem_zoo/test_poisson_problem.py new file mode 100644 index 000000000..b093329bd --- /dev/null +++ b/tests/test_problem_zoo/test_poisson_problem.py @@ -0,0 +1,28 @@ +import torch +from pina.problem.zoo import Poisson2DSquareProblem +from pina.problem import SpatialProblem + + +def test_constructor(): + + problem = 
Poisson2DSquareProblem() + problem.discretise_domain(n=10, mode="random", domains=None) + assert problem.are_all_domains_discretised + assert isinstance(problem, SpatialProblem) + assert hasattr(problem, "conditions") + assert isinstance(problem.conditions, dict) + + +def test_solution(): + + # Find the solution to the problem + problem = Poisson2DSquareProblem() + problem.discretise_domain(n=10, mode="grid", domains=None) + pts = problem.discretised_domains["D"] + solution = problem.solution(pts.requires_grad_()) + + # Compute the residual + residual = problem.conditions["D"].equation.residual(pts, solution).tensor + + # Assert the residual of the PDE is close to zero + assert torch.allclose(residual, torch.zeros_like(residual), atol=5e-5) diff --git a/tests/test_problem_zoo/test_supervised_problem.py b/tests/test_problem_zoo/test_supervised_problem.py index da18d6146..e638623f3 100644 --- a/tests/test_problem_zoo/test_supervised_problem.py +++ b/tests/test_problem_zoo/test_supervised_problem.py @@ -1,5 +1,5 @@ import torch -from pina.problem import AbstractProblem +from pina.problem import BaseProblem from pina.condition import InputTargetCondition from pina.problem.zoo import SupervisedProblem from pina.graph import RadiusGraph @@ -9,7 +9,7 @@ def test_constructor(): input_ = torch.rand((100, 10)) output_ = torch.rand((100, 10)) problem = SupervisedProblem(input_=input_, output_=output_) - assert isinstance(problem, AbstractProblem) + assert isinstance(problem, BaseProblem) assert hasattr(problem, "conditions") assert isinstance(problem.conditions, dict) assert list(problem.conditions.keys()) == ["data"] @@ -25,7 +25,7 @@ def test_constructor_graph(): ] output_ = torch.rand((20, 100, 10)) problem = SupervisedProblem(input_=input_, output_=output_) - assert isinstance(problem, AbstractProblem) + assert isinstance(problem, BaseProblem) assert hasattr(problem, "conditions") assert isinstance(problem.conditions, dict) assert list(problem.conditions.keys()) == 
["data"] diff --git a/tests/test_solver/test_autoregressive_solver.py b/tests/test_solver/test_autoregressive_solver.py index 2216be9bf..c35c6137e 100644 --- a/tests/test_solver/test_autoregressive_solver.py +++ b/tests/test_solver/test_autoregressive_solver.py @@ -6,7 +6,7 @@ from pina import Condition, Trainer, LabelTensor from pina.solver import AutoregressiveSolver from pina.condition import DataCondition -from pina.problem import AbstractProblem +from pina.problem import BaseProblem from pina.model import FeedForward @@ -47,7 +47,7 @@ def create_data(n_traj, t_steps, n_feats, unroll_length, n_unrolls, use_lt): # Problem -class Problem(AbstractProblem): +class Problem(BaseProblem): input_variables = [f"feat_{i}" for i in range(n_feats)] output_variables = [f"feat_{i}" for i in range(n_feats)] diff --git a/tests/test_solver/test_ensemble_supervised_solver.py b/tests/test_solver/test_ensemble_supervised_solver.py index 71c78690f..8359133d7 100644 --- a/tests/test_solver/test_ensemble_supervised_solver.py +++ b/tests/test_solver/test_ensemble_supervised_solver.py @@ -2,17 +2,16 @@ import pytest from torch._dynamo.eval_frame import OptimizedModule from torch_geometric.nn import GCNConv -from torch_geometric.utils import to_dense_batch from pina import Condition, LabelTensor from pina.condition import InputTargetCondition -from pina.problem import AbstractProblem +from pina.problem import BaseProblem from pina.solver import DeepEnsembleSupervisedSolver from pina.model import FeedForward from pina.trainer import Trainer from pina.graph import KNNGraph -class LabelTensorProblem(AbstractProblem): +class LabelTensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { @@ -23,7 +22,7 @@ class LabelTensorProblem(AbstractProblem): } -class TensorProblem(AbstractProblem): +class TensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { @@ -40,7 +39,7 @@ class TensorProblem(AbstractProblem): 
] -class GraphProblem(AbstractProblem): +class GraphProblem(BaseProblem): output_variables = None conditions = {"data": Condition(input=input_, target=output_)} @@ -54,7 +53,7 @@ class GraphProblem(AbstractProblem): ] -class GraphProblemLT(AbstractProblem): +class GraphProblemLT(BaseProblem): output_variables = ["u"] input_variables = ["a", "b", "c", "d", "e"] conditions = {"data": Condition(input=input_, target=output_)} diff --git a/tests/test_solver/test_garom.py b/tests/test_solver/test_garom.py index 1c09b01b7..f73a5e557 100644 --- a/tests/test_solver/test_garom.py +++ b/tests/test_solver/test_garom.py @@ -5,13 +5,13 @@ from pina import Condition from pina.solver import GAROM from pina.condition import InputTargetCondition -from pina.problem import AbstractProblem +from pina.problem import BaseProblem from pina.model import FeedForward from pina.trainer import Trainer from torch._dynamo.eval_frame import OptimizedModule -class TensorProblem(AbstractProblem): +class TensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { diff --git a/tests/test_solver/test_reduced_order_model_solver.py b/tests/test_solver/test_reduced_order_model_solver.py index 5427ec7a2..5bda0a3ae 100644 --- a/tests/test_solver/test_reduced_order_model_solver.py +++ b/tests/test_solver/test_reduced_order_model_solver.py @@ -2,7 +2,7 @@ import pytest from pina import Condition, LabelTensor -from pina.problem import AbstractProblem +from pina.problem import BaseProblem from pina.condition import InputTargetCondition from pina.solver import ReducedOrderModelSolver from pina.trainer import Trainer @@ -11,7 +11,7 @@ from torch._dynamo.eval_frame import OptimizedModule -class LabelTensorProblem(AbstractProblem): +class LabelTensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { @@ -22,7 +22,7 @@ class LabelTensorProblem(AbstractProblem): } -class TensorProblem(AbstractProblem): +class 
TensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { diff --git a/tests/test_solver/test_supervised_solver.py b/tests/test_solver/test_supervised_solver.py index c39e6034e..921709faa 100644 --- a/tests/test_solver/test_supervised_solver.py +++ b/tests/test_solver/test_supervised_solver.py @@ -2,17 +2,16 @@ import pytest from torch._dynamo.eval_frame import OptimizedModule from torch_geometric.nn import GCNConv -from torch_geometric.utils import to_dense_batch from pina import Condition, LabelTensor from pina.condition import InputTargetCondition -from pina.problem import AbstractProblem +from pina.problem import BaseProblem from pina.solver import SupervisedSolver from pina.model import FeedForward from pina.trainer import Trainer from pina.graph import KNNGraph -class LabelTensorProblem(AbstractProblem): +class LabelTensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { @@ -23,7 +22,7 @@ class LabelTensorProblem(AbstractProblem): } -class TensorProblem(AbstractProblem): +class TensorProblem(BaseProblem): input_variables = ["u_0", "u_1"] output_variables = ["u"] conditions = { @@ -40,7 +39,7 @@ class TensorProblem(AbstractProblem): ] -class GraphProblem(AbstractProblem): +class GraphProblem(BaseProblem): output_variables = None conditions = {"data": Condition(input=input_, target=output_)} @@ -54,7 +53,7 @@ class GraphProblem(AbstractProblem): ] -class GraphProblemLT(AbstractProblem): +class GraphProblemLT(BaseProblem): output_variables = ["u"] input_variables = ["a", "b", "c", "d", "e"] conditions = {"data": Condition(input=input_, target=output_)}