diff --git a/sysidentpy/basis_function/_fourier.py b/sysidentpy/basis_function/_fourier.py index ae6d1f27..bcc1e55b 100644 --- a/sysidentpy/basis_function/_fourier.py +++ b/sysidentpy/basis_function/_fourier.py @@ -39,6 +39,9 @@ class Fourier(BaseBasisFunction): degree : int (max_degree), default=2 The maximum degree of the polynomial features. + include_bias : bool, default=True + Whether to include the bias (constant) term in the output feature matrix. + Notes ----- Be aware that the number of features in the output array scales @@ -47,11 +50,12 @@ class Fourier(BaseBasisFunction): """ def __init__( - self, n: int = 1, p: float = 2 * np.pi, degree: int = 1, ensemble: bool = True + self, n: int = 1, p: float = 2 * np.pi, degree: int = 1, ensemble: bool = True, include_bias: bool = True ): self.n = n self.p = p self.degree = degree + self.include_bias = include_bias self.ensemble = ensemble def _fourier_expansion(self, data: np.ndarray, n: int): @@ -125,6 +129,10 @@ def fit( else: psi = psi[:, 1:] + if self.include_bias: + bias_column = np.ones((psi.shape[0], 1)) + psi = np.hstack((bias_column, psi)) + if predefined_regressors is None: return psi diff --git a/sysidentpy/basis_function/_polynomial.py b/sysidentpy/basis_function/_polynomial.py index a9c02433..7acb926a 100644 --- a/sysidentpy/basis_function/_polynomial.py +++ b/sysidentpy/basis_function/_polynomial.py @@ -28,6 +28,9 @@ class Polynomial(BaseBasisFunction): degree : int (max_degree), default=2 The maximum degree of the polynomial features. + include_bias : bool, default=True + Whether to include the bias (constant) term in the output feature matrix. 
+ Notes ----- Be aware that the number of features in the output array scales @@ -38,9 +41,10 @@ class Polynomial(BaseBasisFunction): def __init__( self, degree: int = 2, + include_bias: bool = True, ): self.degree = degree - # Cache combination indices per (n_features, degree) to avoid rebuilding + self.include_bias = include_bias self._combination_cache: Dict[Tuple[int, int], np.ndarray] = {} def _get_combination_indices(self, n_features: int) -> np.ndarray: @@ -115,9 +119,12 @@ def fit( The lagged matrix built in respect with each lag and column. """ - # Create combinations of all columns based on its index psi = self._evaluate_terms(data, predefined_regressors) - return psi[max_lag:, :] + psi = psi[max_lag:, :] + if self.include_bias: + bias_column = np.ones((psi.shape[0], 1)) + psi = np.hstack((bias_column, psi)) + return psi def transform( self, diff --git a/sysidentpy/basis_function/tests/test_basis_functions.py b/sysidentpy/basis_function/tests/test_basis_functions.py index f7d95851..e718ace0 100644 --- a/sysidentpy/basis_function/tests/test_basis_functions.py +++ b/sysidentpy/basis_function/tests/test_basis_functions.py @@ -9,7 +9,7 @@ def test_fit_polynomial(): basis_function = Polynomial(degree=2) data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) max_lag = 1 - output = np.array([[4, 6, 8, 9, 12, 16], [9, 9, 9, 9, 9, 9]]) + output = np.array([[1, 4, 6, 8, 9, 12, 16], [1, 9, 9, 9, 9, 9, 9]]) r = basis_function.fit(data=data, max_lag=max_lag) @@ -21,7 +21,7 @@ def test_fit_polynomial_predefined(): data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) max_lag = 1 predefined_regressors = np.array([0, 2, 4]) - output = np.array([[4, 8, 12], [9, 9, 9]]) + output = np.array([[1, 4, 8, 12], [1, 9, 9, 9]]) r = basis_function.fit( data=data, max_lag=max_lag, predefined_regressors=predefined_regressors @@ -34,13 +34,31 @@ def test_transform_polynomial(): basis_function = Polynomial(degree=2) data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) max_lag = 1 - output = 
np.array([[4, 6, 8, 9, 12, 16], [9, 9, 9, 9, 9, 9]]) + output = np.array([[1, 4, 6, 8, 9, 12, 16], [1, 9, 9, 9, 9, 9, 9]]) r = basis_function.transform(data=data, max_lag=max_lag) assert_array_equal(output, r) +def test_polynomial_include_bias(): + basis_function = Polynomial(degree=2, include_bias=True) + data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) + max_lag = 1 + + r = basis_function.fit(data=data, max_lag=max_lag) + assert np.all(r[:, 0] == 1) + + +def test_polynomial_no_bias(): + basis_function = Polynomial(degree=2, include_bias=False) + data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) + max_lag = 1 + + r = basis_function.fit(data=data, max_lag=max_lag) + assert not np.all(r[:, 0] == 1) + + def test_fit_fourier(): basis_function = Fourier(n=5, ensemble=False) data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) @@ -173,6 +191,24 @@ def test_transform_fourier(): assert_almost_equal(output, r, decimal=7) +def test_fourier_include_bias(): + basis_function = Fourier(n=5, ensemble=False, include_bias=True) + data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) + max_lag = 1 + + r = basis_function.fit(data=data, max_lag=max_lag) + assert np.all(r[:, 0] == 1) + + +def test_fourier_no_bias(): + basis_function = Fourier(n=5, ensemble=False, include_bias=False) + data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3])) + max_lag = 1 + + r = basis_function.fit(data=data, max_lag=max_lag) + assert not np.all(r[:, 0] == 1) + + class _DummyBasis(BaseBasisFunction): def __init__(self, degree=3): super().__init__(degree=degree)