Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 9 additions & 1 deletion sysidentpy/basis_function/_fourier.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,9 @@ class Fourier(BaseBasisFunction):
degree : int (max_degree), default=2
The maximum degree of the polynomial features.

include_bias : bool, default=True
Whether to include the bias (constant) term in the output feature matrix.

Notes
-----
Be aware that the number of features in the output array scales
Expand All @@ -47,11 +50,12 @@ class Fourier(BaseBasisFunction):
"""

def __init__(
    self,
    n: int = 1,
    p: float = 2 * np.pi,
    degree: int = 1,
    ensemble: bool = True,
    include_bias: bool = True,
):
    """Configure the Fourier basis expansion.

    Parameters
    ----------
    n : int, default=1
        Number of harmonics used in the Fourier expansion.
    p : float, default=2*pi
        Period of the Fourier expansion.
    degree : int, default=1
        Maximum degree of the polynomial features.
    ensemble : bool, default=True
        Whether to combine the Fourier features with the original ones.
    include_bias : bool, default=True
        Whether to include the bias (constant) column in the output
        feature matrix.

    Notes
    -----
    ``include_bias`` is placed AFTER ``ensemble`` so that existing callers
    passing ``ensemble`` positionally keep their meaning unchanged.
    """
    self.n = n
    self.p = p
    self.degree = degree
    self.ensemble = ensemble
    self.include_bias = include_bias

def _fourier_expansion(self, data: np.ndarray, n: int):
Expand Down Expand Up @@ -125,6 +129,10 @@ def fit(
else:
psi = psi[:, 1:]

if self.include_bias:
bias_column = np.ones((psi.shape[0], 1))
psi = np.hstack((bias_column, psi))

if predefined_regressors is None:
return psi

Expand Down
13 changes: 10 additions & 3 deletions sysidentpy/basis_function/_polynomial.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ class Polynomial(BaseBasisFunction):
degree : int (max_degree), default=2
The maximum degree of the polynomial features.

include_bias : bool, default=True
Whether to include the bias (constant) term in the output feature matrix.

Notes
-----
Be aware that the number of features in the output array scales
Expand All @@ -38,9 +41,10 @@ class Polynomial(BaseBasisFunction):
def __init__(
    self,
    degree: int = 2,
    include_bias: bool = True,
):
    """Configure the polynomial basis expansion.

    Parameters
    ----------
    degree : int, default=2
        Maximum degree of the polynomial features.
    include_bias : bool, default=True
        Whether to include the bias (constant) column in the output
        feature matrix.
    """
    self.degree = degree
    self.include_bias = include_bias
    # Cache combination indices per (n_features, degree) so repeated
    # fit/transform calls do not rebuild the same index arrays.
    self._combination_cache: Dict[Tuple[int, int], np.ndarray] = {}

def _get_combination_indices(self, n_features: int) -> np.ndarray:
Expand Down Expand Up @@ -115,9 +119,12 @@ def fit(
The lagged matrix built in respect with each lag and column.

"""
# Create combinations of all columns based on its index
psi = self._evaluate_terms(data, predefined_regressors)
return psi[max_lag:, :]
psi = psi[max_lag:, :]
if self.include_bias:
bias_column = np.ones((psi.shape[0], 1))
psi = np.hstack((bias_column, psi))
return psi

def transform(
self,
Expand Down
42 changes: 39 additions & 3 deletions sysidentpy/basis_function/tests/test_basis_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ def test_fit_polynomial():
basis_function = Polynomial(degree=2)
data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))
max_lag = 1
output = np.array([[4, 6, 8, 9, 12, 16], [9, 9, 9, 9, 9, 9]])
output = np.array([[1, 4, 6, 8, 9, 12, 16], [1, 9, 9, 9, 9, 9, 9]])

r = basis_function.fit(data=data, max_lag=max_lag)

Expand All @@ -21,7 +21,7 @@ def test_fit_polynomial_predefined():
data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))
max_lag = 1
predefined_regressors = np.array([0, 2, 4])
output = np.array([[4, 8, 12], [9, 9, 9]])
output = np.array([[1, 6, 9], [1, 9, 9]])

r = basis_function.fit(
data=data, max_lag=max_lag, predefined_regressors=predefined_regressors
Expand All @@ -34,13 +34,31 @@ def test_transform_polynomial():
basis_function = Polynomial(degree=2)
data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))
max_lag = 1
output = np.array([[4, 6, 8, 9, 12, 16], [9, 9, 9, 9, 9, 9]])
output = np.array([[1, 4, 6, 8, 9, 12, 16], [1, 9, 9, 9, 9, 9, 9]])

r = basis_function.transform(data=data, max_lag=max_lag)

assert_array_equal(output, r)


def test_polynomial_include_bias():
    # With include_bias=True the first column of the regressor matrix
    # must be the constant (all-ones) term.
    bf = Polynomial(degree=2, include_bias=True)
    x = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))

    psi = bf.fit(data=x, max_lag=1)

    assert np.all(psi[:, 0] == 1)


def test_polynomial_no_bias():
    # With include_bias=False no constant column is prepended: the matrix
    # must have exactly one fewer column than the include_bias=True case,
    # and its first column must not be all ones.
    data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))
    max_lag = 1

    r_no_bias = Polynomial(degree=2, include_bias=False).fit(
        data=data, max_lag=max_lag
    )
    r_bias = Polynomial(degree=2, include_bias=True).fit(data=data, max_lag=max_lag)

    assert r_no_bias.shape[1] == r_bias.shape[1] - 1
    assert not np.all(r_no_bias[:, 0] == 1)


def test_fit_fourier():
basis_function = Fourier(n=5, ensemble=False)
data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))
Expand Down Expand Up @@ -173,6 +191,24 @@ def test_transform_fourier():
assert_almost_equal(output, r, decimal=7)


def test_fourier_include_bias():
    # With include_bias=True the first column of the regressor matrix
    # must be the constant (all-ones) term.
    bf = Fourier(n=5, ensemble=False, include_bias=True)
    x = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))

    psi = bf.fit(data=x, max_lag=1)

    assert np.all(psi[:, 0] == 1)


def test_fourier_no_bias():
    # With include_bias=False no constant column is prepended: the matrix
    # must have exactly one fewer column than the include_bias=True case,
    # and its first column must not be all ones.
    data = np.array(([1, 1, 1], [2, 3, 4], [3, 3, 3]))
    max_lag = 1

    r_no_bias = Fourier(n=5, ensemble=False, include_bias=False).fit(
        data=data, max_lag=max_lag
    )
    r_bias = Fourier(n=5, ensemble=False, include_bias=True).fit(
        data=data, max_lag=max_lag
    )

    assert r_no_bias.shape[1] == r_bias.shape[1] - 1
    assert not np.all(r_no_bias[:, 0] == 1)


class _DummyBasis(BaseBasisFunction):
def __init__(self, degree=3):
super().__init__(degree=degree)
Expand Down