diff --git a/formulaic/formula.py b/formulaic/formula.py
index 3716d4a..b9e02b8 100644
--- a/formulaic/formula.py
+++ b/formulaic/formula.py
@@ -322,20 +322,17 @@ def differentiate(
         use_sympy: bool = False,
     ) -> _SelfType:
         """
-        EXPERIMENTAL: Take the gradient of this formula. When used a linear
-        regression, evaluating a trained model on model matrices generated by
-        this formula is equivalent to estimating the gradient of that fitted
-        form with respect to `wrt`.
+        Take the gradient of this formula with respect to the variables in
+        `wrt`.
+
+        When used in a linear regression context, predictions made from the
+        model matrices generated by the differentiated formula estimate the
+        gradient of the fitted model with respect to `wrt`.
 
         Args:
             wrt: The variables with respect to which the gradient should be
                 taken.
             use_sympy: Whether to use sympy to perform symbolic differentiation.
-
-
-        Notes:
-            This method is provisional and may be removed in any future major
-            version.
         """
@@ -482,27 +479,25 @@ def differentiate(  # pylint: disable=redefined-builtin
         use_sympy: bool = False,
     ) -> SimpleFormula:
         """
-        EXPERIMENTAL: Take the gradient of this formula. When used a linear
-        regression, evaluating a trained model on model matrices generated by
-        this formula is equivalent to estimating the gradient of that fitted
-        form with respect to `wrt`.
+        Take the gradient of this formula with respect to the variables in
+        `wrt`.
+
+        When used in a linear regression context, predictions made from the
+        model matrices generated by the differentiated formula estimate the
+        gradient of the fitted model with respect to `wrt`.
 
         Args:
             wrt: The variables with respect to which the gradient should be
                 taken.
             use_sympy: Whether to use sympy to perform symbolic differentiation.
-
-
-        Notes:
-            This method is provisional and may be removed in any future major
-            version.
         """
         return SimpleFormula(
             [
                 differentiate_term(term, wrt, use_sympy=use_sympy)
                 for term in self.__terms
             ],
-            _ordering=self.ordering,
+            # Preserve term ordering even if differentiation modifies degrees/etc.
+            _ordering=OrderingMethod.NONE,
         )
 
     def get_model_matrix(
@@ -784,20 +779,17 @@ def differentiate(  # pylint: disable=redefined-builtin
         use_sympy: bool = False,
     ) -> SimpleFormula:
         """
-        EXPERIMENTAL: Take the gradient of this formula. When used a linear
-        regression, evaluating a trained model on model matrices generated by
-        this formula is equivalent to estimating the gradient of that fitted
-        form with respect to `wrt`.
+        Take the gradient of this formula with respect to the variables in
+        `wrt`.
+
+        When used in a linear regression context, predictions made from the
+        model matrices generated by the differentiated formula estimate the
+        gradient of the fitted model with respect to `wrt`.
 
         Args:
             wrt: The variables with respect to which the gradient should be
                 taken.
             use_sympy: Whether to use sympy to perform symbolic differentiation.
-
-
-        Notes:
-            This method is provisional and may be removed in any future major
-            version.
         """
         return cast(
             SimpleFormula,
diff --git a/tests/test_formula.py b/tests/test_formula.py
index 7ce6ad9..a1bf1f5 100644
--- a/tests/test_formula.py
+++ b/tests/test_formula.py
@@ -197,6 +197,9 @@ def test_differentiate(self):
         assert f.differentiate("a") == ["1", "0", "0"]
         assert f.differentiate("c") == ["0", "0", "0"]
 
+        g = Formula("a:b + b:c + c:d - 1")
+        assert g.differentiate("b") == ["a", "c", "0"]  # order preserved
+
     def test_differentiate_with_sympy(self):
         pytest.importorskip("sympy")
         f = Formula("a + b + log(c) - 1")
@@ -208,6 +211,9 @@ def test_differentiate_with_sympy(self):
             "rhs": ["0", "(1/x)"],
         }
 
+        h = Formula("a + {a**2} + b - 1").differentiate("a", use_sympy=True)
+        assert h == ["1", "(2*a)", "0"]  # order preserved
+
     def test_repr(self, formula_expr, formula_exprs):
         assert repr(formula_expr) == "1 + a + b + c + a:b + a:c + b:c + a:b:c"
         assert repr(formula_exprs) == ".lhs:\n a\n.rhs:\n 1 + b"