Hi,

Indeed, `GradientBoosting` is not an analytical function and thus cannot be cast like that.

You need to rely on a `PythonFunction`. Here is an example for that purpose:

```
import openturns as ot
import numpy as np
class SklearnPyFunction(ot.OpenTURNSPythonFunction):
    """
    Wrap a trained scikit-learn estimator as an OpenTURNS Python function.

    Parameters
    ----------
    algo : a scikit-learn estimator
        Algo for response surface, already trained/validated.
    in_dim : int
        Input dimension.
    out_dim : int
        Output dimension.
    """

    def __init__(self, algo, in_dim, out_dim):
        super(SklearnPyFunction, self).__init__(in_dim, out_dim)
        self.algo = algo

    def _exec(self, x):
        # Single-point evaluation: sklearn expects a 2-D (n_samples, n_features)
        # array, so promote the point to a one-row matrix before predicting.
        row = np.reshape(x, (1, -1))
        return self.algo.predict(row).ravel()

    def _exec_sample(self, x):
        # Whole-sample evaluation in one predict() call, reshaped so that the
        # result has one row per input point and one column per output.
        sample = np.array(x)
        n_points = len(sample)
        return self.algo.predict(sample).reshape(n_points, self.getOutputDimension())
class GradientBoosting(ot.Function):
    """
    Define an OpenTURNS Function using sklearn algorithms.

    Calling ``GradientBoosting(algo, in_dim, out_dim)`` does not build an
    instance of this class: ``__new__`` returns a plain ``ot.Function``
    wrapping the scikit-learn estimator, so the result is usable anywhere
    OpenTURNS expects a Function.

    Parameters
    ----------
    algo : a scikit-learn estimator
        Algo for response surface, already trained/validated.
    in_dim : int
        Input dimension.
    out_dim : int
        Output dimension.
    """

    def __new__(cls, algo, in_dim, out_dim):
        # __new__ receives the class itself, so the conventional name is
        # `cls` (the original `self` was misleading — no instance exists yet).
        python_function = SklearnPyFunction(algo, in_dim, out_dim)
        return ot.Function(python_function)
```

As an example:

```
import openturns as ot
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor

# Train a scikit-learn regressor on samples of an analytical model,
# then wrap it as an OpenTURNS Function and evaluate it.
size = 10
# SymbolicFunction expects sequences: a list of input names and a list of
# formulas (a bare string is not a valid argument pair).
model = ot.SymbolicFunction(
    ["x"], ["(1.0 + sign(x)) * cos(x) - (sign(x) - 1) * sin(2*x)"]
)
dataX = ot.Uniform().getSample(size)
dataY = model(dataX)
algo = GradientBoostingRegressor()
# sklearn expects a 1-D target array; the model output is a (size, 1)
# Sample, so flatten it to avoid a DataConversionWarning.
algo.fit(dataX, np.ravel(dataY))
f = GradientBoosting(algo, 1, 1)
print(f(dataX))
```

Hope this helps

BR

Sofiane