forked from openturns/openturns
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
1 changed file
with
266 additions
and
0 deletions.
There are no files selected for viewing
266 changes: 266 additions & 0 deletions
266
...c/examples/meta_modeling/polynomial_chaos_metamodel/plot_chaos_conditional_expectation.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,266 @@ | ||
""" | ||
Conditional expectation of a polynomial chaos expansion | ||
======================================================= | ||
""" | ||
# %% | ||
# | ||
# In this example, we compute the conditional expectation of a polynomial chaos for the :ref:`Ishigami function <use-case-ishigami>`. | ||
|
||
|
||
# %% | ||
import openturns as ot | ||
import openturns.viewer as otv | ||
from openturns.usecases import ishigami_function | ||
import matplotlib.pyplot as plt | ||
|
||
|
||
# %% | ||
def MeanParametricPCE(chaosResult, conditioningIndices):
    """
    Return the parametric PCE of Y with other marginals set to the mean.

    All marginal inputs, except those in the conditioning indices,
    are set to the mean of the input random vector.
    The resulting function is:

        g(xu) = PCE(xu, xnotu = E[Xnotu])

    where xu is the input vector of conditioning indices,
    xnotu is the input vector of fixed indices and
    E[Xnotu] is the expectation of the random vector of the components
    not in u.

    Parameters
    ----------
    chaosResult: ot.FunctionalChaosResult(inputDimension)
        The polynomial chaos expansion.
    conditioningIndices: ot.Indices(activeInputDimension, outputDimension)
        The conditioning indices.
        The number of indices in conditioningIndices is the active
        input dimension, i.e. the dimension of the input of the
        parametric function.

    Returns
    -------
    parametricPCEFunction : ot.ParametricFunction(activeInputDimension, outputDimension)
        The parametric PCE.
    """
    distribution = chaosResult.getDistribution()
    dimension = distribution.getDimension()
    # The frozen indices are the complement of the conditioning indices
    # in {0, ..., dimension - 1}.
    frozenIndices = [
        index for index in range(dimension) if index not in conditioningIndices
    ]
    print(f"Inactive indices = {frozenIndices}")
    # Freeze each inactive marginal of the PCE metamodel at the input mean.
    metamodel = chaosResult.getMetaModel()
    meanPoint = distribution.getMean()
    frozenValues = [meanPoint[index] for index in frozenIndices]
    parametricPCEFunction = ot.ParametricFunction(
        metamodel, frozenIndices, frozenValues
    )
    return parametricPCEFunction
|
||
|
||
# %% | ||
def ComputeSparseLeastSquaresFunctionalChaos(
    inputTrain,
    outputTrain,
    multivariateBasis,
    basisSize,
    distribution,
    sparse=True,
):
    """
    Fit a least-squares polynomial chaos expansion on a training sample.

    Parameters
    ----------
    inputTrain : ot.Sample
        The input training sample.
    outputTrain : ot.Sample
        The output training sample.
    multivariateBasis : ot.OrthogonalProductPolynomialFactory
        The multivariate orthogonal polynomial basis.
    basisSize : int
        The number of candidate functions taken from the basis.
    distribution : ot.Distribution
        The input distribution.
    sparse : bool, optional
        If True, use LARS model selection to produce a sparse expansion;
        otherwise keep every candidate term (penalized least squares).

    Returns
    -------
    chaosResult : ot.FunctionalChaosResult
        The fitted polynomial chaos expansion.
    """
    # Choose the coefficient-selection algorithm: sparse (LARS) or full.
    if sparse:
        selection = ot.LeastSquaresMetaModelSelectionFactory()
    else:
        selection = ot.PenalizedLeastSquaresAlgorithmFactory()
    projection = ot.LeastSquaresStrategy(inputTrain, outputTrain, selection)
    # Fixed strategy: the candidate basis is the first basisSize functions.
    adaptive = ot.FixedStrategy(multivariateBasis, basisSize)
    algo = ot.FunctionalChaosAlgorithm(
        inputTrain, outputTrain, distribution, adaptive, projection
    )
    algo.run()
    return algo.getResult()
|
||
|
||
# %% | ||
def plotXvsY(sampleX, sampleY):
    """
    Return a grid of scatter plots of each output vs. each input marginal.

    The grid has one row per output marginal and one column per input
    marginal; axis titles are shown only on the left column and bottom row.
    """
    dimX = sampleX.getDimension()
    dimY = sampleY.getDimension()
    labelsX = sampleX.getDescription()
    labelsY = sampleY.getDescription()
    grid = ot.GridLayout(dimY, dimX)
    for row in range(dimY):
        for col in range(dimX):
            graph = ot.Graph("", labelsX[col], labelsY[row], True, "")
            graph.add(ot.Cloud(sampleX[:, col], sampleY[:, row]))
            # Blank out interior axis titles to keep the grid readable.
            graph.setYTitle(labelsY[row] if col == 0 else "")
            graph.setXTitle(labelsX[col] if row == dimY - 1 else "")
            grid.setGraph(row, col, graph)
    return grid
|
||
|
||
|
||
# %%
ot.Log.Show(ot.Log.NONE)
# Fix the seed so the example is reproducible.
ot.RandomGenerator.SetSeed(0)
im = ishigami_function.IshigamiModel()
input_names = im.distributionX.getDescription()

# Generate the training sample of the Ishigami function.
sampleSize = 1000
inputSample = im.distributionX.getSample(sampleSize)
outputSample = im.model(inputSample)


# %%
# Tensorized polynomial basis orthonormal w.r.t. the three input marginals.
multivariateBasis = ot.OrthogonalProductPolynomialFactory([im.X1, im.X2, im.X3])
totalDegree = 12
enumerateFunction = multivariateBasis.getEnumerateFunction()
# Number of candidate basis terms up to the chosen total degree.
basisSize = enumerateFunction.getBasisSizeFromTotalDegree(totalDegree)
print("Basis size = ", basisSize)
|
||
# %%
# Fit the sparse least-squares PCE on the training sample.
chaosResult = ComputeSparseLeastSquaresFunctionalChaos(
    inputSample,
    outputSample,
    multivariateBasis,
    basisSize,
    im.distributionX,
)
print("Selected basis size = ", chaosResult.getIndices().getSize())
# Bare expression: rendered as the cell output by the example gallery.
chaosResult


# %%
# Scatter plots of the output against each input marginal.
grid = plotXvsY(inputSample, outputSample)
grid.setTitle(f"n = {sampleSize}")
view = otv.View(grid, figure_kw={"figsize": (6.0, 2.5)})
plt.subplots_adjust(wspace=0.4)
|
||
# %%
# Create different parametric functions for the PCE and see the output
pceFunction = chaosResult.getMetaModel()
xMean = im.distributionX.getMean()


# %%
# For each input marginal, overlay the data cloud with the curve of the
# PCE restricted to that input, the other inputs being frozen at the mean.
inputDimension = im.distributionX.getDimension()
npPoints = 100  # number of points per 1D curve
inputRange = im.distributionX.getRange()
inputLowerBound = inputRange.getLowerBound()
inputUpperBound = inputRange.getUpperBound()
# Create the palette with transparency: a translucent data cloud lets
# the PCE curve stand out.
palette = ot.Drawable().BuildDefaultPalette(2)
firstColor = palette[0]
r, g, b, a = ot.Drawable.ConvertToRGBA(firstColor)
newAlpha = 64
newColor = ot.Drawable.ConvertFromRGBA(r, g, b, newAlpha)
palette[0] = newColor
grid = plotXvsY(inputSample, outputSample)
reducedBasisSize = chaosResult.getCoefficients().getSize()
grid.setTitle(
    f"n = {sampleSize}, total degree = {totalDegree}, "
    f"basis = {basisSize}, selected = {reducedBasisSize}"
)
for i in range(inputDimension):
    graph = grid.getGraph(0, i)
    graph.setLegends(["Data"])

    xiMin = inputLowerBound[i]
    xiMax = inputUpperBound[i]
    # Keep X_i active; every other marginal is set to its mean.
    conditioningIndices = [i]
    parametricPCEFunction = MeanParametricPCE(chaosResult, conditioningIndices)
    curve = parametricPCEFunction.draw(xiMin, xiMax, npPoints).getDrawable(0)
    curve.setLineWidth(2.0)
    # Raw string: "\m" would be an invalid escape sequence in a plain literal.
    curve.setLegend(r"$PCE(x_i, x_{-i} = \mathbb{E}[X_{-i}])$")
    graph.add(curve)
    # Show the legend only on the rightmost graph.
    if i < inputDimension - 1:
        graph.setLegends([""])
    graph.setColors(palette)
    grid.setGraph(0, i, graph)

grid.setLegendPosition("topright")
view = otv.View(
    grid,
    figure_kw={"figsize": (7.0, 2.5)},
    legend_kw={"bbox_to_anchor": (1.0, 1.0), "loc": "upper left"},
)
plt.subplots_adjust(wspace=0.4, right=0.7)
|
||
# %% Create the conditional expectation PCE: E(PCE|X0 = x0)
conditioningIndices = ot.Indices([0])
conditionalPCE = chaosResult.getConditionalExpectation(conditioningIndices)
# Bare expression: rendered as the cell output by the example gallery.
conditionalPCE

# %% Create the conditional expectation PCE: E(PCE|X1 = x1, X2 = x2)
conditioningIndices = ot.Indices([1, 2])
conditionalPCE = chaosResult.getConditionalExpectation(conditioningIndices)
conditionalPCE
|
||
|
||
# %%
# For each input marginal, compare the parametric PCE (other inputs at
# the mean) with the conditional expectation E[PCE | X_i = x_i].
inputDimension = im.distributionX.getDimension()
npPoints = 100  # number of points per 1D curve
inputRange = im.distributionX.getRange()
inputLowerBound = inputRange.getLowerBound()
inputUpperBound = inputRange.getUpperBound()
# Create the palette with transparency: a translucent data cloud lets
# the two curves stand out.
palette = ot.Drawable().BuildDefaultPalette(3)
firstColor = palette[0]
r, g, b, a = ot.Drawable.ConvertToRGBA(firstColor)
newAlpha = 64
newColor = ot.Drawable.ConvertFromRGBA(r, g, b, newAlpha)
palette[0] = newColor
grid = plotXvsY(inputSample, outputSample)
grid.setTitle(f"n = {sampleSize}, total degree = {totalDegree}")
for i in range(inputDimension):
    graph = grid.getGraph(0, i)
    graph.setLegends(["Data"])
    xiMin = inputLowerBound[i]
    xiMax = inputUpperBound[i]
    # Set all indices except i to the mean
    conditioningIndices = [i]
    parametricPCEFunction = MeanParametricPCE(chaosResult, conditioningIndices)
    # Draw the parametric function
    curve = parametricPCEFunction.draw(xiMin, xiMax, npPoints).getDrawable(0)
    curve.setLineWidth(2.0)
    curve.setLineStyle("dashed")
    curve.setLegend(r"$PCE\left(x_i, x_{-i} = \mathbb{E}[X_{-i}]\right)$")
    graph.add(curve)
    # Compute conditional expectation given all indices except i
    conditionalPCE = chaosResult.getConditionalExpectation(conditioningIndices)
    print(f"i = {i}")
    print(conditionalPCE)
    conditionalPCEFunction = conditionalPCE.getMetaModel()
    curve = conditionalPCEFunction.draw(xiMin, xiMax, npPoints).getDrawable(0)
    curve.setLineWidth(2.0)
    curve.setLegend(r"$\mathbb{E}\left[PCE | X_i = x_i\right]$")
    graph.add(curve)
    # Show the legend only on the rightmost graph.
    if i < inputDimension - 1:
        graph.setLegends([""])
    graph.setColors(palette)
    # Set the graph into the grid
    grid.setGraph(0, i, graph)

grid.setLegendPosition("topright")
view = otv.View(
    grid,
    figure_kw={"figsize": (7.0, 2.5)},
    legend_kw={"bbox_to_anchor": (1.0, 1.0), "loc": "upper left"},
)
plt.subplots_adjust(wspace=0.4, right=0.7)

# %%
# Render every pending figure (no-op outside interactive sessions).
otv.View.ShowAll()