Add details to description
app.py CHANGED
@@ -127,7 +127,19 @@ def train_model(n_train, noise):
     return fig, out_str
 
 title = "Single estimator versus bagging: bias-variance decomposition ⚖️"
-description = "This example illustrates and compares the bias-variance decomposition of the
+description = """This example illustrates and compares the bias-variance decomposition of the \
+expected mean squared error of a single estimator (a Decision Tree Regressor) \
+against a bagging ensemble of Tree Regressors. \
+
+The dataset used for this demo is a one-dimensional synthetic dataset generated \
+for a regression problem. In the top two figures, the blue line represents the true \
+function and the blue dots represent the training data, obtained by adding \
+user-selected random noise. The prediction of the models is represented by the red line. \
+The average prediction of each estimator is shown in cyan.
+
+In the two lower figures, we can see the decomposition of the expected mean squared error \
+(red) into the bias (blue) and variance (green), as well as the noise part of the error (cyan).
+"""
 with gr.Blocks() as demo:
     gr.Markdown(f"## {title}")
     gr.Markdown(description)
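
For readers who want the mechanics behind the lower two figures, here is a minimal sketch of the bias-variance decomposition the new description refers to. It follows the setup of scikit-learn's "Single estimator versus bagging" example; the true function `f`, the helper `make_train_set`, and all parameter values are illustrative assumptions, not code from `app.py`.

```python
import numpy as np
from sklearn.ensemble import BaggingRegressor
from sklearn.tree import DecisionTreeRegressor

def f(x):
    # Hypothetical ground-truth function (borrowed from the scikit-learn example).
    return np.exp(-(x ** 2)) + 1.5 * np.exp(-((x - 2) ** 2))

def make_train_set(n_train, noise, rng):
    # One noisy training set: x ~ U(-5, 5), y = f(x) + N(0, noise^2).
    x = rng.uniform(-5, 5, n_train)
    return x.reshape(-1, 1), f(x) + rng.normal(0.0, noise, n_train)

rng = np.random.default_rng(0)
n_repeat, n_train, noise = 50, 50, 0.1        # assumed demo defaults
X_test = np.linspace(-5, 5, 200).reshape(-1, 1)
y_true = f(X_test.ravel())

for name, make_est in [
    ("single tree", lambda: DecisionTreeRegressor()),
    ("bagging", lambda: BaggingRegressor(DecisionTreeRegressor())),
]:
    # Refit on n_repeat independently drawn noisy training sets and
    # collect the predictions on a fixed test grid.
    preds = np.empty((n_repeat, len(X_test)))
    for i in range(n_repeat):
        X_tr, y_tr = make_train_set(n_train, noise, rng)
        preds[i] = make_est().fit(X_tr, y_tr).predict(X_test)

    # Pointwise decomposition: E[(y - yhat)^2] = bias^2 + variance + noise^2.
    bias2 = (preds.mean(axis=0) - y_true) ** 2
    variance = preds.var(axis=0)
    print(f"{name}: bias^2={bias2.mean():.4f}, "
          f"variance={variance.mean():.4f}, noise={noise ** 2:.4f}")
```

Averaging predictions over many resampled training sets is what lets the squared bias and the pointwise variance be estimated separately; bagging should show noticeably lower variance than the single tree at a similar bias.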
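And a hedged sketch of how `train_model`, `title`, and `description` could wire into the Blocks app shown in the diff. Only the `gr.Blocks`/`gr.Markdown` lines come from the commit; the sliders, plot, and button are assumptions about the rest of `app.py`.

```python
import gradio as gr

# title, description, and train_model(n_train, noise) -> (fig, out_str)
# are assumed to be defined earlier in app.py, as in the diff above.
with gr.Blocks() as demo:
    gr.Markdown(f"## {title}")
    gr.Markdown(description)
    n_train = gr.Slider(10, 200, value=50, step=10, label="Training samples")   # assumed range
    noise = gr.Slider(0.0, 0.5, value=0.1, step=0.05, label="Noise std. dev.")  # assumed range
    plot = gr.Plot(label="Bias-variance decomposition")
    out = gr.Textbox(label="Error summary")
    gr.Button("Train").click(train_model, inputs=[n_train, noise], outputs=[plot, out])

demo.launch()
```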