diff --git a/book/_config.yml b/book/_config.yml
index 011975c..42c4f03 100644
--- a/book/_config.yml
+++ b/book/_config.yml
@@ -11,6 +11,8 @@ only_build_toc_files: true # Replace this with false i
html:
favicon : "figures/ewatercycle_logo.png" # Replace this with your own favicon
baseurl : "https://www.ewatercycle.org/getting-started" # Replace this with your own URL
+  extra_js:
+    - _static/custom_launch_button.js # Loads the script that adds the custom JupyterHub launch button to every page
sphinx: # Options passed on to the underlying sphinx-parser
config:
@@ -52,5 +54,6 @@ sphinx: # Options passed on to the
- sphinx_named_colors # Allows you to use custom colors in your book https://teachbooks.io/manual/external/Sphinx-Named-Colors/README.html
- sphinx_dropdown_toggle # Adds a button to toggle all dropdowns with one click
+
bibtex_bibfiles:
- references.bib # Required for references
diff --git a/book/_static/custom_launch_button.js b/book/_static/custom_launch_button.js
new file mode 100644
index 0000000..acea063
--- /dev/null
+++ b/book/_static/custom_launch_button.js
@@ -0,0 +1,69 @@
+// This code is used to make the launch button on top of the webpage
+
+window.addEventListener("DOMContentLoaded", () => {
+ // Create the modal HTML
+  const modalHTML = `
+    <!-- reconstructed modal markup: the ids below are referenced by the script -->
+    <div id="customModal" style="display:none; position:fixed; top:0; left:0; width:100%; height:100%; background:rgba(0,0,0,0.5); z-index:1000;">
+      <div style="background:white; margin:15% auto; padding:20px; border-radius:8px; max-width:420px;">
+        <h3>Enter your JupyterHub URL</h3>
+        <input type="text" id="jupyterUrlInput" placeholder="https://your-jupyterhub.example.org" style="width:100%; margin-bottom:10px;" />
+        <div style="text-align:right;">
+          <button id="launchBtn">Launch</button>
+          <button id="cancelBtn">Cancel</button>
+        </div>
+      </div>
+    </div>
+  `;
+ document.body.insertAdjacentHTML("beforeend", modalHTML);
+
+ // Create the custom button
+ const header = document.querySelector("header");
+ if (header) {
+ const customButton = document.createElement("a");
+ customButton.textContent = "🚀 Launch eWaterCycle JupyterHub";
+ customButton.className = "custom-launch-button";
+ customButton.style.backgroundColor = "#007ACC";
+ customButton.style.color = "white";
+ customButton.style.padding = "6px 12px";
+ customButton.style.marginLeft = "10px";
+ customButton.style.borderRadius = "4px";
+ customButton.style.textDecoration = "none";
+ customButton.style.fontWeight = "bold";
+ customButton.style.cursor = "pointer";
+
+ customButton.addEventListener("click", () => {
+ document.getElementById("customModal").style.display = "block";
+ });
+
+ header.appendChild(customButton);
+ }
+
+ // Modal button logic
+ const launchBtn = document.getElementById("launchBtn");
+ const cancelBtn = document.getElementById("cancelBtn");
+ const input = document.getElementById("jupyterUrlInput");
+ const modal = document.getElementById("customModal");
+
+ launchBtn.addEventListener("click", () => {
+ const baseUrl = input.value.trim();
+
+ if (baseUrl.startsWith("https://")) {
+ const repo = encodeURIComponent("https://github.com/eWaterCycle/getting-started");
+ const branch = "main";
+      // const notebookPath = "getting-started/book/some_content/first_model_run/first_run.ipynb"; // pre-rename path, kept for reference
+      const notebookPath = "getting-started/book/content/first_model_run/first_run.ipynb"; // Change to your desired notebook
+
+ const nbgitpullerUrl = `${baseUrl}/hub/user-redirect/git-pull?repo=${repo}&branch=${branch}&urlpath=lab/tree/${notebookPath}`;
+ window.open(nbgitpullerUrl, "_blank");
+ modal.style.display = "none";
+ input.value = "";
+ } else {
+ alert("Please enter a valid HTTPS URL.");
+ }
+ });
+
+ cancelBtn.addEventListener("click", () => {
+ modal.style.display = "none";
+ input.value = "";
+ });
+});
diff --git a/book/_toc.yml b/book/_toc.yml
index f37193b..471f71f 100644
--- a/book/_toc.yml
+++ b/book/_toc.yml
@@ -4,70 +4,74 @@ root: intro.md
parts:
- caption: Getting Started With eWaterCycle
chapters:
- - file: some_content/why.md
+ - file: content/why.md
sections:
- - file: some_content/why/what.md
- - file: some_content/why/sowhat.md
+ - file: content/why/what.md
+ - file: content/why/sowhat.md
- - file: some_content/first_model_run.md
+ - file: content/first_model_run.md
sections:
- - file: some_content/first_model_run/interface.ipynb
- - file: some_content/first_model_run/first_run.ipynb
+ - file: content/first_model_run/interface.ipynb
+ - file: content/first_model_run/first_run.ipynb
- - file: some_content/generate_forcing.md
+ - file: content/generate_forcing.md
sections:
- - file: some_content/forcing/caravan_forcing.ipynb
- - file: some_content/forcing/era5_forcing_caravan_shapefile.ipynb
- - file: some_content/forcing/era5_forcing_own_shapefile.ipynb
- - file: some_content/forcing/cmip_historic.ipynb
- - file: some_content/forcing/cmip_future.ipynb
- - file: some_content/forcing/manual_forcing.ipynb
+ - file: content/forcing/caravan_forcing.ipynb
+ - file: content/forcing/era5_forcing_caravan_shapefile.ipynb
+ - file: content/forcing/era5_forcing_own_shapefile.ipynb
+ - file: content/forcing/cmip_historic.ipynb
+ - file: content/forcing/cmip_future.ipynb
+ - file: content/forcing/manual_forcing.ipynb
- - file: some_content/different_models.md
+ - file: content/different_models.md
sections:
- - file: some_content/models/hbv.md
+ - file: content/models/hbv.md
sections:
+ - file: content/models/hbv.ipynb
- external: https://github.com/eWaterCycle/projects/blob/main/book/tutorials_examples/1_HBV_Caravan_ERA5/example_model_run_HBV.ipynb
- external: https://github.com/eWaterCycle/ewatercycle-hbv/blob/main/docs/example_model_run_HBV_camels_catchment_ERA5_forcing.ipynb
- - file: some_content/models/pcr.md
+ - file: content/models/pcr.md
sections:
+ - file: content/models/pcrglobwb.md
- external: https://github.com/eWaterCycle/projects/blob/main/book/tutorials_examples/2_HBV_PCRGlobWB_ERA5/example_model_run_pcrglobwb.ipynb
- external: https://github.com/eWaterCycle/ewatercycle-pcrglobwb/blob/main/docs/Irrigation.ipynb
- - file: some_content/models/wflow.md
+ - file: content/models/wflow.md
sections:
- external: https://github.com/eWaterCycle/ewatercycle-wflowjl/blob/main/docs/demo.ipynb
- external: https://github.com/eWaterCycle/ewatercycle-wflow/blob/main/docs/model.ipynb
- - file: some_content/workflows.md
+ - file: content/workflows.md
sections:
- - file: some_content/workflows/running_a_model.md
+ - file: content/workflows/running_a_model.md
sections:
- - file: some_content/workflows/running_a_model/flooding.md
+ - file: content/workflows/running_a_model/flooding.md
sections:
- external: https://github.com/eWaterCycle/projects/blob/main/book/thesis_projects/BSc/2025_Q3_ThirzaVanEsch_CEG/Report/CHAPTER4.ipynb
- - file: some_content/workflows/running_a_model/droughts.md
+ - file: content/workflows/running_a_model/droughts.md
sections:
- external: https://github.com/eWaterCycle/projects/blob/main/book/thesis_projects/BSc/2025_Q3_IschaHollemans_CEG/ischa/CMIP_Future_prediction.ipynb
- - file: some_content/workflows/running_a_model/climate_change.md
+ - file: content/workflows/running_a_model/climate_change.md
# sections:
# - external: rolf stuff
- - file: some_content/workflows/calibrating_models.md
+ - file: content/workflows/calibrating_models.md
sections:
- - file: some_content/workflows/calibrating_models/calibrate_hbv.ipynb
+ - file: content/workflows/calibrating_models/calibrate_hbv.ipynb
- external: https://github.com/eWaterCycle/projects/blob/main/book/thesis_projects/BSc/2025_Q3_ThirzaVanEsch_CEG/Report/CHAPTER3OVER_ori.ipynb
# - external:
- - file: some_content/workflows/comparisons.md
+ - file: content/workflows/comparisons.md
sections:
- - file: some_content/workflows/comparisons/1_forcing_multiple_models.md
+ - file: content/workflows/comparisons/1_forcing_multiple_models.md
# sections:
# - external: https://github.com/eWaterCycle/projects/blob/main/book/tutorials_examples/2_HBV_PCRGlobWB_ERA5/example_model_run_pcrglobwb.ipynb
- - file: some_content/workflows/comparisons/1_model_multiple_forcings.ipynb
+ - file: content/workflows/comparisons/1_model_multiple_forcings.ipynb
- - file: some_content/workflows/model_coupling.md
+ - file: content/workflows/model_coupling.md
- - file: some_content/workflows/DA.md
+ - file: content/workflows/DA.md
+ - file: content/glossary/definitions.md
+ - file: content/glossary/workflow_chart.md
- file: references.md
- file: credits.md
diff --git a/book/some_content/different_models.md b/book/content/different_models.md
similarity index 99%
rename from book/some_content/different_models.md
rename to book/content/different_models.md
index c8e0fe9..1ad467c 100644
--- a/book/some_content/different_models.md
+++ b/book/content/different_models.md
@@ -216,7 +216,7 @@ PCRGlobWB has many different outputs:
* [Parameter set](https://ewatercycle.readthedocs.io/en/latest/system_setup.html#prepare-other-parameter-sets)
* cloneMap
* We have the needed data stored, you just need to give it your region.
-
+  * .ini file with the settings; a standard .ini file will be provided.
A [GitHub page](https://github.com/UU-Hydro/PCR-GLOBWB_input_example) on the inputs for PCRGlobWB 2.0.
## Wflow
diff --git a/book/some_content/first_model_run.md b/book/content/first_model_run.md
similarity index 81%
rename from book/some_content/first_model_run.md
rename to book/content/first_model_run.md
index 1bf76cb..5ecc9f6 100644
--- a/book/some_content/first_model_run.md
+++ b/book/content/first_model_run.md
@@ -11,8 +11,9 @@ Below you will find a short overview of the content of this part.
## Interface
+**First** you need to get familiar with the interface of eWaterCycle.
The eWaterCycle interface is a Jupyter Hub environment, which means you can run Jupyter notebooks in your browser.
-And the heavy computations are done at a super computer, so you do not need a powerful computer to run the models.
+And the heavy computations are done on a cloud/super computer, so you do not need a powerful computer to run the models.
This super computer is hosted by SURF, the Dutch national supercomputer provider.
So the jupyter hub sends the requests to the super computer, which runs the models and sends the results back to your browser.
@@ -21,6 +22,7 @@ You also need basic knowledge of Python to be able to use eWaterCycle.
## Running an HBV Model
+We will start with running a lumped bucket hydrological model: HBV with eWaterCycle.
Here we will pick a catchment and run an HBV model with forcing data.
To show you how eWaterCycle works in practice.
This is a simple example, but it will give you an idea of how to use eWaterCycle.
diff --git a/book/some_content/first_model_run/first_run.ipynb b/book/content/first_model_run/first_run.ipynb
similarity index 100%
rename from book/some_content/first_model_run/first_run.ipynb
rename to book/content/first_model_run/first_run.ipynb
diff --git a/book/some_content/first_model_run/interface.ipynb b/book/content/first_model_run/interface.ipynb
similarity index 100%
rename from book/some_content/first_model_run/interface.ipynb
rename to book/content/first_model_run/interface.ipynb
diff --git a/book/some_content/forcing/available_climate_datasets.json b/book/content/forcing/available_climate_datasets.json
similarity index 100%
rename from book/some_content/forcing/available_climate_datasets.json
rename to book/content/forcing/available_climate_datasets.json
diff --git a/book/some_content/forcing/caravan_forcing.ipynb b/book/content/forcing/caravan_forcing.ipynb
similarity index 100%
rename from book/some_content/forcing/caravan_forcing.ipynb
rename to book/content/forcing/caravan_forcing.ipynb
diff --git a/book/some_content/forcing/cmip_future.ipynb b/book/content/forcing/cmip_future.ipynb
similarity index 96%
rename from book/some_content/forcing/cmip_future.ipynb
rename to book/content/forcing/cmip_future.ipynb
index 0da703b..cd3c123 100644
--- a/book/some_content/forcing/cmip_future.ipynb
+++ b/book/content/forcing/cmip_future.ipynb
@@ -245,179 +245,179 @@
"\n"
],
"text/plain": [
- "Valid CMIP6 datasets \u001b[1m(\u001b[0mas of July \u001b[1;36m2025\u001b[0m\u001b[1m)\u001b[0m:\n",
- "\u001b[1m{\u001b[0m\n",
- " \u001b[32m'EC-Earth3-AerChem'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p4f1'\u001b[0m, \u001b[32m'r3i1p1f1'\u001b[0m, \u001b[32m'r4i1p1f1'\u001b[0m, \u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'TaiESM1'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'AWI-ESM-1-1-LR'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CanESM5'\u001b[0m: \u001b[1m{\u001b[0m\n",
- " \u001b[32m'r19i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r21i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r12i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r10i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r8i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r7i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r10i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r5i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r7i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r24i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r25i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r11i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r15i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r23i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r17i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r23i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r18i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r4i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r20i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r13i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r22i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r8i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r14i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r5i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r17i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r1i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r11i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r13i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r6i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r12i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r4i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r24i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r2i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r21i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r1i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r16i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r18i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r3i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r9i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r9i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r3i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r22i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r20i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r6i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r16i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r2i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r25i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r14i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r19i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r15i1p1f1'\u001b[0m\n",
- " \u001b[1m}\u001b[0m,\n",
- " \u001b[32m'EC-Earth3-Veg-LR'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CanESM5-1'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p2f1'\u001b[0m, \u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'ACCESS-CM2'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r5i1p1f1'\u001b[0m, \u001b[32m'r4i1p1f1'\u001b[0m, \u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CMCC-CM2-SR5'\u001b[0m: \u001b[1m{\u001b[0m\n",
- " \u001b[32m'r11i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r7i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r6i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r10i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r4i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r5i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r2i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r8i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r1i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r3i1p2f1'\u001b[0m,\n",
- " \u001b[32m'r9i1p2f1'\u001b[0m\n",
- " \u001b[1m}\u001b[0m,\n",
- " \u001b[32m'BCC-ESM1'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r3i1p1f1'\u001b[0m, \u001b[32m'r2i1p1f1'\u001b[0m, \u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'FGOALS-g3'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r6i1p1f1'\u001b[0m, \u001b[32m'r3i1p1f1'\u001b[0m, \u001b[32m'r5i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CMCC-ESM2'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'ACCESS-ESM1-5'\u001b[0m: \u001b[1m{\u001b[0m\n",
- " \u001b[32m'r21i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r12i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r10i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r8i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r7i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r24i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r40i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r34i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r11i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r28i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r17i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r23i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r20i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r4i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r27i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r38i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r13i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r32i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r29i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r14i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r36i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r5i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r39i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r26i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r35i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r2i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r1i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r18i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r33i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r9i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r3i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r22i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r6i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r30i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r16i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r37i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r25i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r31i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r19i1p1f1'\u001b[0m,\n",
- " \u001b[32m'r15i1p1f1'\u001b[0m\n",
- " \u001b[1m}\u001b[0m,\n",
- " \u001b[32m'IITM-ESM'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CMCC-CM2-HR4'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CNRM-CM6-1'\u001b[0m: \u001b[1m{\u001b[0m\n",
- " \u001b[32m'r19i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r22i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r24i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r29i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r23i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r26i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r1i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r17i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r4i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r3i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r11i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r15i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r27i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r30i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r10i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r2i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r25i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r12i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r20i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r13i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r18i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r8i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r6i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r7i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r21i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r9i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r14i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r16i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r5i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r28i1p1f2'\u001b[0m\n",
- " \u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CNRM-ESM2-1'\u001b[0m: \u001b[1m{\u001b[0m\n",
- " \u001b[32m'r11i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r15i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r10i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r9i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r13i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r1i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r14i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r2i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r8i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r4i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r5i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r6i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r12i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r7i1p1f2'\u001b[0m,\n",
- " \u001b[32m'r3i1p1f2'\u001b[0m\n",
- " \u001b[1m}\u001b[0m,\n",
- " \u001b[32m'EC-Earth3-Veg'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r5i1p1f1'\u001b[0m, \u001b[32m'r1i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'EC-Earth3'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r12i1p1f1'\u001b[0m, \u001b[32m'r10i1p1f1'\u001b[0m, \u001b[32m'r17i1p1f1'\u001b[0m, \u001b[32m'r16i1p1f1'\u001b[0m, \u001b[32m'r14i1p1f1'\u001b[0m, \u001b[32m'r18i1p1f1'\u001b[0m\u001b[1m}\u001b[0m,\n",
- " \u001b[32m'CNRM-CM6-1-HR'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'r1i1p1f2'\u001b[0m\u001b[1m}\u001b[0m\n",
- "\u001b[1m}\u001b[0m\n"
+ "Valid CMIP6 datasets \u001B[1m(\u001B[0mas of July \u001B[1;36m2025\u001B[0m\u001B[1m)\u001B[0m:\n",
+ "\u001B[1m{\u001B[0m\n",
+ " \u001B[32m'EC-Earth3-AerChem'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p4f1'\u001B[0m, \u001B[32m'r3i1p1f1'\u001B[0m, \u001B[32m'r4i1p1f1'\u001B[0m, \u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'TaiESM1'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'AWI-ESM-1-1-LR'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CanESM5'\u001B[0m: \u001B[1m{\u001B[0m\n",
+ " \u001B[32m'r19i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r21i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r12i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r10i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r8i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r7i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r10i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r5i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r7i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r24i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r25i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r11i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r15i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r23i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r17i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r23i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r18i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r4i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r20i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r13i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r22i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r8i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r14i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r5i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r17i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r1i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r11i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r13i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r6i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r12i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r4i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r24i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r2i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r21i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r1i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r16i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r18i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r3i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r9i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r9i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r3i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r22i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r20i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r6i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r16i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r2i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r25i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r14i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r19i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r15i1p1f1'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'EC-Earth3-Veg-LR'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CanESM5-1'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p2f1'\u001B[0m, \u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'ACCESS-CM2'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r5i1p1f1'\u001B[0m, \u001B[32m'r4i1p1f1'\u001B[0m, \u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CMCC-CM2-SR5'\u001B[0m: \u001B[1m{\u001B[0m\n",
+ " \u001B[32m'r11i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r7i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r6i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r10i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r4i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r5i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r2i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r8i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r1i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r3i1p2f1'\u001B[0m,\n",
+ " \u001B[32m'r9i1p2f1'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'BCC-ESM1'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r3i1p1f1'\u001B[0m, \u001B[32m'r2i1p1f1'\u001B[0m, \u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'FGOALS-g3'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r6i1p1f1'\u001B[0m, \u001B[32m'r3i1p1f1'\u001B[0m, \u001B[32m'r5i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CMCC-ESM2'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'ACCESS-ESM1-5'\u001B[0m: \u001B[1m{\u001B[0m\n",
+ " \u001B[32m'r21i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r12i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r10i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r8i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r7i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r24i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r40i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r34i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r11i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r28i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r17i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r23i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r20i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r4i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r27i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r38i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r13i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r32i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r29i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r14i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r36i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r5i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r39i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r26i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r35i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r2i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r1i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r18i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r33i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r9i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r3i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r22i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r6i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r30i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r16i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r37i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r25i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r31i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r19i1p1f1'\u001B[0m,\n",
+ " \u001B[32m'r15i1p1f1'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'IITM-ESM'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CMCC-CM2-HR4'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CNRM-CM6-1'\u001B[0m: \u001B[1m{\u001B[0m\n",
+ " \u001B[32m'r19i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r22i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r24i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r29i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r23i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r26i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r1i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r17i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r4i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r3i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r11i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r15i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r27i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r30i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r10i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r2i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r25i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r12i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r20i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r13i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r18i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r8i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r6i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r7i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r21i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r9i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r14i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r16i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r5i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r28i1p1f2'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CNRM-ESM2-1'\u001B[0m: \u001B[1m{\u001B[0m\n",
+ " \u001B[32m'r11i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r15i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r10i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r9i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r13i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r1i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r14i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r2i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r8i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r4i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r5i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r6i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r12i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r7i1p1f2'\u001B[0m,\n",
+ " \u001B[32m'r3i1p1f2'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'EC-Earth3-Veg'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r5i1p1f1'\u001B[0m, \u001B[32m'r1i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'EC-Earth3'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r12i1p1f1'\u001B[0m, \u001B[32m'r10i1p1f1'\u001B[0m, \u001B[32m'r17i1p1f1'\u001B[0m, \u001B[32m'r16i1p1f1'\u001B[0m, \u001B[32m'r14i1p1f1'\u001B[0m, \u001B[32m'r18i1p1f1'\u001B[0m\u001B[1m}\u001B[0m,\n",
+ " \u001B[32m'CNRM-CM6-1-HR'\u001B[0m: \u001B[1m{\u001B[0m\u001B[32m'r1i1p1f2'\u001B[0m\u001B[1m}\u001B[0m\n",
+ "\u001B[1m}\u001B[0m\n"
]
},
"metadata": {},
@@ -453,7 +453,7 @@
"shape_file_name = \"camelsgb_33039\" # river: Bedford Ouse at Roxton, England\n",
"\n",
"# The path to the shapefiles\n",
- "shapefile_path = Path.home() / \"getting-started/book/some_content/forcing/shapefiles\" / f\"{shape_file_name}.shp\" # check this directory yourself!\n",
+ "shapefile_path = Path.home() / \"getting-started/book/content/forcing/shapefiles\" / f\"{shape_file_name}.shp\" # check this directory yourself!\n",
"\n",
"# The time-window of the experiment\n",
"future_experiment_start_date = \"2050-01-01T00:00:00Z\"\n",
@@ -617,19 +617,19 @@
"\n"
],
"text/plain": [
- "\u001b[1;35mLumpedMakkinkForcing\u001b[0m\u001b[1m(\u001b[0m\n",
- " \u001b[33mstart_time\u001b[0m=\u001b[32m'2050-01-01T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mend_time\u001b[0m=\u001b[32m'2074-12-31T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mdirectory\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP126/work/diagnostic/script'\u001b[0m\u001b[1m)\u001b[0m,\n",
- " \u001b[33mshape\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP126/work/diagnostic/script/camelsgb_33039.shp'\u001b[0m\u001b[1m)\u001b[0m\n",
+ "\u001B[1;35mLumpedMakkinkForcing\u001B[0m\u001B[1m(\u001B[0m\n",
+ " \u001B[33mstart_time\u001B[0m=\u001B[32m'2050-01-01T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mend_time\u001B[0m=\u001B[32m'2074-12-31T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mdirectory\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP126/work/diagnostic/script'\u001B[0m\u001B[1m)\u001B[0m,\n",
+ " \u001B[33mshape\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP126/work/diagnostic/script/camelsgb_33039.shp'\u001B[0m\u001B[1m)\u001B[0m\n",
",\n",
- " \u001b[33mfilenames\u001b[0m=\u001b[1m{\u001b[0m\n",
- " \u001b[32m'pr'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp126_r1i1p1f1_pr_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'tas'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp126_r1i1p1f1_tas_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'rsds'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp126_r1i1p1f1_rsds_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'evspsblpot'\u001b[0m: \u001b[32m'Derived_Makkink_evspsblpot.nc'\u001b[0m\n",
- " \u001b[1m}\u001b[0m\n",
- "\u001b[1m)\u001b[0m\n"
+ " \u001B[33mfilenames\u001B[0m=\u001B[1m{\u001B[0m\n",
+ " \u001B[32m'pr'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp126_r1i1p1f1_pr_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'tas'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp126_r1i1p1f1_tas_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'rsds'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp126_r1i1p1f1_rsds_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'evspsblpot'\u001B[0m: \u001B[32m'Derived_Makkink_evspsblpot.nc'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m\n",
+ "\u001B[1m)\u001B[0m\n"
]
},
"metadata": {},
@@ -654,19 +654,19 @@
"\n"
],
"text/plain": [
- "\u001b[1;35mLumpedMakkinkForcing\u001b[0m\u001b[1m(\u001b[0m\n",
- " \u001b[33mstart_time\u001b[0m=\u001b[32m'2050-01-01T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mend_time\u001b[0m=\u001b[32m'2074-12-31T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mdirectory\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP245/work/diagnostic/script'\u001b[0m\u001b[1m)\u001b[0m,\n",
- " \u001b[33mshape\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP245/work/diagnostic/script/camelsgb_33039.shp'\u001b[0m\u001b[1m)\u001b[0m\n",
+ "\u001B[1;35mLumpedMakkinkForcing\u001B[0m\u001B[1m(\u001B[0m\n",
+ " \u001B[33mstart_time\u001B[0m=\u001B[32m'2050-01-01T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mend_time\u001B[0m=\u001B[32m'2074-12-31T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mdirectory\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP245/work/diagnostic/script'\u001B[0m\u001B[1m)\u001B[0m,\n",
+ " \u001B[33mshape\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP245/work/diagnostic/script/camelsgb_33039.shp'\u001B[0m\u001B[1m)\u001B[0m\n",
",\n",
- " \u001b[33mfilenames\u001b[0m=\u001b[1m{\u001b[0m\n",
- " \u001b[32m'pr'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp245_r1i1p1f1_pr_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'tas'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp245_r1i1p1f1_tas_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'rsds'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp245_r1i1p1f1_rsds_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'evspsblpot'\u001b[0m: \u001b[32m'Derived_Makkink_evspsblpot.nc'\u001b[0m\n",
- " \u001b[1m}\u001b[0m\n",
- "\u001b[1m)\u001b[0m\n"
+ " \u001B[33mfilenames\u001B[0m=\u001B[1m{\u001B[0m\n",
+ " \u001B[32m'pr'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp245_r1i1p1f1_pr_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'tas'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp245_r1i1p1f1_tas_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'rsds'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp245_r1i1p1f1_rsds_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'evspsblpot'\u001B[0m: \u001B[32m'Derived_Makkink_evspsblpot.nc'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m\n",
+ "\u001B[1m)\u001B[0m\n"
]
},
"metadata": {},
@@ -691,19 +691,19 @@
"\n"
],
"text/plain": [
- "\u001b[1;35mLumpedMakkinkForcing\u001b[0m\u001b[1m(\u001b[0m\n",
- " \u001b[33mstart_time\u001b[0m=\u001b[32m'2050-01-01T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mend_time\u001b[0m=\u001b[32m'2074-12-31T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mdirectory\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP585/work/diagnostic/script'\u001b[0m\u001b[1m)\u001b[0m,\n",
- " \u001b[33mshape\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP585/work/diagnostic/script/camelsgb_33039.shp'\u001b[0m\u001b[1m)\u001b[0m\n",
+ "\u001B[1;35mLumpedMakkinkForcing\u001B[0m\u001B[1m(\u001B[0m\n",
+ " \u001B[33mstart_time\u001B[0m=\u001B[32m'2050-01-01T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mend_time\u001B[0m=\u001B[32m'2074-12-31T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mdirectory\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP585/work/diagnostic/script'\u001B[0m\u001B[1m)\u001B[0m,\n",
+ " \u001B[33mshape\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/SSP585/work/diagnostic/script/camelsgb_33039.shp'\u001B[0m\u001B[1m)\u001B[0m\n",
",\n",
- " \u001b[33mfilenames\u001b[0m=\u001b[1m{\u001b[0m\n",
- " \u001b[32m'pr'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp585_r1i1p1f1_pr_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'tas'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp585_r1i1p1f1_tas_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'rsds'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_ssp585_r1i1p1f1_rsds_gn_2050-2074.nc'\u001b[0m,\n",
- " \u001b[32m'evspsblpot'\u001b[0m: \u001b[32m'Derived_Makkink_evspsblpot.nc'\u001b[0m\n",
- " \u001b[1m}\u001b[0m\n",
- "\u001b[1m)\u001b[0m\n"
+ " \u001B[33mfilenames\u001B[0m=\u001B[1m{\u001B[0m\n",
+ " \u001B[32m'pr'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp585_r1i1p1f1_pr_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'tas'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp585_r1i1p1f1_tas_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'rsds'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_ssp585_r1i1p1f1_rsds_gn_2050-2074.nc'\u001B[0m,\n",
+ " \u001B[32m'evspsblpot'\u001B[0m: \u001B[32m'Derived_Makkink_evspsblpot.nc'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m\n",
+ "\u001B[1m)\u001B[0m\n"
]
},
"metadata": {},
diff --git a/book/some_content/forcing/cmip_historic.ipynb b/book/content/forcing/cmip_historic.ipynb
similarity index 99%
rename from book/some_content/forcing/cmip_historic.ipynb
rename to book/content/forcing/cmip_historic.ipynb
index 1124218..e14cd8f 100644
--- a/book/some_content/forcing/cmip_historic.ipynb
+++ b/book/content/forcing/cmip_historic.ipynb
@@ -78,7 +78,7 @@
"shape_file_name = \"camelsgb_33039\" # river: Bedford Ouse at Roxton, England\n",
"\n",
"# The path to the shapefiles\n",
- "shapefile_path = Path.home() / \"getting-started/book/some_content/forcing/shapefiles\" / f\"{shape_file_name}.shp\" # check this directory yourself!\n",
+ "shapefile_path = Path.home() / \"getting-started/book/content/forcing/shapefiles\" / f\"{shape_file_name}.shp\" # check this directory yourself!\n",
"\n",
"# The time-window of the experiment\n",
"experiment_start_date = \"2000-08-01T00:00:00Z\"\n",
@@ -166,19 +166,19 @@
"\n"
],
"text/plain": [
- "\u001b[1;35mLumpedMakkinkForcing\u001b[0m\u001b[1m(\u001b[0m\n",
- " \u001b[33mstart_time\u001b[0m=\u001b[32m'2000-08-01T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mend_time\u001b[0m=\u001b[32m'2005-08-31T00:00:00Z'\u001b[0m,\n",
- " \u001b[33mdirectory\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/historical/work/diagnostic/script'\u001b[0m\u001b[1m)\u001b[0m,\n",
- " \u001b[33mshape\u001b[0m=\u001b[1;35mPosixPath\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/historical/work/diagnostic/script/camelsgb_33039.s\u001b[0m\n",
- "\u001b[32mhp'\u001b[0m\u001b[1m)\u001b[0m,\n",
- " \u001b[33mfilenames\u001b[0m=\u001b[1m{\u001b[0m\n",
- " \u001b[32m'pr'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_historical_r1i1p1f1_pr_gn_2000-2005.nc'\u001b[0m,\n",
- " \u001b[32m'tas'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_historical_r1i1p1f1_tas_gn_2000-2005.nc'\u001b[0m,\n",
- " \u001b[32m'rsds'\u001b[0m: \u001b[32m'CMIP6_MPI-ESM1-2-HR_day_historical_r1i1p1f1_rsds_gn_2000-2005.nc'\u001b[0m,\n",
- " \u001b[32m'evspsblpot'\u001b[0m: \u001b[32m'Derived_Makkink_evspsblpot.nc'\u001b[0m\n",
- " \u001b[1m}\u001b[0m\n",
- "\u001b[1m)\u001b[0m\n"
+ "\u001B[1;35mLumpedMakkinkForcing\u001B[0m\u001B[1m(\u001B[0m\n",
+ " \u001B[33mstart_time\u001B[0m=\u001B[32m'2000-08-01T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mend_time\u001B[0m=\u001B[32m'2005-08-31T00:00:00Z'\u001B[0m,\n",
+ " \u001B[33mdirectory\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/historical/work/diagnostic/script'\u001B[0m\u001B[1m)\u001B[0m,\n",
+ " \u001B[33mshape\u001B[0m=\u001B[1;35mPosixPath\u001B[0m\u001B[1m(\u001B[0m\u001B[32m'/home/mmelotto/forcing/camelsgb_33039/CMIP6/historical/work/diagnostic/script/camelsgb_33039.s\u001B[0m\n",
+ "\u001B[32mhp'\u001B[0m\u001B[1m)\u001B[0m,\n",
+ " \u001B[33mfilenames\u001B[0m=\u001B[1m{\u001B[0m\n",
+ " \u001B[32m'pr'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_historical_r1i1p1f1_pr_gn_2000-2005.nc'\u001B[0m,\n",
+ " \u001B[32m'tas'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_historical_r1i1p1f1_tas_gn_2000-2005.nc'\u001B[0m,\n",
+ " \u001B[32m'rsds'\u001B[0m: \u001B[32m'CMIP6_MPI-ESM1-2-HR_day_historical_r1i1p1f1_rsds_gn_2000-2005.nc'\u001B[0m,\n",
+ " \u001B[32m'evspsblpot'\u001B[0m: \u001B[32m'Derived_Makkink_evspsblpot.nc'\u001B[0m\n",
+ " \u001B[1m}\u001B[0m\n",
+ "\u001B[1m)\u001B[0m\n"
]
},
"metadata": {},
diff --git a/book/some_content/forcing/era5_forcing_caravan_shapefile.ipynb b/book/content/forcing/era5_forcing_caravan_shapefile.ipynb
similarity index 100%
rename from book/some_content/forcing/era5_forcing_caravan_shapefile.ipynb
rename to book/content/forcing/era5_forcing_caravan_shapefile.ipynb
diff --git a/book/some_content/forcing/era5_forcing_own_shapefile.ipynb b/book/content/forcing/era5_forcing_own_shapefile.ipynb
similarity index 99%
rename from book/some_content/forcing/era5_forcing_own_shapefile.ipynb
rename to book/content/forcing/era5_forcing_own_shapefile.ipynb
index 7c99bb2..fe28b54 100644
--- a/book/some_content/forcing/era5_forcing_own_shapefile.ipynb
+++ b/book/content/forcing/era5_forcing_own_shapefile.ipynb
@@ -93,7 +93,7 @@
"\n",
"# The path save directory of the ERA5 data\n",
"forcing_path_ERA5 = Path.home() / \"forcing\" / shape_file_name / \"ERA5\" / \"own_shapefile\"\n",
- "forcing_path_ERA5.mkdir(exist_ok=True)"
+ "forcing_path_ERA5.mkdir(exist_ok=True, parents=True)"
]
},
{
@@ -112,7 +112,7 @@
"outputs": [],
"source": [
"# The path to the shapefiles\n",
- "shapefile_path = Path.home() / \"getting-started/book/some_content/forcing/shapefiles\" / f\"{shape_file_name}.shp\" # check this directory yourself!"
+ "shapefile_path = Path.home() / \"getting-started/book/content/forcing/shapefiles\" / f\"{shape_file_name}.shp\" # check this directory yourself!"
]
},
{
diff --git a/book/some_content/forcing/manual_forcing.ipynb b/book/content/forcing/manual_forcing.ipynb
similarity index 100%
rename from book/some_content/forcing/manual_forcing.ipynb
rename to book/content/forcing/manual_forcing.ipynb
diff --git a/book/some_content/forcing/shapefiles/camelsgb_33039.cpg b/book/content/forcing/shapefiles/camelsgb_33039.cpg
similarity index 100%
rename from book/some_content/forcing/shapefiles/camelsgb_33039.cpg
rename to book/content/forcing/shapefiles/camelsgb_33039.cpg
diff --git a/book/some_content/forcing/shapefiles/camelsgb_33039.dbf b/book/content/forcing/shapefiles/camelsgb_33039.dbf
similarity index 100%
rename from book/some_content/forcing/shapefiles/camelsgb_33039.dbf
rename to book/content/forcing/shapefiles/camelsgb_33039.dbf
diff --git a/book/some_content/forcing/shapefiles/camelsgb_33039.prj b/book/content/forcing/shapefiles/camelsgb_33039.prj
similarity index 100%
rename from book/some_content/forcing/shapefiles/camelsgb_33039.prj
rename to book/content/forcing/shapefiles/camelsgb_33039.prj
diff --git a/book/some_content/forcing/shapefiles/camelsgb_33039.shp b/book/content/forcing/shapefiles/camelsgb_33039.shp
similarity index 100%
rename from book/some_content/forcing/shapefiles/camelsgb_33039.shp
rename to book/content/forcing/shapefiles/camelsgb_33039.shp
diff --git a/book/some_content/forcing/shapefiles/camelsgb_33039.shx b/book/content/forcing/shapefiles/camelsgb_33039.shx
similarity index 100%
rename from book/some_content/forcing/shapefiles/camelsgb_33039.shx
rename to book/content/forcing/shapefiles/camelsgb_33039.shx
diff --git a/book/content/generate_forcing.md b/book/content/generate_forcing.md
new file mode 100644
index 0000000..b94f30b
--- /dev/null
+++ b/book/content/generate_forcing.md
@@ -0,0 +1,67 @@
+# Generating Forcing Data
+
+There are different ways to get forcing data for your model run that are supported by eWaterCycle.
+They also differ per model, so you will have to check the documentation of the model you want to use.
+
+Every model needs forcing data, there are several possible ways to get this forcing data:
+- Camels Forcing using Caravan
+- ERA5 reanalysis
+ - Shapefile you made yourself
+ - Shapefile from Caravan dataset
+- CMIP6 historical data
+- CMIP6 future data
+- Manual data input
+
+eWaterCycle supports different types of forcings, currently it supports:
+
+
+
+
CaravanForcing
+
DistributedMakkinkForcing
+
DistributedUserForcing
+
GenericDistributedForcing
+
+
+
GenericLumpedForcing
+
HypeForcing
+
LisfloodForcing
+
LumpedMakkinkForcing
+
+
+
LumpedUserForcing
+
MarrmotForcing
+
PCRGlobWBForcing
+
WflowForcing
+
+
+
+
+The type of forcing needed is model dependent!
+
+The forcing object in eWaterCycle has some properties:
+- start time
+- end time
+- directory, which is a path
+- shapefile, also a path, pointing to the shapefile; note that it also needs the accompanying files (so .shp plus .cpg, .prj, .dbf, .shx)
+- filenames, a dictionary containing the paths to the netCDF files where the data is stored for that variable
+- dataset, which I will cover in more detail below.
+
+### Technical Details
+
+This is for advanced users who will need to use different datasets.
+The dataset parameter selects between the different ESMValTool recipes and tells them where the data is located on our data disk.
+So for the ERA5 data we use ``dataset='ERA5'`` which calls this dataset object:
+```python
+{
+ "ERA5": Dataset(
+ dataset="ERA5",
+ project="OBS6",
+ tier=3,
+ type="reanaly",
+ version=1,
+ )
+}
+```
+
+When using your own forcing, different datasets, or you need Eday data like evaporation from ERA5 you might need to check this out, it is listed [here on GitHub](https://github.com/eWaterCycle/ewatercycle/blob/main/src/ewatercycle/esmvaltool/datasets.py).
+You can always ask the maintainers for help.
diff --git a/book/content/glossary/definitions.md b/book/content/glossary/definitions.md
new file mode 100644
index 0000000..b1157f8
--- /dev/null
+++ b/book/content/glossary/definitions.md
@@ -0,0 +1,15 @@
+# Definitions eWaterCycle
+
+GRPC4BMI
+: A gRPC wrapper around the Basic Model Interface that we use to communicate between the JupyterHub environment and the model containers.
+
+BMI
+: Basic Model Interface — a standardized interface that defines how models expose their variables, time steps, and methods for initialization, updating, and finalization.
+It allows models to be run and controlled in a consistent way, making them easier to integrate into larger workflows like eWaterCycle.
+
+Containers
+: Lightweight, portable units that package software and its dependencies together, ensuring consistent execution across different computing environments.
+In eWaterCycle, containers are used to run models reliably and reproducibly, regardless of the underlying system.
+
+
+
diff --git a/book/content/glossary/workflow_chart.md b/book/content/glossary/workflow_chart.md
new file mode 100644
index 0000000..435ce5c
--- /dev/null
+++ b/book/content/glossary/workflow_chart.md
@@ -0,0 +1,50 @@
+# Workflow Chart
+
+Here is a mermaid chart that is currently broken in teachbooks.
+It can, however, be found rendered on [GitHub](https://github.com/eWaterCycle/getting-started/blob/main/book/content/glossary/workflow_chart.md).
+
+```mermaid
+graph TD
+ A[Working with eWatercycle] --> B(Beginner)
+ A --> F{Choosing your Workflow \nChoose Model and accompanying Forcing}
+
+ B --> |Understand why we use eWaterCycle|C(Hello World example)
+
+
+ C --> D[(Learn different forcings)]
+ C --> E[Learn about different models]
+
+ D --> F
+ E --> F
+
+ F --> G(Future Studies)
+ G --> N(Climate Change)
+ N --> O
+ G --> L(Flooding)
+ G --> M(Droughts)
+ L --> AA[[No Calibration Needed]]
+ AA --> O[(Get Relevant CMIP Forcing)]
+ L --> BB[[Calibration Needed?]]
+ BB --> H
+ M --> AA
+
+ M --> BB
+ O --> P(Analyse Results)
+
+
+
+ F --> H(Model Calibration)
+
+ H --> O
+
+ F --> I(Comparisons)
+ I --> R(1 Model, Multiple Forcings) --> T
+ I --> S(1 Forcing, Multiple Models) --> T
+
+ F --> J(Model Coupling)
+ J --> T(Work In Progress)
+
+ F --> K(Data Assimilation)
+ K --> T
+
+```
\ No newline at end of file
diff --git a/book/some_content/models/external.md b/book/content/models/external.md
similarity index 77%
rename from book/some_content/models/external.md
rename to book/content/models/external.md
index f7861cb..5fe8ac4 100644
--- a/book/some_content/models/external.md
+++ b/book/content/models/external.md
@@ -1,3 +1,4 @@
# Info On Next Notebooks
-The next notebooks are pulled externally from their respective eWaterCycle GitHub pages!
\ No newline at end of file
+The next notebooks are pulled externally from their respective eWaterCycle GitHub pages!
+
diff --git a/book/content/models/hbv.ipynb b/book/content/models/hbv.ipynb
new file mode 100644
index 0000000..59a56cd
--- /dev/null
+++ b/book/content/models/hbv.ipynb
@@ -0,0 +1,347 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "c174c2d93784aea8",
+ "metadata": {},
+ "source": [
+ "# HBV\n",
+ "\n",
+ "Let us say we want to do research into the future of the Speyside distileries using HBV.\n",
+ "We see that, using the [caravan map](https://www.ewatercycle.org/caravan-map/caravan_catchments_map_Scotland.html), Scotland is part of Caravan, and some of Speyside is in there.\n",
+ "So we will use that region to do our research on, using the HBV model."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "f7f26aef2fa9a76d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# General python\n",
+ "import warnings\n",
+ "warnings.filterwarnings(\"ignore\", category=UserWarning)\n",
+ "\n",
+ "import numpy as np\n",
+ "from pathlib import Path\n",
+ "import pandas as pd\n",
+ "import matplotlib.pyplot as plt\n",
+ "import xarray as xr\n",
+ "\n",
+ "# Niceties\n",
+ "from rich import print\n",
+ "\n",
+ "# General eWaterCycle\n",
+ "import ewatercycle\n",
+ "import ewatercycle.models\n",
+ "import ewatercycle.forcing"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "f393cf461572531b",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Region selection using Caravan\n",
+ "\"\"\"Avon at Delnashaugh, in Scotland\"\"\"\n",
+ "caravan_id = \"camelsgb_8004\"\n",
+ "\n",
+ "# Time periods for experiment, note they are hydrological years\n",
+ "experiment_start_date = \"2010-08-01T00:00:00Z\"\n",
+ "experiment_end_date = \"2014-08-31T00:00:00Z\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3d28ba10f3c52989",
+ "metadata": {},
+ "source": [
+ "Now we will get the forcing data for our selected region."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "e805116d43678aad",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Location of caravan forcing\n",
+ "forcing_path_caravan = Path.home() / \"forcing\" / caravan_id / \"caravan\"\n",
+ "forcing_path_caravan.mkdir(exist_ok=True, parents=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "8518157762bcd157",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "for file_name in [pcrglobwb_forcing.temperatureNC, pcrglobwb_forcing.precipitationNC]:\n",
+ " dataset = xr.load_dataset(f\"{pcrglobwb_forcing.directory}/{file_name}\")\n",
+ " print(dataset)\n",
+ " print(\"------------------------\")\n",
+ " var = list(dataset.data_vars.keys())[0]\n",
+ " dataset[var].isel(time=-1).plot(cmap=\"coolwarm\", robust=True, size=5)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "59c123571924fd45",
+ "metadata": {},
+ "source": [
+ "## The clonemap\n",
+ "\n",
+ "Be careful here with the forcing resolution.\n",
+ "If the model run throws an error where the array size does not add up to raster, play around with the forcing resolution."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "9367c8a11bf3fe95",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def create_clonemap(lonmin, latmin, lonmax, latmax, forcing_resolution, catchment):\n",
+ " \"\"\"Create new clonemap compatible with forcing data resolution.\"\"\"\n",
+ " dlon = lonmax - lonmin\n",
+ " dlat = latmax - latmin\n",
+ "\n",
+ " msg = (\n",
+ " \"The clonemap extent divided by the forcing resolution must yield\"\n",
+ " \"an integer number of grid cells.\"\n",
+ " )\n",
+ " assert dlon % forcing_resolution == 0, f\"Longitudes not compatible. {msg}\"\n",
+ " assert dlat % forcing_resolution == 0, f\"Latitudes not compatible. {msg}\"\n",
+ "\n",
+ " clonemap_dir = (\n",
+ " \"/data/shared/parameter-sets/pcrglobwb_global/global_05min/cloneMaps\"\n",
+ " )\n",
+ " globalclonemap = clonemap_dir + \"/clone_global_05min.map\"\n",
+ " # outputclonemap = forcing_path_pcrglobwb / f\"{catchment.lower()}_05min.map\" # copy to clonemap dir after ensuring it is correct\n",
+ " outputclonemap = f\"{catchment.lower()}_05min.map\" # copy to clonemap dir after ensuring it is correct\n",
+ "\n",
+ "\n",
+ " subprocess.call(\n",
+ " f\"gdal_translate -of PCRaster {globalclonemap} -projwin \"\n",
+ " f\"{lonmin} {latmax} {lonmax} {latmin} {outputclonemap}\",\n",
+ " shell=True,\n",
+ " )\n",
+ " return outputclonemap"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "628479e317c26bb9",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Input file size is 4320, 2160\n",
+ "0...10...20...30...40...50...60...70...80...90...100 - done.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "
speyside_05min.map\n",
+ "
\n"
+ ],
+ "text/plain": [
+ "speyside_05min.map\n"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# Catchment bounding boxes\n",
+ "clonemap_extents = {\n",
+ " \"Speyside\": {\"latitude\": (57, 58), \"longitude\": (-4.5, -2.5)},\n",
+ "}\n",
+ "\n",
+ "forcing_resolution = 0.5\n",
+ "for (catchment, extents) in clonemap_extents.items():\n",
+ " latmin, latmax = extents[\"latitude\"]\n",
+ " lonmin, lonmax = extents[\"longitude\"]\n",
+ " print(\n",
+ " create_clonemap(lonmin, latmin, lonmax, latmax, forcing_resolution, catchment)\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f271a64a33cd5083",
+ "metadata": {},
+ "source": [
+ "## Parameter set"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "91f89863-cc10-427c-9053-225387da6a46",
+ "metadata": {},
+ "source": [
+ "For the parameter set we require a .ini file.\n",
+ "This is a file you have to edit yourself!\n",
+ "Here we put it in the same directory as this notebook to make it easier to edit on the fly.\n",
+ "\n",
+ "This .ini file is very tricky for version 1.0, we overwrite most of the global parameters, but you have to change the **cloneMap** and **landMask**.\n",
+ "Also note that everything is relative to the inputDir, which is: /data/shared/parameter-sets/pcrglobwb_global.\n",
+ "**This also means that you have to ask an administrator to put your cloneMap in the correct folder!!!**"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "2634cd83-2ac7-4559-9184-8cedf7bb5721",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# Get the discharge data\n",
+ "timeseries.plot()\n",
+ "plt.ylabel(\"Discharge $[m^3/s]$\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e127f4cee22f06f6",
+ "metadata": {},
+ "source": [
+ "## Finalize the model"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "id": "f7cab2e9f16f61b3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pcrglob.finalize()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "bfa423e8-c028-4415-a4d3-28dc989b7b05",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/book/content/models/speyside_05min.ini b/book/content/models/speyside_05min.ini
new file mode 100644
index 0000000..2d112cd
--- /dev/null
+++ b/book/content/models/speyside_05min.ini
@@ -0,0 +1,470 @@
+[globalOptions]
+
+# Set the pcrglobwb output directory in an absolute path.
+outputDir = /home/mmelotto/forcing/camelsgb_8004/pcrglobwb
+
+# Set the input directory map in an absolute path.
+# - The input forcing and parameter directories and files will be relative to this.
+inputDir = /data/shared/parameter-sets/pcrglobwb_global
+
+
+# Map of clone (must be provided in PCRaster maps)
+# - Spatial resolution and coverage are based on this map:
+cloneMap = global_05min/cloneMaps/speyside_05min.map
+#cloneMap = /home/mmelotto/forcing/camelsgb_8004/pcrglobwb/speyside_05min.map
+#cloneMap = ./speyside_05min.map
+
+# The area/landmask of interest:
+# If None, area/landmask is limited for cells with ldd value.
+landmask = global_05min/cloneMaps/speyside_05min.map
+#landmask = ./speyside_05min.map
+
+# netcdf attributes for output files:
+institution = Department of Physical Geography, Utrecht University
+title = PCR-GLOBWB 2 output (not coupled to MODFLOW)
+description = by Edwin H. Sutanudjaja (contact: e.h.sutanudjaja@uu.nl)
+
+startTime = 2005-01-01
+endTime = 2008-12-31
+# Format: YYYY-MM-DD ; The model runs on daily time step.
+
+
+# spinning up options:
+maxSpinUpsInYears = 0
+minConvForSoilSto = 0.0
+minConvForGwatSto = 0.0
+minConvForChanSto = 0.0
+minConvForTotlSto = 0.0
+
+
+[meteoOptions]
+
+# Set the forcing temperature and precipitation files (relative to inputDir)
+precipitationNC = global_05min/meteo/pcrglobwb_OBS6_ERA5_reanaly_1_day_pr_2002-2016_merrimack.nc
+temperatureNC = global_05min/meteo/pcrglobwb_OBS6_ERA5_reanaly_1_day_tas_2002-2016_merrimack.nc
+
+# Method to calculate referencePotETP (reference potential evaporation+transpiration)
+# options are "Hamon" and "Input" ; If "Input", the netcdf input file must be given:
+referenceETPotMethod = Hamon
+refETPotFileNC = 'test'
+
+# variable names in the forcing files (optional)
+precipitationVariableName = pr
+temperatureVariableName = tas
+referenceEPotVariableName = potentialEvaporation
+
+# conversion constants and factors to correct forcing values (optional) so that the units are in m.day-1 and degree Celcius
+precipitationConstant = 0.0
+precipitationFactor = 1.0
+temperatureConstant = -273.15
+temperatureFactor = 1.0
+referenceEPotConstant = 0.0
+referenceEPotFactor = 1.0
+
+
+[meteoDownscalingOptions]
+# This section is for a 5 arcmin run, for downscaling meteorological forcing at 30 arcmin to 5 arcmin.
+
+downscalePrecipitation = True
+downscaleTemperature = True
+downscaleReferenceETPot = True
+
+# downscaling (based on the digital elevation model):
+# The downscaling will be performed by providing the "cellIds" (meteoDownscaleIds) of lower resolution cells.
+meteoDownscaleIds = global_05min/meteo/downscaling_from_30min/uniqueIds_30min.map
+highResolutionDEM = global_05min/meteo/downscaling_from_30min/gtopo05min.map
+
+# lapse rates:
+temperLapseRateNC = global_05min/meteo/downscaling_from_30min/temperature_slope.nc
+precipLapseRateNC = global_05min/meteo/downscaling_from_30min/precipitation_slope.nc
+
+# downscaling criteria (TODO: remove these):
+temperatCorrelNC = global_05min/meteo/downscaling_from_30min/temperature_correl.nc
+precipitCorrelNC = global_05min/meteo/downscaling_from_30min/precipitation_correl.nc
+
+# windows length (unit: arc-degree) for smoothing/averaging forcing data (not recommended):
+smoothingWindowsLength = 0
+
+
+[landSurfaceOptions]
+debugWaterBalance = True
+
+numberOfUpperSoilLayers = 2
+
+# soil and parameters
+# - they are used for all land cover types, unless they are are defined in certain land cover type options
+# (e.g. different/various soil types for agriculture areas)
+topographyNC = global_05min/landSurface/topography/topography_parameters_5_arcmin_october_2015.nc
+soilPropertiesNC = global_05min/landSurface/soil/soilProperties5ArcMin.nc
+
+
+includeIrrigation = True
+
+# netcdf time series for historical expansion of irrigation areas (unit: hectares).
+# Note: The resolution of this map must be consistent with the resolution of cellArea.
+historicalIrrigationArea = global_05min/waterUse/irrigation/irrigated_areas/irrigationArea05ArcMin.nc
+
+# a pcraster map/value defining irrigation efficiency (dimensionless) - optional
+irrigationEfficiency = global_30min/waterUse/irrigation/irrigation_efficiency/efficiency.map
+
+
+includeDomesticWaterDemand = True
+includeIndustryWaterDemand = True
+includeLivestockWaterDemand = True
+
+# domestic, industrial and livestock water demand data (unit must be in m.day-1)
+domesticWaterDemandFile = global_05min/waterUse/waterDemand/domestic/domestic_water_demand_version_april_2015.nc
+industryWaterDemandFile = global_05min/waterUse/waterDemand/industry/industry_water_demand_version_april_2015.nc
+livestockWaterDemandFile = global_05min/waterUse/waterDemand/livestock/livestock_water_demand_version_april_2015.nc
+
+
+# desalination water supply (maximum/potential/capacity)
+desalinationWater = global_05min/waterUse/desalination/desalination_water_version_april_2015.nc
+
+
+# zone IDs (scale) at which allocations of groundwater and surface water (as well as desalinated water) are performed
+allocationSegmentsForGroundSurfaceWater = global_05min/waterUse/abstraction_zones/abstraction_zones_60min_05min.map
+
+# pcraster maps defining the partitioning of groundwater - surface water source
+#
+# - predefined surface water - groundwater partitioning for irrigation demand (e.g. based on Siebert, Global Map of Irrigation Areas version 5)
+irrigationSurfaceWaterAbstractionFractionData = global_05min/waterUse/source_partitioning/surface_water_fraction_for_irrigation/AEI_SWFRAC.map
+# -- quality map
+irrigationSurfaceWaterAbstractionFractionDataQuality = global_05min/waterUse/source_partitioning/surface_water_fraction_for_irrigation/AEI_QUAL.map
+#
+# - threshold values defining the preference for surface water source for irrigation purpose
+# -- treshold to maximize surface water irrigation use (cells with irrSurfaceWaterAbstractionFraction above this will prioritize irrigation surface water use)
+treshold_to_maximize_irrigation_surface_water = 0.50
+# -- treshold to minimize fossil water withdrawal for irrigation (cells with irrSurfaceWaterAbstractionFraction below this have no fossil withdrawal for irrigation)
+treshold_to_minimize_fossil_groundwater_irrigation = 0.70
+#
+# - predefined surface water - groundwater partitioning for non irrigation demand (e.g. based on McDonald, 2014)
+maximumNonIrrigationSurfaceWaterAbstractionFractionData = global_30min/waterUse/source_partitioning/surface_water_fraction_for_non_irrigation/max_city_sw_fraction.map
+
+
+[forestOptions]
+
+name = forest
+debugWaterBalance = True
+
+# snow module properties
+snowModuleType = Simple
+freezingT = 0.0
+degreeDayFactor = 0.0025
+snowWaterHoldingCap = 0.1
+refreezingCoeff = 0.05
+
+# other parameter values
+minTopWaterLayer = 0.0
+minCropKC = 0.2
+
+cropCoefficientNC = global_05min/landSurface/landCover/naturalTall/cropCoefficientForest.nc
+interceptCapNC = global_05min/landSurface/landCover/naturalTall/interceptCapInputForest.nc
+coverFractionNC = global_05min/landSurface/landCover/naturalTall/coverFractionInputForest.nc
+
+landCoverMapsNC = None
+# If NC file is not provided, we have to provide the following pcraster maps:
+fracVegCover = global_05min/landSurface/landCover/naturalTall/vegf_tall.map
+minSoilDepthFrac = global_30min/landSurface/landCover/naturalTall/minf_tall_permafrost.map
+maxSoilDepthFrac = global_30min/landSurface/landCover/naturalTall/maxf_tall.map
+rootFraction1 = global_05min/landSurface/landCover/naturalTall/rfrac1_tall.map
+rootFraction2 = global_05min/landSurface/landCover/naturalTall/rfrac2_tall.map
+maxRootDepth = 1.0
+# Note: The maxRootDepth is not used for non irrigated land cover type.
+#
+# Parameters for the Arno's scheme:
+arnoBeta = None
+# If arnoBeta is defined, the soil water capacity distribution is based on this.
+# If arnoBeta is NOT defined, maxSoilDepthFrac must be defined such that arnoBeta will be calculated based on maxSoilDepthFrac and minSoilDepthFrac.
+
+# initial conditions:
+interceptStorIni = global_05min/initialConditions/interceptStor_forest_1999-12-31.map
+snowCoverSWEIni = global_05min/initialConditions/snowCoverSWE_forest_1999-12-31.map
+snowFreeWaterIni = global_05min/initialConditions/snowFreeWater_forest_1999-12-31.map
+topWaterLayerIni = global_05min/initialConditions/topWaterLayer_forest_1999-12-31.map
+storUppIni = global_05min/initialConditions/storUpp_forest_1999-12-31.map
+storLowIni = global_05min/initialConditions/storLow_forest_1999-12-31.map
+interflowIni = global_05min/initialConditions/interflow_forest_1999-12-31.map
+
+
+[grasslandOptions]
+
+name = grassland
+debugWaterBalance = True
+
+# snow module properties
+snowModuleType = Simple
+freezingT = 0.0
+degreeDayFactor = 0.0025
+snowWaterHoldingCap = 0.1
+refreezingCoeff = 0.05
+
+# other parameter values
+minTopWaterLayer = 0.0
+minCropKC = 0.2
+
+cropCoefficientNC = global_05min/landSurface/landCover/naturalShort/cropCoefficientGrassland.nc
+interceptCapNC = global_05min/landSurface/landCover/naturalShort/interceptCapInputGrassland.nc
+coverFractionNC = global_05min/landSurface/landCover/naturalShort/coverFractionInputGrassland.nc
+
+landCoverMapsNC = None
+# If NC file is not provided, we have to provide the following values:
+fracVegCover = global_05min/landSurface/landCover/naturalShort/vegf_short.map
+minSoilDepthFrac = global_30min/landSurface/landCover/naturalShort/minf_short_permafrost.map
+maxSoilDepthFrac = global_30min/landSurface/landCover/naturalShort/maxf_short.map
+rootFraction1 = global_05min/landSurface/landCover/naturalShort/rfrac1_short.map
+rootFraction2 = global_05min/landSurface/landCover/naturalShort/rfrac2_short.map
+maxRootDepth = 0.5
+# Note: The maxRootDepth is not used for non irrigated land cover type.
+#
+# Parameters for the Arno's scheme:
+arnoBeta = None
+# If arnoBeta is defined, the soil water capacity distribution is based on this.
+# If arnoBeta is NOT defined, maxSoilDepthFrac must be defined such that arnoBeta will be calculated based on maxSoilDepthFrac and minSoilDepthFrac.
+
+# initial conditions:
+interceptStorIni = global_05min/initialConditions/interceptStor_grassland_1999-12-31.map
+snowCoverSWEIni = global_05min/initialConditions/snowCoverSWE_grassland_1999-12-31.map
+snowFreeWaterIni = global_05min/initialConditions/snowFreeWater_grassland_1999-12-31.map
+topWaterLayerIni = global_05min/initialConditions/topWaterLayer_grassland_1999-12-31.map
+storUppIni = global_05min/initialConditions/storUpp_grassland_1999-12-31.map
+storLowIni = global_05min/initialConditions/storLow_grassland_1999-12-31.map
+interflowIni = global_05min/initialConditions/interflow_grassland_1999-12-31.map
+
+
+[irrPaddyOptions]
+
+name = irrPaddy
+debugWaterBalance = True
+
+# snow module properties
+snowModuleType = Simple
+freezingT = 0.0
+degreeDayFactor = 0.0025
+snowWaterHoldingCap = 0.1
+refreezingCoeff = 0.05
+#
+landCoverMapsNC = None
+# If NC file is not provided, we have to provide the following values:
+fracVegCover = global_05min/landSurface/landCover/irrPaddy/fractionPaddy.map
+minSoilDepthFrac = global_30min/landSurface/landCover/irrPaddy/minf_paddy_permafrost.map
+maxSoilDepthFrac = global_30min/landSurface/landCover/irrPaddy/maxf_paddy.map
+rootFraction1 = global_30min/landSurface/landCover/irrPaddy/rfrac1_paddy.map
+rootFraction2 = global_30min/landSurface/landCover/irrPaddy/rfrac2_paddy.map
+maxRootDepth = 0.5
+#
+# Parameters for the Arno's scheme:
+arnoBeta = None
+# If arnoBeta is defined, the soil water capacity distribution is based on this.
+# If arnoBeta is NOT defined, maxSoilDepthFrac must be defined such that arnoBeta will be calculated based on maxSoilDepthFrac and minSoilDepthFrac.
+#
+# other parameter values
+minTopWaterLayer = 0.05
+minCropKC = 0.2
+cropDeplFactor = 0.2
+minInterceptCap = 0.0002
+
+cropCoefficientNC = global_30min/landSurface/landCover/irrPaddy/Global_CropCoefficientKc-IrrPaddy_30min.nc
+
+# initial conditions:
+interceptStorIni = global_05min/initialConditions/interceptStor_irrPaddy_1999-12-31.map
+snowCoverSWEIni = global_05min/initialConditions/snowCoverSWE_irrPaddy_1999-12-31.map
+snowFreeWaterIni = global_05min/initialConditions/snowFreeWater_irrPaddy_1999-12-31.map
+topWaterLayerIni = global_05min/initialConditions/topWaterLayer_irrPaddy_1999-12-31.map
+storUppIni = global_05min/initialConditions/storUpp_irrPaddy_1999-12-31.map
+storLowIni = global_05min/initialConditions/storLow_irrPaddy_1999-12-31.map
+interflowIni = global_05min/initialConditions/interflow_irrPaddy_1999-12-31.map
+
+
+[irrNonPaddyOptions]
+
+name = irrNonPaddy
+debugWaterBalance = True
+
+# snow module properties
+snowModuleType = Simple
+freezingT = 0.0
+degreeDayFactor = 0.0025
+snowWaterHoldingCap = 0.1
+refreezingCoeff = 0.05
+#
+landCoverMapsNC = None
+# If NC file is not provided, we have to provide the following values:
+fracVegCover = global_05min/landSurface/landCover/irrNonPaddy/fractionNonPaddy.map
+minSoilDepthFrac = global_30min/landSurface/landCover/irrNonPaddy/minf_nonpaddy_permafrost.map
+maxSoilDepthFrac = global_30min/landSurface/landCover/irrNonPaddy/maxf_nonpaddy.map
+rootFraction1 = global_30min/landSurface/landCover/irrNonPaddy/rfrac1_nonpaddy.map
+rootFraction2 = global_30min/landSurface/landCover/irrNonPaddy/rfrac2_nonpaddy.map
+maxRootDepth = 1.0
+#
+# Parameters for the Arno's scheme:
+arnoBeta = None
+# If arnoBeta is defined, the soil water capacity distribution is based on this.
+# If arnoBeta is NOT defined, maxSoilDepthFrac must be defined such that arnoBeta will be calculated based on maxSoilDepthFrac and minSoilDepthFrac.
+#
+# other parameter values
+minTopWaterLayer = 0.0
+minCropKC = 0.2
+cropDeplFactor = 0.5
+minInterceptCap = 0.0002
+
+cropCoefficientNC = global_30min/landSurface/landCover/irrNonPaddy/Global_CropCoefficientKc-IrrNonPaddy_30min.nc
+
+# initial conditions:
+interceptStorIni = global_05min/initialConditions/interceptStor_irrNonPaddy_1999-12-31.map
+snowCoverSWEIni = global_05min/initialConditions/snowCoverSWE_irrNonPaddy_1999-12-31.map
+snowFreeWaterIni = global_05min/initialConditions/snowFreeWater_irrNonPaddy_1999-12-31.map
+topWaterLayerIni = global_05min/initialConditions/topWaterLayer_irrNonPaddy_1999-12-31.map
+storUppIni = global_05min/initialConditions/storUpp_irrNonPaddy_1999-12-31.map
+storLowIni = global_05min/initialConditions/storLow_irrNonPaddy_1999-12-31.map
+interflowIni = global_05min/initialConditions/interflow_irrNonPaddy_1999-12-31.map
+
+
+
+
+[groundwaterOptions]
+
+debugWaterBalance = True
+
+groundwaterPropertiesNC = global_05min/groundwater/properties/groundwaterProperties5ArcMin.nc
+# The file will contain specificYield (m3.m-3), kSatAquifer, recessionCoeff (day-1)
+#
+# - minimum value for groundwater recession coefficient (day-1)
+minRecessionCoeff = 1.0e-4
+
+# some options for constraining groundwater abstraction
+limitFossilGroundWaterAbstraction = True
+estimateOfRenewableGroundwaterCapacity = 0.0
+estimateOfTotalGroundwaterThickness = global_05min/groundwater/aquifer_thickness_estimate/thickness_05min.map
+# minimum and maximum total groundwater thickness
+minimumTotalGroundwaterThickness = 100.
+maximumTotalGroundwaterThickness = None
+
+# annual pumping capacity for each region (unit: billion cubic meter per year), should be given in a netcdf file
+pumpingCapacityNC = global_30min/waterUse/groundwater_pumping_capacity/regional_abstraction_limit.nc
+
+# initial conditions:
+storGroundwaterIni = global_05min/initialConditions/storGroundwater_1999-12-31.map
+storGroundwaterFossilIni = global_05min/initialConditions/storGroundwaterFossil_1999-12-31.map
+#
+# additional initial conditions for pumping behaviors
+avgNonFossilGroundwaterAllocationLongIni = global_05min/initialConditions/avgNonFossilGroundwaterAllocationLong_1999-12-31.map
+avgNonFossilGroundwaterAllocationShortIni = global_05min/initialConditions/avgNonFossilGroundwaterAllocationShort_1999-12-31.map
+avgTotalGroundwaterAbstractionIni = global_05min/initialConditions/avgTotalGroundwaterAbstraction_1999-12-31.map
+avgTotalGroundwaterAllocationLongIni = global_05min/initialConditions/avgTotalGroundwaterAllocationLong_1999-12-31.map
+avgTotalGroundwaterAllocationShortIni = global_05min/initialConditions/avgTotalGroundwaterAllocationShort_1999-12-31.map
+#
+# additional initial conditions (needed only for MODFLOW run)
+relativeGroundwaterHeadIni = global_05min/initialConditions/relativeGroundwaterHead_1999-12-31.map
+baseflowIni = global_05min/initialConditions/baseflow_1999-12-31.map
+
+# zonal IDs (scale) at which zonal allocation of groundwater is performed
+allocationSegmentsForGroundwater = global_05min/waterUse/abstraction_zones/abstraction_zones_30min_05min.map
+
+# assumption for the minimum transmissivity value (unit: m2/day) that can be extracted (via capillary rise and/or groundwater abstraction) - optional
+minTransimissivityForProductiveAquifer = 50.0
+# - if None, abstraction and/or capillary rise can always occur everywhere and unlimited
+
+
+
+[routingOptions]
+
+debugWaterBalance = True
+
+# drainage direction map
+lddMap = global_05min/routing/ldd_and_cell_area/lddsound_05min.map
+
+# cell area (unit: m2)
+cellAreaMap = global_05min/routing/ldd_and_cell_area/cellsize05min.correct.map
+
+# routing method: accuTravelTime or kinematicWave
+routingMethod = accuTravelTime
+
+# manning coefficient
+manningsN = 0.04
+
+# Option for flood plain simulation
+dynamicFloodPlain = False
+
+# manning coefficient for floodplain
+floodplainManningsN = 0.07
+
+
+# channel gradient
+gradient = global_05min/routing/channel_properties/channel_gradient.map
+
+# constant channel depth
+constantChannelDepth = global_05min/routing/channel_properties/bankfull_depth.map
+
+# constant channel width (optional)
+constantChannelWidth = global_05min/routing/channel_properties/bankfull_width.map
+
+# minimum channel width (optional)
+minimumChannelWidth = global_05min/routing/channel_properties/bankfull_width.map
+
+# channel properties for flooding
+bankfullCapacity = None
+# - If None, it will be estimated from (bankfull) channel depth (m) and width (m)
+
+
+# files for relative elevation (above minimum dem)
+relativeElevationFiles = global_05min/routing/channel_properties/dzRel%04d.map
+relativeElevationLevels = 0.0, 0.01, 0.05, 0.10, 0.20, 0.30, 0.40, 0.50, 0.60, 0.70, 0.80, 0.90, 1.00
+
+
+# composite crop factors for WaterBodies:
+cropCoefficientWaterNC = global_30min/routing/kc_surface_water/cropCoefficientForOpenWater.nc
+minCropWaterKC = 1.00
+
+
+# lake and reservoir parameters
+waterBodyInputNC = global_05min/routing/surface_water_bodies/waterBodies5ArcMin.nc
+onlyNaturalWaterBodies = False
+
+
+# initial conditions:
+waterBodyStorageIni = global_05min/initialConditions/waterBodyStorage_1999-12-31.map
+channelStorageIni = global_05min/initialConditions/channelStorage_1999-12-31.map
+readAvlChannelStorageIni = global_05min/initialConditions/readAvlChannelStorage_1999-12-31.map
+avgDischargeLongIni = global_05min/initialConditions/avgDischargeLong_1999-12-31.map
+avgDischargeShortIni = global_05min/initialConditions/avgDischargeShort_1999-12-31.map
+m2tDischargeLongIni = global_05min/initialConditions/m2tDischargeLong_1999-12-31.map
+avgBaseflowLongIni = global_05min/initialConditions/avgBaseflowLong_1999-12-31.map
+riverbedExchangeIni = global_05min/initialConditions/riverbedExchange_1999-12-31.map
+#
+# initial condition of sub-time step discharge (needed for estimating number of time steps in kinematic wave methods)
+subDischargeIni = global_05min/initialConditions/subDischarge_1999-12-31.map
+#
+avgLakeReservoirInflowShortIni = global_05min/initialConditions/avgLakeReservoirInflowShort_1999-12-31.map
+avgLakeReservoirOutflowLongIni = global_05min/initialConditions/avgLakeReservoirOutflowLong_1999-12-31.map
+#
+# number of days (timesteps) that have been performed for spinning up initial conditions in the routing module (i.e. channelStorageIni, avgDischargeLongIni, avgDischargeShortIni, etc.)
+timestepsToAvgDischargeIni = global_05min/initialConditions/timestepsToAvgDischarge_1999-12-31.map
+# Note that:
+# - maximum number of days (timesteps) to calculate long term average flow values (default: 5 years = 5 * 365 days = 1825)
+# - maximum number of days (timesteps) to calculate short term average values (default: 1 month = 1 * 30 days = 30)
+
+
+
+
+[reportingOptions]
+
+# output files that will be written in the disk in netcdf files:
+# - daily resolution
+#outDailyTotNC = temperature,precipitation,totalEvaporation,gwRecharge,totalRunoff,totalGroundwaterAbstraction,discharge,surfaceWaterStorage,interceptStor,snowFreeWater,snowCoverSWE,topWaterLayer,storUppTotal,storLowTotal,storGroundwater,totalWaterStorageThickness,channelStorage,surfaceWaterAbstraction,nonIrrGrossDemand,nonIrrWaterConsumption,irrPaddyWaterWithdrawal,domesticWaterWithdrawal,industryWaterWithdrawal,precipitation_at_irrigation,evaporation_from_irrigation,surfaceWaterStorage,dynamicFracWat,floodVolume,floodDepth,surfaceWaterLevel
+outDailyTotNC = temperature,precipitation,totalEvaporation,gwRecharge,totalRunoff,totalGroundwaterAbstraction,discharge,surfaceWaterStorage,interceptStor,snowFreeWater,snowCoverSWE,topWaterLayer,storUppTotal,storLowTotal,storGroundwater,totalWaterStorageThickness,channelStorage,surfaceWaterAbstraction,nonIrrGrossDemand,nonIrrWaterConsumption,irrPaddyWaterWithdrawal,domesticWaterWithdrawal,industryWaterWithdrawal,precipitation_at_irrigation,evaporation_from_irrigation,surfaceWaterStorage,dynamicFracWat,surfaceWaterLevel
+# - monthly resolution
+outMonthTotNC = None
+outMonthAvgNC = None
+outMonthEndNC = None
+# - annual resolution
+outAnnuaTotNC = None
+outAnnuaAvgNC = None
+outAnnuaEndNC = None
+# - monthly and annual maxima
+outMonthMaxNC = None
+outAnnuaMaxNC = None
+
+# netcdf format and zlib setup
+formatNetCDF = NETCDF4
+zlib = True
\ No newline at end of file
diff --git a/book/content/models/speyside_05min.map b/book/content/models/speyside_05min.map
new file mode 100644
index 0000000..e044b0c
Binary files /dev/null and b/book/content/models/speyside_05min.map differ
diff --git a/book/content/models/wflow.md b/book/content/models/wflow.md
new file mode 100644
index 0000000..26760bb
--- /dev/null
+++ b/book/content/models/wflow.md
@@ -0,0 +1,4 @@
+# Wflow
+
+Here we will show use cases of Wflow (eWaterCycle v2.4).
+These are Wflow.jl and Wflow.
\ No newline at end of file
diff --git a/book/some_content/placeholder.md b/book/content/placeholder.md
similarity index 100%
rename from book/some_content/placeholder.md
rename to book/content/placeholder.md
diff --git a/book/some_content/why.md b/book/content/why.md
similarity index 88%
rename from book/some_content/why.md
rename to book/content/why.md
index 42ea2a9..b57b57d 100644
--- a/book/some_content/why.md
+++ b/book/content/why.md
@@ -8,7 +8,8 @@ There was a clear need for a modern platform that supports transparent, reproduc
eWaterCycle has been developed to take away programming and compatibility issues within hydrological modelling.
It is designed in a FAIR (Findable Accessible Interoperable Reusable) way.
Many models use different input/outputs formats, making interoperability hard.
+Different models use different programming languages.
This is what eWaterCycle changes.
-It utilizes the Basic Model Interface (BMI) to improve interoperability between all the models.
+It utilizes the Basic Model Interface (BMI) to improve interoperability between all the models we support currently.
diff --git a/book/content/why/sowhat.md b/book/content/why/sowhat.md
new file mode 100644
index 0000000..1a55a74
--- /dev/null
+++ b/book/content/why/sowhat.md
@@ -0,0 +1,15 @@
+# So What?
+
+eWaterCycle helps researchers & students to accelerate scientific discovery, improve collaboration, and enhance decision-making.
+By making models and workflows reproducible and shareable, it bridges the gap between individual research and global water challenges.
+It also helps in validating your own models, since you can easily switch between models.
+
+It supports education (BSc + MSc + PhD Theses & Master courses) with research.
+It enables students to do hydrological research at any level, scaling up the academic ladder.
+Meaning that the research done by a BSc can be used and improved by MSc, PhD and professors.
+It also promotes openness, and provides a foundation for more reliable water management, especially in the context of climate change and sustainable development.
+
+Everyone is also encouraged to share their models/data with the eWaterCycle team to make them compatible.
+We would gladly help you with this!
+
+
diff --git a/book/some_content/why/what.md b/book/content/why/what.md
similarity index 94%
rename from book/some_content/why/what.md
rename to book/content/why/what.md
index 9e66257..4c6bd71 100644
--- a/book/some_content/why/what.md
+++ b/book/content/why/what.md
@@ -17,8 +17,10 @@ The coupling of models paired with our seamless integration of data sources, vis
- Access and preprocess input data seamlessly.
- Reproduce results and share workflows across institutions.
- Follow FAIR data principles, enhancing trust and transparency.
+- Share their analysis via jupyter notebooks that can be shown in teachbooks, like this one.
The platform is designed with researchers in mind, enabling both simple experiments and complex coupled simulations.
+With Jupyter notebooks for easy Python analysis.
## More Technical Details
diff --git a/book/content/workflows.md b/book/content/workflows.md
new file mode 100644
index 0000000..ca43272
--- /dev/null
+++ b/book/content/workflows.md
@@ -0,0 +1,54 @@
+# Different Types of Workflows
+
+Here we will link to research applications using eWaterCycle v2.4.
+These are examples of workflows; they might not work with the current version of eWaterCycle.
+
+The workflows are work that has been done using eWaterCycle, and are provided here to help you kickstart your research!
+Here is a quick overview:
+
+## Running a Model
+
+### Flooding
+
+Using CMIP6 future data together with a calibrated HBV model, Thirza van Esch did research into the flooding of the Wien River.
+As part of [her BSc thesis](https://www.ewatercycle.org/projects/main/thesis_projects/BSc/2025_Q3_ThirzaVanEsch_CEG/BSc_ThirzaVanEsch.html).
+
+### Droughts
+
+Using a calibrated HBV model with CMIP6 future data, Ischa Hollemans looked at droughts of the Loire River.
+As part of [his BSc thesis](https://www.ewatercycle.org/projects/main/thesis_projects/BSc/2025_Q3_IschaHollemans_CEG/BSc_IschaHollemans.html).
+
+### Climate Change
+
+[Being worked on]
+
+## Calibrating Models
+
+### HBV
+
+[Here](https://www.ewatercycle.org/projects/main/thesis_projects/BSc/2025_Q3_ThirzaVanEsch_CEG/Report/CHAPTER3OVER.html#calibration) Thirza van Esch used RMSE and the Nelder-Mead optimization to calibrate her HBV model for flooding.
+
+## Comparisons
+
+### 1 Model, Multiple Forcings
+
+In a climate change analysis we use multiple forcings with the same HBV model.
+The model is calibrated on the ERA5 dataset as input data which is checked against the discharge that is provided by the CAMELS dataset.
+This calibrated HBV model is then used together with CMIP6 climate scenarios, and the results are analyzed.
+See the results [here](https://www.ewatercycle.org/projects/main/thesis_projects/Research/eWaterCycle-ClimateChangeImpact/second_results/hysets_02481510/step_4_analyse_executed.html).
+
+### 1 Forcing, Multiple Models
+
+[Being worked on]
+
+## Model Coupling
+
+[Being worked on]
+
+## Data Assimilation
+
+Dr. Jerom Aerts has used Data Assimilation (DA) to implement an uncertainty-aware model evaluation framework within eWaterCycle to assess hydrological model performance while explicitly accounting for discharge observation uncertainty.
+Using 299 catchments from CAMELS-GB, he demonstrated that many apparent improvements from calibration or model differences fall within observational uncertainty bounds, underscoring the need for uncertainty-aware approaches such as data assimilation.
+This can be found [here](https://hess.copernicus.org/articles/28/5011/2024/) and [here2](https://research-portal.uu.nl/en/publications/on-the-importance-of-discharge-observation-uncertainty-when-inter/).
+With his code here on [GitHub](https://github.com/jeromaerts/CAMELS-GB_Comparison_Uncertainty).
+
diff --git a/book/some_content/workflows/DA.md b/book/content/workflows/DA.md
similarity index 100%
rename from book/some_content/workflows/DA.md
rename to book/content/workflows/DA.md
diff --git a/book/some_content/workflows/calibrating_models.md b/book/content/workflows/calibrating_models.md
similarity index 100%
rename from book/some_content/workflows/calibrating_models.md
rename to book/content/workflows/calibrating_models.md
diff --git a/book/some_content/workflows/calibrating_models/calibrate_hbv.ipynb b/book/content/workflows/calibrating_models/calibrate_hbv.ipynb
similarity index 100%
rename from book/some_content/workflows/calibrating_models/calibrate_hbv.ipynb
rename to book/content/workflows/calibrating_models/calibrate_hbv.ipynb
diff --git a/book/some_content/workflows/comparisons.md b/book/content/workflows/comparisons.md
similarity index 100%
rename from book/some_content/workflows/comparisons.md
rename to book/content/workflows/comparisons.md
diff --git a/book/some_content/workflows/comparisons/1_forcing_multiple_models.md b/book/content/workflows/comparisons/1_forcing_multiple_models.md
similarity index 100%
rename from book/some_content/workflows/comparisons/1_forcing_multiple_models.md
rename to book/content/workflows/comparisons/1_forcing_multiple_models.md
diff --git a/book/some_content/workflows/comparisons/1_model_multiple_forcings.ipynb b/book/content/workflows/comparisons/1_model_multiple_forcings.ipynb
similarity index 100%
rename from book/some_content/workflows/comparisons/1_model_multiple_forcings.ipynb
rename to book/content/workflows/comparisons/1_model_multiple_forcings.ipynb
diff --git a/book/some_content/workflows/model_coupling.md b/book/content/workflows/model_coupling.md
similarity index 100%
rename from book/some_content/workflows/model_coupling.md
rename to book/content/workflows/model_coupling.md
diff --git a/book/some_content/workflows/running_a_model.md b/book/content/workflows/running_a_model.md
similarity index 100%
rename from book/some_content/workflows/running_a_model.md
rename to book/content/workflows/running_a_model.md
diff --git a/book/some_content/workflows/running_a_model/climate_change.ipynb b/book/content/workflows/running_a_model/climate_change.ipynb
similarity index 100%
rename from book/some_content/workflows/running_a_model/climate_change.ipynb
rename to book/content/workflows/running_a_model/climate_change.ipynb
diff --git a/book/some_content/workflows/running_a_model/climate_change.md b/book/content/workflows/running_a_model/climate_change.md
similarity index 100%
rename from book/some_content/workflows/running_a_model/climate_change.md
rename to book/content/workflows/running_a_model/climate_change.md
diff --git a/book/some_content/workflows/running_a_model/discharge.ipynb b/book/content/workflows/running_a_model/discharge.ipynb
similarity index 100%
rename from book/some_content/workflows/running_a_model/discharge.ipynb
rename to book/content/workflows/running_a_model/discharge.ipynb
diff --git a/book/some_content/workflows/running_a_model/droughts.md b/book/content/workflows/running_a_model/droughts.md
similarity index 100%
rename from book/some_content/workflows/running_a_model/droughts.md
rename to book/content/workflows/running_a_model/droughts.md
diff --git a/book/some_content/workflows/running_a_model/flooding.ipynb b/book/content/workflows/running_a_model/flooding.ipynb
similarity index 100%
rename from book/some_content/workflows/running_a_model/flooding.ipynb
rename to book/content/workflows/running_a_model/flooding.ipynb
diff --git a/book/some_content/workflows/running_a_model/flooding.md b/book/content/workflows/running_a_model/flooding.md
similarity index 100%
rename from book/some_content/workflows/running_a_model/flooding.md
rename to book/content/workflows/running_a_model/flooding.md
diff --git a/book/intro.md b/book/intro.md
index c8e922a..085c811 100644
--- a/book/intro.md
+++ b/book/intro.md
@@ -1,9 +1,9 @@
# Getting Started With eWaterCycle
-Welcome to eWaterCycle!
-The [why](https://www.ewatercycle.org/getting-started/main/some_content/why.html), [what](https://www.ewatercycle.org/getting-started/main/some_content/why/what.html) and [so what](https://www.ewatercycle.org/getting-started/main/some_content/why/sowhat.html) explain what the platform is in more detail.
+##### Welcome to eWaterCycle!
+A more detailed introduction can be found in the [why](https://www.ewatercycle.org/getting-started/main/some_content/why.html), [what](https://www.ewatercycle.org/getting-started/main/some_content/why/what.html) and [so what](https://www.ewatercycle.org/getting-started/main/some_content/why/sowhat.html) pages, which explain the platform in more detail.
The quick version; eWaterCycle is a platform for hydrological modelling developed by hydrologists and research software engineers.
-This is done to take away interoperability/compatibility issues from the hydrologists, so they can perform their research more easily.
+This is done to take away interoperability/compatibility issues hydrologists face, so they can perform their research more easily.
We also offer an easy way to generate the forcing data for your model, we standardized the workflow for generating forcing and host ERA5 data ourselves.
The workflow of eWaterCycle, usually, goes as follows:
@@ -12,33 +12,43 @@ $
\text{Choose region & design experiment} \rightarrow \text{get forcing} \rightarrow \text{setup model} \rightarrow \text{analyse results}
$
-First in designing your experiment you need to think about the model(s) you want use.
-This defines the 2 essential steps in eWaterCycle: getting the forcing data & running the models.
+First, in designing your experiment you need to think about the model(s) you want to use; the region or regions you want to do research in are also important.
+This then leads to the 2 essential steps in eWaterCycle: getting the forcing (input) data for your region & running the model(s).
The input for different models differ, a quick example of using eWaterCycle can be found [here](https://www.ewatercycle.org/getting-started/main/some_content/first_model_run.html), and it is recommended that you start here.
-It explains the default workflow quickly and from there you can make alterations.
+It explains the default workflow quickly and from there you can make alterations to learn to work with the platform.
-Once you understand the basics you can change the region ([using Caravan](https://www.ewatercycle.org/getting-started/main/some_content/forcing/era5_forcing_caravan_shapefile.html), or [your own shapefile](https://www.ewatercycle.org/getting-started/main/some_content/forcing/era5_forcing_own_shapefile.html)) and change the model you use.
+Once you understand the basics you can change the region ([using Caravan](https://www.ewatercycle.org/getting-started/main/some_content/forcing/era5_forcing_caravan_shapefile.html), or [your own shapefile](https://www.ewatercycle.org/getting-started/main/some_content/forcing/era5_forcing_own_shapefile.html) for example) and change the model you use.
Analyzing your results will depend on your workflow, but some examples can be found in the [workflows](https://www.ewatercycle.org/getting-started/main/some_content/workflows.html).
For advanced workflows one can also couple models.
+This GitHub repository will host some basic workflows, and it will link to external, more complicated, workflows.
+These workflows are meant to kickstart your journey with eWaterCycle.
More info on the different models that we support and what they need can be found [here](https://www.ewatercycle.org/getting-started/main/some_content/different_models.html).
Generating the forcing data is done shown [here](https://www.ewatercycle.org/getting-started/main/some_content/generate_forcing.html).
This is the same for every model only the user needs to know what type of forcing and variables are needed for their chosen model(s).
After the forcing is generated the user can use different workflows, explained [here](https://www.ewatercycle.org/getting-started/main/some_content/workflows.html).
-### Youtube video of eWaterCycle 1.0 (currently 2.4)
+### YouTube video of eWaterCycle 1.0 (currently 2.4)
This video showcases the thought train behind eWaterCycle!
-[](https://www.youtube.com/watch?v=eE75dtIJ1lk)
+[](https://www.youtube.com/watch?v=eE75dtIJ1lk)
### How To Get On eWaterCycle
-Follow the rocket in the top right:  and click on JupyterHub.
-(Note: this has to be inside a jupyter notebook page on teachbooks **NOTE** this cannot be used in external pages yet)
-This will take you to  where you need to click Jupyter.
-From here you need to use your login.
+1. Click the *Launch eWaterCycle JupyterHub* button at the top of your screen.
+It will then ask you to provide a link to your server (it defaults to a server for students).
+Enter your username and password; it will then pull the *getting-started* GitHub page to your account and start at the **first run** notebook.
+
+**OR**
+
+2. Follow the rocket in the top right:  and click on JupyterHub.
+ (Note: this has to be inside a jupyter notebook page on teachbooks **NOTE** this cannot be used in external pages yet)
+ This will take you to  where you need to click Jupyter.
+ From here you need to use your login.
## Contents
-[TO DO]
+
+A quick overview; this can be seen in the left bar of this teachbook.
+
- Why eWaterCycle?
- What is eWaterCycle?
- So Why Use eWaterCycle?
diff --git a/book/some_content/generate_forcing.md b/book/some_content/generate_forcing.md
deleted file mode 100644
index ccc3dc6..0000000
--- a/book/some_content/generate_forcing.md
+++ /dev/null
@@ -1,45 +0,0 @@
-# Generating Forcing Data
-
-There are different ways to get forcing data for your model run that are supported by eWaterCycle.
-They also differ per model, so you will have to check the documentation of the model you want to use.
-
-Every model needs forcing data, there are several possible ways to get this forcing data:
-- Camels Forcing using Caravan
-- ERA5 reanalysis
- - Shapefile you made yourself
- - Shapefile from Caravan dataset
-- CMIP6 historical data
-- CMIP6 future data
-- Manual data input
-
-eWaterCycle supports different types of forcings, currently it supports:
-
-
-
-
CaravanForcing
-
DistributedMakkinkForcing
-
DistributedUserForcing
-
GenericDistributedForcing
-
-
-
GenericLumpedForcing
-
HypeForcing
-
LisfloodForcing
-
LumpedMakkinkForcing
-
-
-
LumpedUserForcing
-
MarrmotForcing
-
PCRGlobWBForcing
-
WflowForcing
-
-
-
-The type of forcing needed is model dependent!
-
-The forcing object in eWaterCycle has some properties:
-- start time
-- end time
-- directory, which is a path
-- shapefile, also a path pointing to the shapefile, it also needs the accompanying files (so .shp + .cpg, .prj, .dbf, .shx)
-- filenames, a dictionary containing the paths to the netCDF files where the data is stored for that variable
diff --git a/book/some_content/models/hbv.ipynb b/book/some_content/models/hbv.ipynb
deleted file mode 100644
index fd96fd7..0000000
--- a/book/some_content/models/hbv.ipynb
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "cells": [
- {
- "metadata": {},
- "cell_type": "markdown",
- "source": "# HBV",
- "id": "c174c2d93784aea8"
- },
- {
- "metadata": {},
- "cell_type": "code",
- "outputs": [],
- "execution_count": null,
- "source": "",
- "id": "f7f26aef2fa9a76d"
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 2
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.6"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/book/some_content/models/hbv.md b/book/some_content/models/hbv.md
deleted file mode 100644
index 9e8acf4..0000000
--- a/book/some_content/models/hbv.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# HBV
-
-Here we will show an example of 2 HBV cases.
\ No newline at end of file
diff --git a/book/some_content/models/pcr.md b/book/some_content/models/pcr.md
deleted file mode 100644
index ed12b31..0000000
--- a/book/some_content/models/pcr.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# PCRGlobWB 1.0
-
-Here are 2 cases of using PCRGlobWB
-
-The first one also has extra notebooks in its directory to show you how to make the clonemaps and generate forcing.
\ No newline at end of file
diff --git a/book/some_content/models/pcrglobwb.ipynb b/book/some_content/models/pcrglobwb.ipynb
deleted file mode 100644
index b6d24f2..0000000
--- a/book/some_content/models/pcrglobwb.ipynb
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "cells": [
- {
- "metadata": {},
- "cell_type": "markdown",
- "source": "# PCRGlobWB",
- "id": "cfd6d6bae8f37e43"
- },
- {
- "metadata": {},
- "cell_type": "code",
- "outputs": [],
- "execution_count": null,
- "source": "",
- "id": "9a67e737e06396ba"
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 2
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.6"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/book/some_content/models/wflow.md b/book/some_content/models/wflow.md
deleted file mode 100644
index 718aac7..0000000
--- a/book/some_content/models/wflow.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# Wflow
-
-Here we will show use cases of Wflow.
-They are Wflowjl and Wflow.
\ No newline at end of file
diff --git a/book/some_content/overview.md b/book/some_content/overview.md
deleted file mode 100644
index ec79e50..0000000
--- a/book/some_content/overview.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Some content
-
-Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
\ No newline at end of file
diff --git a/book/some_content/text_and_code.ipynb b/book/some_content/text_and_code.ipynb
deleted file mode 100644
index 9abffd6..0000000
--- a/book/some_content/text_and_code.ipynb
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Text and code\n",
- "\n",
- "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "hello world\n"
- ]
- }
- ],
- "source": [
- "print('hello world')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "base",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.9.18"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/book/some_content/why/sowhat.md b/book/some_content/why/sowhat.md
deleted file mode 100644
index 806b7bd..0000000
--- a/book/some_content/why/sowhat.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# So What
-
-eWaterCycle helps hydrologists & students to accelerate scientific discovery, improve collaboration, and enhance decision-making.
-By making models and workflows reproducible and shareable, it bridges the gap between individual research and global water challenges.
-It also helps in validation your own models, by easily being able to switch between models.
-
-It supports education (BSc + MSc + PhD Theses & Master courses), promotes openness, and provides a foundation for more reliable water management, especially in the context of climate change and sustainable development.
-
-Everyone is also encouraged to share their models/data with the eWaterCycle team to make them compatible.
-We would gladly help you with this!
-
-
diff --git a/book/some_content/workflows.md b/book/some_content/workflows.md
deleted file mode 100644
index afa7fa8..0000000
--- a/book/some_content/workflows.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# Different Types of Workflows
-
-Here we will link to research applications using eWaterCycle.
-These are examples of workflows, they might not work with the current version of eWaterCycle.
-
-Here is a quick overview:
-
-## Running a Model
-
-### Flooding
-
-Using CMIP6 future data together with a calibrated HBV model, Thirza van Esch did research into the flooding of the Wien River.
-As part of her BSc thesis.
-
-### Droughts
-
-Using a calibrated HBV model with CMIP 6 future data, Ischa Hollemans looked at droughts of the Loire River.
-As part of his BSc thesis.
-
-### Climate Change
-
-[Being worked on]
-
-## Calibrating Models
-
-### HBV
-
-Here Thirza van Esch used RMSE and the Nelder-Mead optimization to calibrate her HBV model for floodings.
-
-## Comparisons
-
-### 1 Model, Multiple Forcings
-
-[Being worked on]
-
-### 1 Forcing, Multiple Models
-
-[Being worked on]
-
-## Model Coupling
-
-[Being worked on]
-
-## Data Analysis
-
-[Being worked on]
-
-
-