From 6a323320b1705a87436f6a881f92a48e9a26e363 Mon Sep 17 00:00:00 2001 From: Aaron Spring <aaron.spring@mpimet.mpg.de> Date: Thu, 27 May 2021 08:28:36 +0000 Subject: [PATCH] rename print_RPS_per_year to skill_by_year --- notebooks/ML_forecast_template.ipynb | 18 +- notebooks/ML_train_and_predict.ipynb | 60 +---- notebooks/mean_bias_reduction.ipynb | 338 ++++++++++++++++++++++++--- notebooks/scripts.py | 4 +- 4 files changed, 324 insertions(+), 96 deletions(-) diff --git a/notebooks/ML_forecast_template.ipynb b/notebooks/ML_forecast_template.ipynb index 05b455a..732bb37 100644 --- a/notebooks/ML_forecast_template.ipynb +++ b/notebooks/ML_forecast_template.ipynb @@ -77,7 +77,7 @@ "\n", " - [ ] We didnt use 2020 observations in training (explicit overfitting and cheating)\n", " - [ ] We didnt repeatedly verify my model on 2020 observations and incrementally improved my RPSS (implicit overfitting)\n", - " - [ ] We provide RPS scores for the training period with script `print_RPS_per_year`, see in section 6.3 `predict`.\n", + " - [ ] We provide RPSS scores for the training period with script `skill_by_year`, see in section 6.3 `predict`.\n", " - [ ] We tried our best to prevent [data leakage](https://en.wikipedia.org/wiki/Leakage_(machine_learning)?wprov=sfti1).\n", " - [ ] We honor the `train-validate-test` [split principle](https://en.wikipedia.org/wiki/Training,_validation,_and_test_sets). This means that the hindcast data is split into `train` and `validate`, whereas `test` is withheld.\n", " - [ ] We did use `test` explicitly in training or implicitly in incrementally adjusting parameters.\n", @@ -349,7 +349,7 @@ "source": [ "## `predict`\n", "\n", - "Create predictions and print `mean(variable, lead_time, longitude, weighted latitude)` RPS for all years as calculated by `print_RPS_per_year`." + "Create predictions and print `mean(variable, lead_time, longitude, weighted latitude)` RPSS for all years as calculated by `skill_by_year`. For now RPS, todo: change to RPSS."
] }, { @@ -358,7 +358,7 @@ "metadata": {}, "outputs": [], "source": [ - "from scripts import print_RPS_per_year" + "from scripts import skill_by_year" ] }, { @@ -397,7 +397,7 @@ "metadata": {}, "outputs": [], "source": [ - "print_RPS_per_year(preds_is)" + "skill_by_year(preds_is)" ] }, { @@ -422,7 +422,7 @@ "metadata": {}, "outputs": [], "source": [ - "print_RPS_per_year(preds_os)" + "skill_by_year(preds_os)" ] }, { @@ -447,7 +447,7 @@ "metadata": {}, "outputs": [], "source": [ - "print_RPS_per_year(preds_test)" + "skill_by_year(preds_test)" ] }, { @@ -594,9 +594,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python [conda env:s2s-ai]", + "display_name": "Python 3", "language": "python", - "name": "conda-env-s2s-ai-py" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -608,7 +608,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.10" + "version": "3.8.6" }, "toc-autonumbering": true }, diff --git a/notebooks/ML_train_and_predict.ipynb b/notebooks/ML_train_and_predict.ipynb index 37c93b1..fb6833c 100644 --- a/notebooks/ML_train_and_predict.ipynb +++ b/notebooks/ML_train_and_predict.ipynb @@ -83,7 +83,7 @@ "\n", " - [x] We didnt use 2020 observations in training (explicit overfitting and cheating)\n", " - [x] We didnt repeatedly verify my model on 2020 observations and incrementally improved my RPSS (implicit overfitting)\n", - " - [x] We provide RPS scores for the training period with script `print_RPS_per_year`, see in section 6.3 `predict`.\n", + " - [x] We provide RPSS scores for the training period with script `skill_by_year`, see in section 6.3 `predict`.\n", " - [x] We tried our best to prevent [data leakage](https://en.wikipedia.org/wiki/Leakage_(machine_learning)?wprov=sfti1).\n", " - [x] We honor the `train-validate-test` [split principle](https://en.wikipedia.org/wiki/Training,_validation,_and_test_sets). This means that the hindcast data is split into `train` and `validate`, whereas `test` is withheld.\n", " - [x] We did use `test` explicitly in training or implicitly in incrementally adjusting parameters.\n", @@ -758,7 +758,7 @@ "source": [ "## `predict`\n", "\n", - "Create predictions and print `mean(variable, lead_time, longitude, weighted latitude)` RPS for all years as calculated by `print_RPS_per_year`." + "Create predictions and print `mean(variable, lead_time, longitude, weighted latitude)` RPSS for all years as calculated by `skill_by_year`. For now RPS, todo: change to RPSS."
] }, { @@ -918,7 +918,7 @@ "metadata": {}, "outputs": [], "source": [ - "from scripts import print_RPS_per_year" + "from scripts import skill_by_year" ] }, { @@ -967,7 +967,7 @@ "step = 3\n", "for year in np.arange(int(time_train_start), int(time_train_end) -1, step): # loop over years to consume less memory on renku\n", " preds_is = create_predictions(cnn, hind_2000_2019, obs_2000_2019, time=slice(str(year), str(year+step-1))).compute()\n", - " print(print_RPS_per_year(preds_is))" + " print(skill_by_year(preds_is))" ] }, { @@ -977,8 +977,8 @@ "outputs": [], "source": [ "# not on renkulab, simply do\n", - "#preds_is = create_predictions(cnn, hind_2000_2019, obs_2000_2019, time=slice(time_train_start, time_train_end))\n", - "#print_RPS_per_year(preds_is)" + "# preds_is = create_predictions(cnn, hind_2000_2019, obs_2000_2019, time=slice(time_train_start, time_train_end))\n", + "# skill_by_year(preds_is)" ] }, { @@ -1049,7 +1049,7 @@ "source": [ "preds_os = create_predictions(cnn, hind_2000_2019, obs_2000_2019, time=slice(time_valid_start, time_valid_end))\n", "\n", - "print_RPS_per_year(preds_os)" + "skill_by_year(preds_os)" ] }, { @@ -1125,46 +1125,6 @@ "# Submission" ] }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Frozen(SortedKeysDict({'category': 3, 'lead_time': 2, 'forecast_time': 53, 'latitude': 121, 'longitude': 240}))" - ] - }, - "execution_count": 40, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "preds_test.sizes # expect: category(3), longitude, latitude, lead_time(2), forecast_time (53)" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'140.92 MiB'" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "format_bytes(preds_test.nbytes)" - ] - }, { "cell_type": "code", "execution_count": null, @@ -1293,9 +1253,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python [conda env:s2s-ai]", + "display_name": "Python 3", "language": "python", - "name": "conda-env-s2s-ai-py" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -1307,7 +1267,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.10" + "version": "3.8.6" }, "toc-autonumbering": true }, diff --git a/notebooks/mean_bias_reduction.ipynb b/notebooks/mean_bias_reduction.ipynb index 304db87..1c4fd68 100644 --- a/notebooks/mean_bias_reduction.ipynb +++ b/notebooks/mean_bias_reduction.ipynb @@ -84,7 +84,7 @@ "\n", " - [x] We didnt use 2020 observations in training (explicit overfitting and cheating)\n", " - [x] We didnt repeatedly verify my model on 2020 observations and incrementally improved my RPSS (implicit overfitting)\n", - " - [x] We provide RPS scores for the training period with script `print_RPS_per_year`, see in section 6.3 `predict`.\n", + " - [x] We provide RPSS scores for the training period with script `skill_by_year`, see in section 6.3 `predict`.\n", " - [x] We tried our best to prevent [data leakage](https://en.wikipedia.org/wiki/Leakage_(machine_learning)?wprov=sfti1).\n", " - [x] We honor the `train-validate-test` [split principle](https://en.wikipedia.org/wiki/Training,_validation,_and_test_sets). 
This means that the hindcast data is split into `train` and `validate`, whereas `test` is withheld.\n", " - [x] We did use `test` explicitly in training or implicitly in incrementally adjusting parameters.\n", @@ -269,13 +269,10 @@ ] }, { - "cell_type": "code", - "execution_count": 7, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# mask, needed?\n", - "hind_2000_2019 = hind_2000_2019.where(obs_2000_2019.isel(forecast_time=0, lead_time=0,drop=True).notnull())" + "Here, we just remove the mean bias from the ensemble mean forecast." ] }, { @@ -304,7 +301,7 @@ "source": [ "## `predict`\n", "\n", - "Create predictions and print `mean(variable, lead_time, longitude, weighted latitude)` RPS for all years as calculated by `print_RPS_per_year`." + "Create predictions and print `mean(variable, lead_time, longitude, weighted latitude)` RPSS for all years as calculated by `skill_by_year`. For now RPS, todo: change to RPSS." ] }, { @@ -391,7 +388,7 @@ "metadata": {}, "outputs": [], "source": [ - "from scripts import print_RPS_per_year" + "from scripts import skill_by_year" ] }, { @@ -563,7 +560,7 @@ } ], "source": [ - "print_RPS_per_year(preds_is)" + "skill_by_year(preds_is)" ] }, { @@ -647,7 +644,7 @@ } ], "source": [ - "print_RPS_per_year(preds_test)" + "skill_by_year(preds_test)" ] }, { @@ -657,26 +654,6 @@ "# Submission" ] }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'147.76 MB'" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "format_bytes(preds_test.nbytes)" - ] - }, { "cell_type": "code", "execution_count": null, @@ -847,9 +824,300 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# packages in environment at /opt/conda:\n", + "#\n", + "# Name Version Build Channel\n", + "_libgcc_mutex 0.1 conda_forge conda-forge\n", + "_openmp_mutex 4.5 1_gnu conda-forge\n", + "_tflow_select 2.3.0 mkl defaults\n", + "absl-py 0.12.0 py38h06a4308_0 defaults\n", + "aiobotocore 1.2.2 pyhd3eb1b0_0 defaults\n", + "aiohttp 3.7.4.post0 pypi_0 pypi\n", + "aioitertools 0.7.1 pyhd3eb1b0_0 defaults\n", + "alembic 1.4.3 pyh9f0ad1d_0 conda-forge\n", + "ansiwrap 0.8.4 pypi_0 pypi\n", + "appdirs 1.4.4 pypi_0 pypi\n", + "argcomplete 1.12.2 pypi_0 pypi\n", + "argon2-cffi 20.1.0 py38h497a2fe_2 conda-forge\n", + "argparse 1.4.0 pypi_0 pypi\n", + "asciitree 0.3.3 py_2 defaults\n", + "astunparse 1.6.3 py_0 defaults\n", + "async-timeout 3.0.1 pypi_0 pypi\n", + "async_generator 1.10 py_0 conda-forge\n", + "attrs 20.3.0 pyhd3deb0d_0 conda-forge\n", + "backcall 0.2.0 pyh9f0ad1d_0 conda-forge\n", + "backports 1.0 py_2 conda-forge\n", + "backports.functools_lru_cache 1.6.1 py_0 conda-forge\n", + "binutils_impl_linux-64 2.35.1 h193b22a_1 conda-forge\n", + "binutils_linux-64 2.35 h67ddf6f_30 conda-forge\n", + "black 20.8b1 pypi_0 pypi\n", + "blas 1.0 mkl defaults\n", + "bleach 3.2.1 pyh9f0ad1d_0 conda-forge\n", + "blinker 1.4 py_1 conda-forge\n", + "bokeh 2.3.2 py38h06a4308_0 defaults\n", + "botocore 1.20.78 pyhd3eb1b0_1 defaults\n", + "bottleneck 1.3.2 py38heb32a55_1 defaults\n", + "branca 0.3.1 pypi_0 pypi\n", + "brotlipy 0.7.0 py38h497a2fe_1001 conda-forge\n", + "bzip2 1.0.8 h7f98852_4 conda-forge\n", + "c-ares 1.17.1 h36c2ea0_0 conda-forge\n", + "ca-certificates 2021.4.13 h06a4308_1 defaults\n", + "cachetools 4.2.2 
pyhd3eb1b0_0 defaults\n", + "cdsapi 0.5.1 pypi_0 pypi\n", + "certifi 2020.12.5 py38h06a4308_0 defaults\n", + "certipy 0.1.3 py_0 conda-forge\n", + "cffi 1.14.4 py38ha65f79e_1 conda-forge\n", + "cfgrib 0.9.9.0 pyhd8ed1ab_1 conda-forge\n", + "cftime 1.5.0 py38h6323ea4_0 defaults\n", + "chardet 4.0.0 py38h578d9bd_1 conda-forge\n", + "click 7.1.2 pypi_0 pypi\n", + "climetlab 0.7.0 pypi_0 pypi\n", + "climetlab-s2s-ai-challenge 0.6.2 pypi_0 pypi\n", + "cloudpickle 1.6.0 py_0 defaults\n", + "colorama 0.4.4 pypi_0 pypi\n", + "conda 4.9.2 py38h578d9bd_0 conda-forge\n", + "conda-package-handling 1.7.2 py38h8df0ef7_0 conda-forge\n", + "configargparse 1.4.1 pypi_0 pypi\n", + "configurable-http-proxy 1.3.0 0 conda-forge\n", + "coverage 5.5 py38h27cfd23_2 defaults\n", + "cryptography 3.3.1 py38h2b97feb_1 conda-forge\n", + "curl 7.71.1 he644dc0_8 conda-forge\n", + "cycler 0.10.0 py38_0 defaults\n", + "cython 0.29.23 py38h2531618_0 defaults\n", + "cytoolz 0.11.0 py38h7b6447c_0 defaults\n", + "dask 2021.4.0 pyhd3eb1b0_0 defaults\n", + "dask-core 2021.4.0 pyhd3eb1b0_0 defaults\n", + "decorator 4.4.2 py_0 conda-forge\n", + "defusedxml 0.6.0 py_0 conda-forge\n", + "distributed 2021.5.0 py38h06a4308_0 defaults\n", + "distro 1.5.0 pypi_0 pypi\n", + "eccodes 2.18.0 hf05d9b7_0 conda-forge\n", + "ecmwf-api-client 1.6.1 pypi_0 pypi\n", + "ecmwflibs 0.3.7 pypi_0 pypi\n", + "entrypoints 0.3 pyhd8ed1ab_1003 conda-forge\n", + "fasteners 0.16 pyhd3eb1b0_0 defaults\n", + "findlibs 0.0.2 pypi_0 pypi\n", + "folium 0.12.1 pypi_0 pypi\n", + "freetype 2.10.4 h5ab3b9f_0 defaults\n", + "fsspec 0.9.0 pyhd3eb1b0_0 defaults\n", + "gast 0.4.0 py_0 defaults\n", + "gcc_impl_linux-64 9.3.0 h70c0ae5_18 conda-forge\n", + "gcc_linux-64 9.3.0 hf25ea35_30 conda-forge\n", + "gitdb 4.0.7 pypi_0 pypi\n", + "gitpython 3.1.14 pypi_0 pypi\n", + "google-auth 1.30.1 pyhd3eb1b0_0 defaults\n", + "google-auth-oauthlib 0.4.4 pyhd3eb1b0_0 defaults\n", + "google-pasta 0.2.0 py_0 defaults\n", + "grpcio 1.36.1 py38h2157cd5_1 defaults\n", + "gxx_impl_linux-64 9.3.0 hd87eabc_18 conda-forge\n", + "gxx_linux-64 9.3.0 h3fbe746_30 conda-forge\n", + "h5py 2.10.0 py38hd6299e0_1 defaults\n", + "hdf4 4.2.13 h3ca952b_2 defaults\n", + "hdf5 1.10.6 nompi_h3c11f04_101 conda-forge\n", + "heapdict 1.0.1 py_0 defaults\n", + "icu 68.1 h58526e2_0 conda-forge\n", + "idna 2.10 pyh9f0ad1d_0 conda-forge\n", + "importlib-metadata 3.4.0 py38h578d9bd_0 conda-forge\n", + "importlib_metadata 3.4.0 hd8ed1ab_0 conda-forge\n", + "intel-openmp 2021.2.0 h06a4308_610 defaults\n", + "ipykernel 5.4.2 py38h81c977d_0 conda-forge\n", + "ipython 7.19.0 py38h81c977d_2 conda-forge\n", + "ipython_genutils 0.2.0 py_1 conda-forge\n", + "jasper 1.900.1 hd497a04_4 defaults\n", + "jedi 0.17.2 py38h578d9bd_1 conda-forge\n", + "jinja2 2.11.2 pyh9f0ad1d_0 conda-forge\n", + "jmespath 0.10.0 py_0 defaults\n", + "joblib 1.0.1 pyhd3eb1b0_0 defaults\n", + "jpeg 9d h36c2ea0_0 conda-forge\n", + "json5 0.9.5 pyh9f0ad1d_0 conda-forge\n", + "jsonschema 3.2.0 py_2 conda-forge\n", + "jupyter-server-proxy 1.6.0 pypi_0 pypi\n", + "jupyter_client 6.1.11 pyhd8ed1ab_1 conda-forge\n", + "jupyter_core 4.7.0 py38h578d9bd_0 conda-forge\n", + "jupyter_telemetry 0.1.0 pyhd8ed1ab_1 conda-forge\n", + "jupyterhub 1.2.2 pypi_0 pypi\n", + "jupyterlab 2.2.9 py_0 conda-forge\n", + "jupyterlab-git 0.23.3 pypi_0 pypi\n", + "jupyterlab_pygments 0.1.2 pyh9f0ad1d_0 conda-forge\n", + "jupyterlab_server 1.2.0 py_0 conda-forge\n", + "keras-preprocessing 1.1.2 pyhd3eb1b0_0 defaults\n", + "kernel-headers_linux-64 2.6.32 h77966d4_13 
conda-forge\n", + "kiwisolver 1.3.1 py38h2531618_0 defaults\n", + "krb5 1.17.2 h926e7f8_0 conda-forge\n", + "lcms2 2.12 h3be6417_0 defaults\n", + "ld_impl_linux-64 2.35.1 hea4e1c9_1 conda-forge\n", + "libaec 1.0.4 he6710b0_1 defaults\n", + "libcurl 7.71.1 hcdd3856_8 conda-forge\n", + "libedit 3.1.20191231 he28a2e2_2 conda-forge\n", + "libev 4.33 h516909a_1 conda-forge\n", + "libffi 3.3 h58526e2_2 conda-forge\n", + "libgcc-devel_linux-64 9.3.0 h7864c58_18 conda-forge\n", + "libgcc-ng 9.3.0 h2828fa1_18 conda-forge\n", + "libgfortran-ng 7.3.0 hdf63c60_0 defaults\n", + "libgomp 9.3.0 h2828fa1_18 conda-forge\n", + "libllvm10 10.0.1 hbcb73fb_5 defaults\n", + "libnetcdf 4.7.4 nompi_h56d31a8_107 conda-forge\n", + "libnghttp2 1.41.0 h8cfc5f6_2 conda-forge\n", + "libpng 1.6.37 hbc83047_0 defaults\n", + "libprotobuf 3.14.0 h8c45485_0 defaults\n", + "libsodium 1.0.18 h36c2ea0_1 conda-forge\n", + "libssh2 1.9.0 hab1572f_5 conda-forge\n", + "libstdcxx-devel_linux-64 9.3.0 hb016644_18 conda-forge\n", + "libstdcxx-ng 9.3.0 h6de172a_18 conda-forge\n", + "libtiff 4.1.0 h2733197_1 defaults\n", + "libuv 1.40.0 h7f98852_0 conda-forge\n", + "llvmlite 0.36.0 py38h612dafd_4 defaults\n", + "locket 0.2.1 py38h06a4308_1 defaults\n", + "lz4-c 1.9.3 h2531618_0 defaults\n", + "magics 1.5.6 pypi_0 pypi\n", + "mako 1.1.4 pyh44b312d_0 conda-forge\n", + "markdown 3.3.4 py38h06a4308_0 defaults\n", + "markupsafe 1.1.1 py38h497a2fe_3 conda-forge\n", + "matplotlib-base 3.3.4 py38h62a2d02_0 defaults\n", + "mistune 0.8.4 py38h497a2fe_1003 conda-forge\n", + "mkl 2021.2.0 h06a4308_296 defaults\n", + "mkl-service 2.3.0 py38h27cfd23_1 defaults\n", + "mkl_fft 1.3.0 py38h42c9631_2 defaults\n", + "mkl_random 1.2.1 py38ha9443f7_2 defaults\n", + "monotonic 1.5 py_0 defaults\n", + "msgpack-python 1.0.2 py38hff7bd54_1 defaults\n", + "multidict 5.1.0 py38h27cfd23_2 defaults\n", + "mypy-extensions 0.4.3 pypi_0 pypi\n", + "nbclient 0.5.0 pypi_0 pypi\n", + "nbconvert 6.0.7 py38h578d9bd_3 conda-forge\n", + "nbdime 2.1.0 pypi_0 pypi\n", + "nbformat 5.1.2 pyhd8ed1ab_1 conda-forge\n", + "nbresuse 0.4.0 pypi_0 pypi\n", + "ncurses 6.2 h58526e2_4 conda-forge\n", + "nest-asyncio 1.4.3 pyhd8ed1ab_0 conda-forge\n", + "netcdf4 1.5.6 pypi_0 pypi\n", + "nodejs 15.3.0 h25f6087_0 conda-forge\n", + "notebook 6.2.0 py38h578d9bd_0 conda-forge\n", + "numba 0.53.1 py38ha9443f7_0 defaults\n", + "numcodecs 0.7.3 py38h2531618_0 defaults\n", + "numpy 1.20.2 py38h2d18471_0 defaults\n", + "numpy-base 1.20.2 py38hfae3a4d_0 defaults\n", + "oauthlib 3.0.1 py_0 conda-forge\n", + "olefile 0.46 py_0 defaults\n", + "openssl 1.1.1k h27cfd23_0 defaults\n", + "opt_einsum 3.3.0 pyhd3eb1b0_1 defaults\n", + "packaging 20.8 pyhd3deb0d_0 conda-forge\n", + "pamela 1.0.0 py_0 conda-forge\n", + "pandas 1.2.4 py38h2531618_0 defaults\n", + "pandoc 2.11.3.2 h7f98852_0 conda-forge\n", + "pandocfilters 1.4.2 py_1 conda-forge\n", + "papermill 2.3.1 pypi_0 pypi\n", + "parso 0.7.1 pyh9f0ad1d_0 conda-forge\n", + "partd 1.2.0 pyhd3eb1b0_0 defaults\n", + "pathspec 0.8.1 pypi_0 pypi\n", + "pdbufr 0.8.2 pypi_0 pypi\n", + "pexpect 4.8.0 pyh9f0ad1d_2 conda-forge\n", + "pickleshare 0.7.5 py_1003 conda-forge\n", + "pillow 8.2.0 py38he98fc37_0 defaults\n", + "pip 21.0.1 pypi_0 pypi\n", + "pipx 0.16.1.0 pypi_0 pypi\n", + "powerline-shell 0.7.0 pypi_0 pypi\n", + "prometheus_client 0.9.0 pyhd3deb0d_0 conda-forge\n", + "prompt-toolkit 3.0.10 pyha770c72_0 conda-forge\n", + "properscoring 0.1 py_0 conda-forge\n", + "protobuf 3.14.0 py38h2531618_1 defaults\n", + "psutil 5.8.0 py38h27cfd23_1 defaults\n", + 
"ptyprocess 0.7.0 pyhd3deb0d_0 conda-forge\n", + "pyasn1 0.4.8 py_0 defaults\n", + "pyasn1-modules 0.2.8 py_0 defaults\n", + "pycosat 0.6.3 py38h497a2fe_1006 conda-forge\n", + "pycparser 2.20 pyh9f0ad1d_2 conda-forge\n", + "pycurl 7.43.0.6 py38h996a351_1 conda-forge\n", + "pygments 2.7.4 pyhd8ed1ab_0 conda-forge\n", + "pyjwt 2.0.1 pyhd8ed1ab_0 conda-forge\n", + "pyodc 1.0.3 pypi_0 pypi\n", + "pyopenssl 20.0.1 pyhd8ed1ab_0 conda-forge\n", + "pyparsing 2.4.7 pyh9f0ad1d_0 conda-forge\n", + "pyrsistent 0.17.3 py38h497a2fe_2 conda-forge\n", + "pysocks 1.7.1 py38h578d9bd_3 conda-forge\n", + "python 3.8.6 hffdb5ce_4_cpython conda-forge\n", + "python-dateutil 2.8.1 py_0 conda-forge\n", + "python-eccodes 2021.03.0 py38hb5d20a5_0 conda-forge\n", + "python-editor 1.0.4 py_0 conda-forge\n", + "python-flatbuffers 1.12 pyhd3eb1b0_0 defaults\n", + "python-json-logger 2.0.1 pyh9f0ad1d_0 conda-forge\n", + "python_abi 3.8 1_cp38 conda-forge\n", + "pytz 2021.1 pyhd3eb1b0_0 defaults\n", + "pyyaml 5.4.1 pypi_0 pypi\n", + "pyzmq 21.0.1 py38h3d7ac18_0 conda-forge\n", + "readline 8.0 he28a2e2_2 conda-forge\n", + "regex 2021.4.4 pypi_0 pypi\n", + "requests 2.25.1 pyhd3deb0d_0 conda-forge\n", + "requests-oauthlib 1.3.0 py_0 defaults\n", + "rsa 4.7.2 pyhd3eb1b0_1 defaults\n", + "ruamel.yaml 0.16.12 py38h497a2fe_2 conda-forge\n", + "ruamel.yaml.clib 0.2.2 py38h497a2fe_2 conda-forge\n", + "ruamel_yaml 0.15.80 py38h497a2fe_1003 conda-forge\n", + "s3fs 0.6.0 pyhd3eb1b0_0 defaults\n", + "scikit-learn 0.24.2 py38ha9443f7_0 defaults\n", + "scipy 1.6.2 py38had2a1c9_1 defaults\n", + "send2trash 1.5.0 py_0 conda-forge\n", + "setuptools 49.6.0 py38h578d9bd_3 conda-forge\n", + "simpervisor 0.4 pypi_0 pypi\n", + "six 1.15.0 pyh9f0ad1d_0 conda-forge\n", + "sklearn-xarray 0.4.0 pypi_0 pypi\n", + "smmap 4.0.0 pypi_0 pypi\n", + "sortedcontainers 2.3.0 pyhd3eb1b0_0 defaults\n", + "sqlalchemy 1.3.22 py38h497a2fe_1 conda-forge\n", + "sqlite 3.34.0 h74cdb3f_0 conda-forge\n", + "sysroot_linux-64 2.12 h77966d4_13 conda-forge\n", + "tbb 2020.3 hfd86e86_0 defaults\n", + "tblib 1.7.0 py_0 defaults\n", + "tenacity 7.0.0 pypi_0 pypi\n", + "tensorboard 2.4.0 pyhc547734_0 defaults\n", + "tensorboard-plugin-wit 1.6.0 py_0 defaults\n", + "tensorflow 2.4.1 mkl_py38hb2083e0_0 defaults\n", + "tensorflow-base 2.4.1 mkl_py38h43e0292_0 defaults\n", + "tensorflow-estimator 2.4.1 pyheb71bc4_0 defaults\n", + "termcolor 1.1.0 py38h06a4308_1 defaults\n", + "terminado 0.9.2 py38h578d9bd_0 conda-forge\n", + "testpath 0.4.4 py_0 conda-forge\n", + "textwrap3 0.9.2 pypi_0 pypi\n", + "threadpoolctl 2.1.0 pyh5ca1d4c_0 defaults\n", + "tini 0.18.0 h14c3975_1001 conda-forge\n", + "tk 8.6.10 h21135ba_1 conda-forge\n", + "toml 0.10.2 pypi_0 pypi\n", + "toolz 0.11.1 pyhd3eb1b0_0 defaults\n", + "tornado 6.1 py38h497a2fe_1 conda-forge\n", + "tqdm 4.56.0 pyhd8ed1ab_0 conda-forge\n", + "traitlets 5.0.5 py_0 conda-forge\n", + "typed-ast 1.4.2 pypi_0 pypi\n", + "typing-extensions 3.7.4.3 hd3eb1b0_0 defaults\n", + "typing_extensions 3.7.4.3 pyh06a4308_0 defaults\n", + "urllib3 1.26.2 pyhd8ed1ab_0 conda-forge\n", + "userpath 1.4.2 pypi_0 pypi\n", + "wcwidth 0.2.5 pyh9f0ad1d_2 conda-forge\n", + "webencodings 0.5.1 py_1 conda-forge\n", + "werkzeug 1.0.1 pyhd3eb1b0_0 defaults\n", + "wheel 0.36.2 pyhd3deb0d_0 conda-forge\n", + "wrapt 1.12.1 py38h7b6447c_1 defaults\n", + "xarray 0.18.0 pyhd3eb1b0_1 defaults\n", + "xhistogram 0.1.2 pyhd8ed1ab_0 conda-forge\n", + "xskillscore 0.0.20 pyhd8ed1ab_1 conda-forge\n", + "xz 5.2.5 h516909a_1 conda-forge\n", + "yaml 0.2.5 h516909a_0 
conda-forge\n", + "yarl 1.6.3 py38h27cfd23_0 defaults\n", + "zarr 2.8.1 pyhd3eb1b0_0 defaults\n", + "zeromq 4.3.3 h58526e2_3 conda-forge\n", + "zict 2.0.0 pyhd3eb1b0_0 defaults\n", + "zipp 3.4.0 py_0 conda-forge\n", + "zlib 1.2.11 h516909a_1010 conda-forge\n", + "zstd 1.4.9 haebb681_0 defaults\n" + ] + } + ], "source": [ "!conda list" ] @@ -864,9 +1132,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python [conda env:s2s-ai]", + "display_name": "Python 3", "language": "python", - "name": "conda-env-s2s-ai-py" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -878,7 +1146,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.10" + "version": "3.8.6" }, "toc-autonumbering": true }, diff --git a/notebooks/scripts.py b/notebooks/scripts.py index 6bf2cdd..619d17d 100644 --- a/notebooks/scripts.py +++ b/notebooks/scripts.py @@ -52,8 +52,8 @@ def make_probabilistic(ds, tercile_edges, member_dim='realization', mask=None): return ds_p -def print_RPS_per_year(preds): - """Returns pd.Dataframe of RPS per year.""" +def skill_by_year(preds): + """Returns pd.DataFrame of RPS per year. Todo: make RPSS.""" # similar verification_RPSS.ipynb # as scorer bot but returns a score for each year import xarray as xr -- GitLab
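
The docstring above keeps the note "Todo: make RPSS". Below is a minimal, hypothetical sketch of how the renamed helper could report RPSS rather than raw RPS. It is not part of this patch: the helper name skill_by_year_rpss and the extra obs_p argument are illustrative assumptions, not the repository API. It assumes preds holds tercile probabilities and obs_p holds matching one-hot observed terciles on dimensions (category, forecast_time, lead_time, latitude, longitude), and that the climatological reference assigns 1/3 to each category; averaging follows the notebooks' mean(variable, lead_time, longitude, weighted latitude) convention.

import numpy as np
import pandas as pd
import xarray as xr


def skill_by_year_rpss(preds: xr.Dataset, obs_p: xr.Dataset) -> pd.DataFrame:
    """RPSS per year vs. a 1/3-1/3-1/3 climatological reference (sketch, assumed API)."""
    # RPS: sum of squared differences of cumulative category probabilities
    rps = ((preds.cumsum('category') - obs_p.cumsum('category')) ** 2).sum('category')

    # RPS of climatology: equal 1/3 probability for each tercile category
    clim = xr.ones_like(preds) / 3
    rps_clim = ((clim.cumsum('category') - obs_p.cumsum('category')) ** 2).sum('category')

    # mean over lead_time, longitude and cosine-weighted latitude, then group by year
    weights = np.cos(np.deg2rad(preds.latitude))
    dims = ['lead_time', 'latitude', 'longitude']
    rps_y = rps.weighted(weights).mean(dims).groupby('forecast_time.year').mean()
    rps_clim_y = rps_clim.weighted(weights).mean(dims).groupby('forecast_time.year').mean()

    # RPSS = 1 - RPS_forecast / RPS_climatology, averaged over the variables (e.g. t2m, tp)
    rpss = (1 - rps_y / rps_clim_y).to_array().mean('variable')
    return rpss.rename('RPSS').to_dataframe()

Computing RPS directly from cumulative category probabilities keeps the sketch independent of any particular xskillscore version; an equivalent result could presumably also be obtained with xskillscore.rps, which the scorer uses.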