diff --git a/pixi.lock b/pixi.lock
index 8ccac5f..ff5881d 100644
--- a/pixi.lock
+++ b/pixi.lock
@@ -473,16 +473,16 @@ environments:
- pypi: https://files.pythonhosted.org/packages/69/ce/68d6e31f0a75a5cccc03535e47434c0ca4be37fe950e93117e455cbc362c/antimeridian-0.4.5-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5b/03/c17464bbf682ea87e7e3de2ddc63395e359a78ae9c01f55fc78759ecbd79/anywidget-0.9.21-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/e0/b1/0542e0cab6f49f151a2d7a42400f84f706fc0b64e85dc1f56708b2e9fd37/array_api_compat-1.12.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/1d/05/2709750ddb088eb2fc5053ba214b4f54334d15d4cb28217e2956b5507bac/array_api_extra-0.9.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/df/5d/493b1b5528ab5072feae30821ff3a07b7a0474213d548efb1fdf135f85c1/array_api_compat-1.13.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/21/2b/bfa1cfe370dd4ed51f834f2c6ad93b7f6263b83615ab96ad91094cc98ec6/array_api_extra-0.9.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/fb/1f/2903ef412cb82ba1f2211692f7339fd7c5aeb2764f2a97f0b6a9a18bbf52/arro3_compute-0.6.5-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/31/4a/72dc383d1a0d14f1d453e334e3461e229762edb1bf3f75b3ab977e9386ed/arro3_core-0.6.5-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/1b/df/2a5a1306dc1699b51b02c1c38c55f3564a8c4f84087c23c61e7e7ae37dfa/arro3_io-0.6.5-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/c3/1c/f06ad85180e7dd9855aa5ede901bfc2be858d7bee17d4e978a14c0ecec14/astropy-7.2.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- - pypi: https://files.pythonhosted.org/packages/1f/07/50501947849e780cb5580ebcd7af08c14d431640562e18a8ac2b055c90ec/astropy_iers_data-0.2025.12.22.0.40.30-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/57/61/2d06c08f022c9b617b79f6c55d88e596c1795a1d211e6bf584ac4b9e9506/astropy_iers_data-0.2026.1.5.0.43.43-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ee/34/a9914e676971a13d6cc671b1ed172f9804b50a3a80a143ff196e52f4c7ee/azure_core-1.37.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/3d/9e/1c90a122ea6180e8c72eb7294adc92531b0e08eb3d2324c2ba70d37f4802/azure_storage_blob-12.27.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/d8/3a/6ef2047a072e54e1142718d433d50e9514c999a58f51abfff7902f3a72f8/azure_storage_blob-12.28.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/96/9a/663251dfb35aaddcbdbef78802ea5a9d3fad9d5fadde8774eacd9e1bfbb7/boost_histogram-1.6.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/3c/56/f47a80254ed4991cce9a2f6d8ae8aafbc8df1c3270e966b2927289e5a12f/boto3-1.41.5-py3-none-any.whl
@@ -494,12 +494,11 @@ environments:
- pypi: https://files.pythonhosted.org/packages/27/27/6414b1b7e5e151300c54e28ad1cf3e3b34fe66dc3256a989b031166b1ba3/cdshealpix-0.7.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/a3/8f/c42a98f933022c7de00142526c9b6b7429fdcd0fc66c952b4ebbf0ff3b7f/cf_xarray-0.10.10-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ba/08/52f06ff2f04d376f9cd2c211aefcf2b37f1978e43289341f362fc99f6a0e/cftime-1.6.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl
- - pypi: https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/73/86/43fa9f15c5b9fb6e82620428827cd3c284aa933431405d1bcf5231ae3d3e/cligj-0.7.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/fa/25/0be9314cd72fe2ee2ef89ceb1f438bc156428a12177d684040456eee4a56/cupy_xarray-0.1.4-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/8d/05/8efadba80e1296526e69c1dceba8b0f0bc3756e8d69f6ed9b0e647cf3169/cyclopts-4.4.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/20/5b/0eceb9a5990de9025733a0d212ca43649ba9facd58b8552b6bf93c11439d/cyclopts-4.4.4-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/25/3e/e27078370414ef35fafad2c06d182110073daaeb5d3bf734b0b1eeefe452/debugpy-1.8.19-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl
@@ -522,10 +521,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/31/b3/802576f2ea5dcb48501bb162e4c7b7b3ca5654a42b2c968ef98a797a4c79/geographiclib-2.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e5/15/cf2a69ade4b194aa524ac75112d5caac37414b20a3a03e6865dfe0bd1539/geopy-2.4.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ed/d4/90197b416cb61cefd316964fd9e7bd8324bcbafabf40eef14a9f20b81974/google_api_core-2.28.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/96/58/c1e716be1b055b504d80db2c8413f6c6a890a6ae218a65f178b63bc30356/google_api_python_client-2.187.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/c6/97/451d55e05487a5cd6279a01a7e34921858b16f7dc8aa38a2c684743cd2b3/google_auth-2.45.0-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/db/18/79e9008530b79527e0d5f79e7eef08d3b179b7f851cfd3a2f27822fbdfa9/google_auth-2.47.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/99/d5/3c97526c8796d3caf5f4b3bed2b05e8a7102326f00a334e7a438237f3b22/google_auth_httplib2-0.3.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/2d/80/6e5c7c83cea15ed4dfc4843b9df9db0716bc551ac938f7b5dd18a72bd5e4/google_cloud_storage-3.7.0-py3-none-any.whl
@@ -537,13 +536,14 @@ environments:
- pypi: https://files.pythonhosted.org/packages/d6/49/1f35189c1ca136b2f041b72402f2eb718bdcb435d9e88729fe6f6909c45d/h5netcdf-1.7.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/d9/69/4402ea66272dacc10b298cca18ed73e1c0791ff2ae9ed218d3859f9698ac/h5py-3.15.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/8c/a2/0d269db0f6163be503775dc8b6a6fa15820cc9fdc866f6ba608d86b721f2/httplib2-0.31.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/94/56/c5e8db63ba0e27b310a0b4c384da555b361741e7d186044d31f400c0419e/icechunk-1.1.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/8c/d7/db466e07a21553441adbf915f0913a3f8fecece364cacb2392f11be267be/icechunk-1.1.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/4c/0f/b66d63d4a5426c09005d3713b056e634e00e69788fdc88d1ffe40e5b7654/ipycytoscape-1.3.3-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ca/d3/642a6dc3db8ea558a9b5fbc83815b197861868dc98f98a789b85c7660670/ipyevents-2.0.4-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/00/60/249e3444fcd9c833704741769981cd02fe2c7ce94126b1394e7a3b26e543/ipyfilechooser-0.6.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/49/69/e9858f2c0b99bf9f036348d1c84b8026f438bb6875effe6a9bcd9883dada/ipyleaflet-0.20.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/f1/df/8ee1c5dd1e3308b5d5b2f2dfea323bb2f3827da8d654abb6642051199049/ipython-9.8.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/86/92/162cfaee4ccf370465c5af1ce36a9eacec1becb552f2033bb3584e6f640a/ipython-9.9.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/56/6d/0d9848617b9f753b87f214f1c682592f7ca42de085f564352f10f0843026/ipywidgets-8.1.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl
@@ -558,11 +558,11 @@ environments:
- pypi: https://files.pythonhosted.org/packages/93/cf/be4e93afbfa0def2cd6fac9302071db0bd6d0617999ecbf53f92b9398de3/multiurl-0.3.7-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/97/1a/78b19893197ed7525edfa7f124a461626541e82aec694a468ba97755c24e/netcdf4-1.7.3-cp311-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/7b/7a/a8d32501bb95ecff342004a674720164f95ad616f269450b3bc13dc88ae3/netcdf4-1.7.4-cp311-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/ae/d3/ff8f1b9968aa4dcd1da1880322ed492314cc920998182e549b586c895a17/numbagg-0.9.4-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/c4/e6/d359fdd37498e74d26a167f7a51e54542e642ea47181eb4e643a69a066c3/numcodecs-0.16.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/b0/e0/760e73c111193db5ca37712a148e4807d1b0c60302ab31e4ada6528ca34d/numpy_groupies-0.11.3-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/4a/4e/44dbb46b3d1b0ec61afda8e84837870f2f9ace33c564317d59b70bc19d3e/nvidia_nccl_cu12-2.28.9-py3-none-manylinux_2_18_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/23/2d/609d0392d992259c6dc39881688a7fc13b1397a668bc360fbd68d1396f85/nvidia_nccl_cu12-2.29.2-py3-none-manylinux_2_18_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/53/20/08c6dc0f20c1394e2324b9344838e4e7af770cdcb52c30757a475f50daeb/obstore-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/99/e2/311fb383d9534eef7bfbe858fad931b6e3dbe85843c50592f50063c3bc83/odc_geo-0.4.10-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/84/99/6636f7097a5e461d560317024522279f52931b5a52c8caa0755a14d5f1fd/odc_loader-0.6.0-py3-none-any.whl
@@ -572,11 +572,12 @@ environments:
- pypi: https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e7/c3/3031c931098de393393e1f93a38dc9ed6805d86bb801acc3cf2d5bd1e6b7/plotly-6.5.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a8/87/77cc11c7a9ea9fd05503def69e3d18605852cd0d4b0d3b8f15bbeb3ef1d1/pooch-1.8.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/cd/24/3b7a0818484df9c28172857af32c2397b6d8fcd99d9468bd4684f98ebf0a/proto_plus-1.27.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl
- - pypi: https://files.pythonhosted.org/packages/ff/7b/e9a6fa461ef266c5a23485004934b8f08a2a8ddc447802161ea56d9837dd/psygnal-0.15.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/2d/4f/3593e5adb88a188c798604aed95fbc1479f30230e7f51e8f2c770e6a3832/psygnal-0.15.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl
@@ -587,8 +588,9 @@ environments:
- pypi: https://files.pythonhosted.org/packages/82/06/cad54e8ce758bd836ee5411691cbd49efeb9cc611b374670fce299519334/pyshp-3.0.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9f/86/3ec01436c6235a23a80e978b261a87481c1acaf626a5c618e9edac30e5e1/pystac-1.14.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5d/d2/5f6367b14c9f250d1a6725d18bd1e9584f5ab1587e292f3a847e59189598/pystac_client-0.9.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/88/ae/baf3a8057d8129896a7e02619df43ea0d918fc5b2bb66eb6e2470595fbac/python_box-7.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- - pypi: https://files.pythonhosted.org/packages/7b/84/66c0d9cca2a09074ec2ce6fffa87709ca51b0d197ae742d835e841bac660/rasterio-1.4.4-cp313-cp313-manylinux_2_28_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/48/4a/1af9aa9810fb30668568f2c4dd3eec2412c8e9762b69201d971c509b295e/rasterio-1.5.0-cp313-cp313-manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/f2/98/7e6d147fd16a10a5f821db6e25f192265d6ecca3d82957a4fdd592cad49c/ratelim-0.1.6-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/34/83/a485250bc09db55e4b4389d99e583fac871ceeaaa4620b67a31d8db95ef5/rechunker-0.5.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/13/2f/b4530fbf948867702d0a3f27de4a6aab1d156f406d72852ab902c4d04de9/rich_rst-1.3.2-py3-none-any.whl
@@ -608,16 +610,16 @@ environments:
- pypi: https://files.pythonhosted.org/packages/c0/95/6b7873f0267973ebd55ba9cd33a690b35a116f2779901ef6185a0e21864d/streamlit-1.52.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/72/35/d3cdab8cff94971714f866181abb1aa84ad976f6e7b6218a0499197465e4/streamlit_folium-0.25.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/b5/fc/5e2988590ff2e0128eea6446806c904445a44e17256c67141573ea16b5a5/textual-6.11.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/84/38/47fab2a5fad163ca4851f7a20eb2442491cc63bf2756ec4ef161bc1461dd/textual-7.0.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/8d/c0/fdf9d3ee103ce66a55f0532835ad5e154226c5222423c6636ba049dc42fc/traittypes-0.2.3-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/94/fc/1d34ec891900d9337169ff9f8252fcaa633ae5c4d36b67effd849ed4f9ac/ty-0.0.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/e7/c1/56ef16bf5dcd255155cc736d276efa6ae0a5c26fd685e28f0412a4013c01/types_pytz-2025.2.0.20251108-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/95/20/92e3083b0e854943015bc8a7866e284ead9efadf9bf6809e6fce3b7ded61/ultraplot-1.66.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/43/6c/b26831b890b37c09882f6406efd31441c8e512bf1efbc967b9d867c5e02b/ultraplot-1.70.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e6/9f/ca52771fe972e0dcc5167fedb609940e01516066938ff2ee28b273ae4f29/vega_datasets-0.9.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/04/d5/81d1403788f072e7d0e2b2fe539a0ae4410f27886ff52df094e5348c99ea/vegafusion-2.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- - pypi: https://files.pythonhosted.org/packages/a7/6b/48f6d47a92eaf6f0dd235146307a7eb0d179b78d2faebc53aca3f1e49177/vl_convert_python-1.8.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/6f/61/dc6f4a38cf1b8699f64c57d7f021ca42c39bfe782d8a6eaefb7e8418e925/vl_convert_python-1.9.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/3f/0e/fa3b193432cfc60c93b42f3be03365f5f909d2b3ea410295cf36df739e31/widgetsnbextension-4.0.15-py3-none-any.whl
@@ -884,10 +886,10 @@ packages:
- pkg:pypi/argon2-cffi-bindings?source=hash-mapping
size: 35943
timestamp: 1762509452935
-- pypi: https://files.pythonhosted.org/packages/e0/b1/0542e0cab6f49f151a2d7a42400f84f706fc0b64e85dc1f56708b2e9fd37/array_api_compat-1.12.0-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/df/5d/493b1b5528ab5072feae30821ff3a07b7a0474213d548efb1fdf135f85c1/array_api_compat-1.13.0-py3-none-any.whl
name: array-api-compat
- version: 1.12.0
- sha256: a0b4795b6944a9507fde54679f9350e2ad2b1e2acf4a2408a098cdc27f890a8b
+ version: 1.13.0
+ sha256: c15026a0ddec42815383f07da285472e1b1ff2e632eb7afbcfe9b08fcbad9bf1
requires_dist:
- cupy ; extra == 'cupy'
- dask>=2024.9.0 ; extra == 'dask'
@@ -905,16 +907,16 @@ packages:
- array-api-strict ; extra == 'dev'
- dask[array]>=2024.9.0 ; extra == 'dev'
- jax[cpu] ; extra == 'dev'
+ - ndonnx ; extra == 'dev'
- numpy>=1.22 ; extra == 'dev'
- pytest ; extra == 'dev'
- torch ; extra == 'dev'
- sparse>=0.15.1 ; extra == 'dev'
- - ndonnx ; extra == 'dev'
requires_python: '>=3.10'
-- pypi: https://files.pythonhosted.org/packages/1d/05/2709750ddb088eb2fc5053ba214b4f54334d15d4cb28217e2956b5507bac/array_api_extra-0.9.1-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/21/2b/bfa1cfe370dd4ed51f834f2c6ad93b7f6263b83615ab96ad91094cc98ec6/array_api_extra-0.9.2-py3-none-any.whl
name: array-api-extra
- version: 0.9.1
- sha256: 78b3e6605d1cdc9a66bb49e340e1bb620f045f1809a4e146d74500c3cb813b74
+ version: 0.9.2
+ sha256: d0643a9a4e981746057649accad068ca0fe4066d890f6a95d8b4cd5131b3b661
requires_dist:
- array-api-compat>=1.12.0,<2
requires_python: '>=3.10'
@@ -1026,10 +1028,10 @@ packages:
- astropy[dev] ; extra == 'dev-all'
- astropy[test-all] ; extra == 'dev-all'
requires_python: '>=3.11'
-- pypi: https://files.pythonhosted.org/packages/1f/07/50501947849e780cb5580ebcd7af08c14d431640562e18a8ac2b055c90ec/astropy_iers_data-0.2025.12.22.0.40.30-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/57/61/2d06c08f022c9b617b79f6c55d88e596c1795a1d211e6bf584ac4b9e9506/astropy_iers_data-0.2026.1.5.0.43.43-py3-none-any.whl
name: astropy-iers-data
- version: 0.2025.12.22.0.40.30
- sha256: 2fbc71988d96aa29566667c6568a2bc5ca00748174b1f8ac3e9f7b09d4c27cac
+ version: 0.2026.1.5.0.43.43
+ sha256: fe2c35e9abc99142083d717ea76bf7bde373dc12e502aaeced28ae4ff9bfc345
requires_dist:
- pytest ; extra == 'docs'
- hypothesis ; extra == 'test'
@@ -1298,10 +1300,10 @@ packages:
purls: []
size: 249684
timestamp: 1761066654684
-- pypi: https://files.pythonhosted.org/packages/3d/9e/1c90a122ea6180e8c72eb7294adc92531b0e08eb3d2324c2ba70d37f4802/azure_storage_blob-12.27.1-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/d8/3a/6ef2047a072e54e1142718d433d50e9514c999a58f51abfff7902f3a72f8/azure_storage_blob-12.28.0-py3-none-any.whl
name: azure-storage-blob
- version: 12.27.1
- sha256: 65d1e25a4628b7b6acd20ff7902d8da5b4fde8e46e19c8f6d213a3abc3ece272
+ version: 12.28.0
+ sha256: 00fb1db28bf6a7b7ecaa48e3b1d5c83bfadacc5a678b77826081304bd87d6461
requires_dist:
- azure-core>=1.30.0
- cryptography>=2.1.4
@@ -1772,16 +1774,6 @@ packages:
- pkg:pypi/click?source=hash-mapping
size: 97676
timestamp: 1764518652276
-- pypi: https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl
- name: click-plugins
- version: 1.1.1.2
- sha256: 008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6
- requires_dist:
- - click>=4.0
- - pytest>=3.6 ; extra == 'dev'
- - pytest-cov ; extra == 'dev'
- - wheel ; extra == 'dev'
- - coveralls ; extra == 'dev'
- pypi: https://files.pythonhosted.org/packages/73/86/43fa9f15c5b9fb6e82620428827cd3c284aa933431405d1bcf5231ae3d3e/cligj-0.7.2-py3-none-any.whl
name: cligj
version: 0.7.2
@@ -2517,10 +2509,10 @@ packages:
- pkg:pypi/cycler?source=hash-mapping
size: 14778
timestamp: 1764466758386
-- pypi: https://files.pythonhosted.org/packages/8d/05/8efadba80e1296526e69c1dceba8b0f0bc3756e8d69f6ed9b0e647cf3169/cyclopts-4.4.1-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/20/5b/0eceb9a5990de9025733a0d212ca43649ba9facd58b8552b6bf93c11439d/cyclopts-4.4.4-py3-none-any.whl
name: cyclopts
- version: 4.4.1
- sha256: 67500e9fde90f335fddbf9c452d2e7c4f58209dffe52e7abb1e272796a963bde
+ version: 4.4.4
+ sha256: 316f798fe2f2a30cb70e7140cfde2a46617bfbb575d31bbfdc0b2410a447bd83
requires_dist:
- attrs>=23.1.0
- docstring-parser>=0.15,<4.0
@@ -2869,7 +2861,7 @@ packages:
- pypi: ./
name: entropice
version: 0.1.0
- sha256: cb0c27d2c23c64d7533c03e380cf55c40e82a4d52a0392a829fad06a4ca93736
+ sha256: c15584d2588d1f67ff2c69d6d3afa461fd1b9571d423497221dcb295dd7b1514
requires_dist:
- aiohttp>=3.12.11
- bokeh>=3.7.3
@@ -2932,6 +2924,7 @@ packages:
- ty>=0.0.2,<0.0.3
- ruff>=0.14.9,<0.15
- pandas-stubs>=2.3.3.251201,<3
+ - pytest>=9.0.2,<10
requires_python: '>=3.13,<3.14'
- pypi: git+ssh://git@forgejo.tobiashoelzer.de:22222/tobias/entropy.git#9ca1bdf4afc4ac9b0ea29ebbc060ffecb5cffcf7
name: entropy
@@ -3422,17 +3415,17 @@ packages:
requires_dist:
- smmap>=3.0.1,<6
requires_python: '>=3.7'
-- pypi: https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl
name: gitpython
- version: 3.1.45
- sha256: 8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77
+ version: 3.1.46
+ sha256: 79812ed143d9d25b6d176a10bb511de0f9c67b1fa641d82097b0ab90398a2058
requires_dist:
- gitdb>=4.0.1,<5
- typing-extensions>=3.10.0.2 ; python_full_version < '3.10'
- coverage[toml] ; extra == 'test'
- ddt>=1.1.1,!=1.4.3 ; extra == 'test'
- mock ; python_full_version < '3.8' and extra == 'test'
- - mypy ; extra == 'test'
+ - mypy==1.18.2 ; python_full_version >= '3.9' and extra == 'test'
- pre-commit ; extra == 'test'
- pytest>=7.3.1 ; extra == 'test'
- pytest-cov ; extra == 'test'
@@ -3516,42 +3509,35 @@ packages:
- google-api-core>=1.31.5,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0
- uritemplate>=3.0.1,<5
requires_python: '>=3.7'
-- pypi: https://files.pythonhosted.org/packages/c6/97/451d55e05487a5cd6279a01a7e34921858b16f7dc8aa38a2c684743cd2b3/google_auth-2.45.0-py2.py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/db/18/79e9008530b79527e0d5f79e7eef08d3b179b7f851cfd3a2f27822fbdfa9/google_auth-2.47.0-py3-none-any.whl
name: google-auth
- version: 2.45.0
- sha256: 82344e86dc00410ef5382d99be677c6043d72e502b625aa4f4afa0bdacca0f36
+ version: 2.47.0
+ sha256: c516d68336bfde7cf0da26aab674a36fedcf04b37ac4edd59c597178760c3498
requires_dist:
- - cachetools>=2.0.0,<7.0
- pyasn1-modules>=0.2.1
- rsa>=3.1.4,<5
- cryptography>=38.0.3 ; extra == 'cryptography'
- - cryptography<39.0.0 ; python_full_version < '3.8' and extra == 'cryptography'
- aiohttp>=3.6.2,<4.0.0 ; extra == 'aiohttp'
- requests>=2.20.0,<3.0.0 ; extra == 'aiohttp'
- cryptography ; extra == 'enterprise-cert'
- pyopenssl ; extra == 'enterprise-cert'
- pyopenssl>=20.0.0 ; extra == 'pyopenssl'
- cryptography>=38.0.3 ; extra == 'pyopenssl'
- - cryptography<39.0.0 ; python_full_version < '3.8' and extra == 'pyopenssl'
- pyjwt>=2.0 ; extra == 'pyjwt'
- cryptography>=38.0.3 ; extra == 'pyjwt'
- - cryptography<39.0.0 ; python_full_version < '3.8' and extra == 'pyjwt'
- pyu2f>=0.1.5 ; extra == 'reauth'
- requests>=2.20.0,<3.0.0 ; extra == 'requests'
- grpcio ; extra == 'testing'
- flask ; extra == 'testing'
- freezegun ; extra == 'testing'
- - mock ; extra == 'testing'
- oauth2client ; extra == 'testing'
- pyjwt>=2.0 ; extra == 'testing'
- cryptography>=38.0.3 ; extra == 'testing'
- - cryptography<39.0.0 ; python_full_version < '3.8' and extra == 'testing'
- pytest ; extra == 'testing'
- pytest-cov ; extra == 'testing'
- pytest-localserver ; extra == 'testing'
- pyopenssl>=20.0.0 ; extra == 'testing'
- cryptography>=38.0.3 ; extra == 'testing'
- - cryptography<39.0.0 ; python_full_version < '3.8' and extra == 'testing'
- pyu2f>=0.1.5 ; extra == 'testing'
- responses ; extra == 'testing'
- urllib3 ; extra == 'testing'
@@ -3564,7 +3550,7 @@ packages:
- aiohttp<3.10.0 ; extra == 'testing'
- urllib3 ; extra == 'urllib3'
- packaging ; extra == 'urllib3'
- requires_python: '>=3.7'
+ requires_python: '>=3.8'
- pypi: https://files.pythonhosted.org/packages/99/d5/3c97526c8796d3caf5f4b3bed2b05e8a7102326f00a334e7a438237f3b22/google_auth_httplib2-0.3.0-py3-none-any.whl
name: google-auth-httplib2
version: 0.3.0
@@ -3767,10 +3753,10 @@ packages:
- pkg:pypi/hyperframe?source=hash-mapping
size: 17397
timestamp: 1737618427549
-- pypi: https://files.pythonhosted.org/packages/94/56/c5e8db63ba0e27b310a0b4c384da555b361741e7d186044d31f400c0419e/icechunk-1.1.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+- pypi: https://files.pythonhosted.org/packages/8c/d7/db466e07a21553441adbf915f0913a3f8fecece364cacb2392f11be267be/icechunk-1.1.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
name: icechunk
- version: 1.1.14
- sha256: adb01a0275144c58f741b5402e658930326e86f7b389e879065e01625c021f7c
+ version: 1.1.15
+ sha256: c9e0cc3c8623a48861470553dbb8b0f1e86600989f597ce41ecf47568d8d099d
requires_dist:
- zarr>=3,!=3.0.3
- boto3 ; extra == 'test'
@@ -3926,6 +3912,11 @@ packages:
- pkg:pypi/importlib-metadata?source=hash-mapping
size: 34641
timestamp: 1747934053147
+- pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl
+ name: iniconfig
+ version: 2.3.0
+ sha256: f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12
+ requires_python: '>=3.10'
- pypi: https://files.pythonhosted.org/packages/4c/0f/b66d63d4a5426c09005d3713b056e634e00e69788fdc88d1ffe40e5b7654/ipycytoscape-1.3.3-py2.py3-none-any.whl
name: ipycytoscape
version: 1.3.3
@@ -4024,10 +4015,10 @@ packages:
- traittypes>=0.2.1,<3
- xyzservices>=2021.8.1
requires_python: '>=3.8'
-- pypi: https://files.pythonhosted.org/packages/f1/df/8ee1c5dd1e3308b5d5b2f2dfea323bb2f3827da8d654abb6642051199049/ipython-9.8.0-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/86/92/162cfaee4ccf370465c5af1ce36a9eacec1becb552f2033bb3584e6f640a/ipython-9.9.0-py3-none-any.whl
name: ipython
- version: 9.8.0
- sha256: ebe6d1d58d7d988fbf23ff8ff6d8e1622cfdb194daf4b7b73b792c4ec3b85385
+ version: 9.9.0
+ sha256: b457fe9165df2b84e8ec909a97abcf2ed88f565970efba16b1f7229c283d252b
requires_dist:
- colorama>=0.4.4 ; sys_platform == 'win32'
- decorator>=4.3.2
@@ -4067,7 +4058,8 @@ packages:
- pandas>2.1 ; extra == 'test-extra'
- trio>=0.1.0 ; extra == 'test-extra'
- matplotlib>3.9 ; extra == 'matplotlib'
- - ipython[doc,matplotlib,test,test-extra] ; extra == 'all'
+ - ipython[doc,matplotlib,terminal,test,test-extra] ; extra == 'all'
+ - argcomplete>=3.0 ; extra == 'all'
requires_python: '>=3.11'
- pypi: https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl
name: ipython-pygments-lexers
@@ -6859,14 +6851,15 @@ packages:
version: 1.6.0
sha256: 87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c
requires_python: '>=3.5'
-- pypi: https://files.pythonhosted.org/packages/97/1a/78b19893197ed7525edfa7f124a461626541e82aec694a468ba97755c24e/netcdf4-1.7.3-cp311-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
+- pypi: https://files.pythonhosted.org/packages/7b/7a/a8d32501bb95ecff342004a674720164f95ad616f269450b3bc13dc88ae3/netcdf4-1.7.4-cp311-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
name: netcdf4
- version: 1.7.3
- sha256: 0c764ba6f6a1421cab5496097e8a1c4d2e36be2a04880dfd288bb61b348c217e
+ version: 1.7.4
+ sha256: a72c9f58767779ec14cb7451c3b56bdd8fdc027a792fac2062b14e090c5617f3
requires_dist:
- cftime
- certifi
- - numpy
+ - numpy>=2.3.0 ; platform_machine == 'ARM64' and sys_platform == 'win32'
+ - numpy>=1.21.2 ; platform_machine != 'ARM64' or sys_platform != 'win32'
- cython ; extra == 'tests'
- packaging ; extra == 'tests'
- pytest ; extra == 'tests'
@@ -7045,10 +7038,10 @@ packages:
- pkg:pypi/nvidia-ml-py?source=hash-mapping
size: 48971
timestamp: 1765209768013
-- pypi: https://files.pythonhosted.org/packages/4a/4e/44dbb46b3d1b0ec61afda8e84837870f2f9ace33c564317d59b70bc19d3e/nvidia_nccl_cu12-2.28.9-py3-none-manylinux_2_18_x86_64.whl
+- pypi: https://files.pythonhosted.org/packages/23/2d/609d0392d992259c6dc39881688a7fc13b1397a668bc360fbd68d1396f85/nvidia_nccl_cu12-2.29.2-py3-none-manylinux_2_18_x86_64.whl
name: nvidia-nccl-cu12
- version: 2.28.9
- sha256: 485776daa8447da5da39681af455aa3b2c2586ddcf4af8772495e7c532c7e5ab
+ version: 2.29.2
+ sha256: 3a9a0bf4142126e0d0ed99ec202579bef8d007601f9fab75af60b10324666b12
requires_python: '>=3'
- conda: https://conda.anaconda.org/conda-forge/linux-64/nvtx-0.2.14-py313h07c4f96_0.conda
sha256: 9341cb332428242ab938c5fc202008c12430ec43b8b83511d327f14bf8fd6d96
@@ -7501,6 +7494,17 @@ packages:
- xarray ; extra == 'dev-optional'
- plotly[dev-optional] ; extra == 'dev'
requires_python: '>=3.8'
+- pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl
+ name: pluggy
+ version: 1.6.0
+ sha256: e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746
+ requires_dist:
+ - pre-commit ; extra == 'dev'
+ - tox ; extra == 'dev'
+ - pytest ; extra == 'testing'
+ - pytest-benchmark ; extra == 'testing'
+ - coverage ; extra == 'testing'
+ requires_python: '>=3.9'
- conda: https://conda.anaconda.org/conda-forge/noarch/polars-1.34.0-pyh6a1acc5_0.conda
sha256: 7e8bb10f4373202a0be760d9ac74f92c5e7e6095251180642678a8f57f10c58a
md5: d398dbcb3312bbebc2b2f3dbb98b4262
@@ -7652,10 +7656,10 @@ packages:
- pkg:pypi/psutil?source=hash-mapping
size: 501735
timestamp: 1762092897061
-- pypi: https://files.pythonhosted.org/packages/ff/7b/e9a6fa461ef266c5a23485004934b8f08a2a8ddc447802161ea56d9837dd/psygnal-0.15.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
+- pypi: https://files.pythonhosted.org/packages/2d/4f/3593e5adb88a188c798604aed95fbc1479f30230e7f51e8f2c770e6a3832/psygnal-0.15.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
name: psygnal
- version: 0.15.0
- sha256: a0172efeb861280bca05673989a4df21624f44344eff20b873d8c9d0edc01350
+ version: 0.15.1
+ sha256: e9fca977f5335deea39aed22e31d9795983e4f243e59a7d3c4105793adb7693d
requires_dist:
- wrapt ; extra == 'proxy'
- pydantic ; extra == 'pydantic'
@@ -8013,6 +8017,26 @@ packages:
- pystac[validation]>=1.10.0
- python-dateutil>=2.8.2
requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl
+ name: pytest
+ version: 9.0.2
+ sha256: 711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b
+ requires_dist:
+ - colorama>=0.4 ; sys_platform == 'win32'
+ - exceptiongroup>=1 ; python_full_version < '3.11'
+ - iniconfig>=1.0.1
+ - packaging>=22
+ - pluggy>=1.5,<2
+ - pygments>=2.7.2
+ - tomli>=1 ; python_full_version < '3.11'
+ - argcomplete ; extra == 'dev'
+ - attrs>=19.2 ; extra == 'dev'
+ - hypothesis>=3.56 ; extra == 'dev'
+ - mock ; extra == 'dev'
+ - requests ; extra == 'dev'
+ - setuptools ; extra == 'dev'
+ - xmlschema ; extra == 'dev'
+ requires_python: '>=3.10'
- conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.13.11-hc97d973_100_cp313.conda
build_number: 100
sha256: 9cf014cf28e93ee242bacfbf664e8b45ae06e50b04291e640abeaeb0cba0364c
@@ -8383,19 +8407,19 @@ packages:
license: LicenseRef-Custom
size: 6143
timestamp: 1765438804958
-- pypi: https://files.pythonhosted.org/packages/7b/84/66c0d9cca2a09074ec2ce6fffa87709ca51b0d197ae742d835e841bac660/rasterio-1.4.4-cp313-cp313-manylinux_2_28_x86_64.whl
+- pypi: https://files.pythonhosted.org/packages/48/4a/1af9aa9810fb30668568f2c4dd3eec2412c8e9762b69201d971c509b295e/rasterio-1.5.0-cp313-cp313-manylinux_2_28_x86_64.whl
name: rasterio
- version: 1.4.4
- sha256: c072450caa96428b1218b030500bb908fd6f09bc013a88969ff81a124b6a112a
+ version: 1.5.0
+ sha256: 08a7580cbb9b3bd320bdf827e10c9b2424d0df066d8eef6f2feb37e154ce0c17
requires_dist:
- affine
- attrs
- certifi
- click>=4.0,!=8.2.*
- cligj>=0.5
- - numpy>=1.24
- - click-plugins
+ - numpy>=2
- pyparsing
+ - rasterio[docs,ipython,plot,s3,test] ; extra == 'all'
- ghp-import ; extra == 'docs'
- numpydoc ; extra == 'docs'
- sphinx ; extra == 'docs'
@@ -8404,28 +8428,17 @@ packages:
- ipython>=2.0 ; extra == 'ipython'
- matplotlib ; extra == 'plot'
- boto3>=1.2.4 ; extra == 's3'
+ - aiohttp ; extra == 'test'
- boto3>=1.2.4 ; extra == 'test'
- fsspec ; extra == 'test'
- hypothesis ; extra == 'test'
+ - matplotlib ; extra == 'test'
- packaging ; extra == 'test'
- pytest-cov>=2.2.0 ; extra == 'test'
- pytest>=2.8.2 ; extra == 'test'
+ - requests ; extra == 'test'
- shapely ; extra == 'test'
- - fsspec ; extra == 'all'
- - sphinx-rtd-theme ; extra == 'all'
- - ipython>=2.0 ; extra == 'all'
- - packaging ; extra == 'all'
- - ghp-import ; extra == 'all'
- - boto3>=1.2.4 ; extra == 'all'
- - matplotlib ; extra == 'all'
- - sphinx-click ; extra == 'all'
- - sphinx ; extra == 'all'
- - pytest>=2.8.2 ; extra == 'all'
- - hypothesis ; extra == 'all'
- - shapely ; extra == 'all'
- - numpydoc ; extra == 'all'
- - pytest-cov>=2.2.0 ; extra == 'all'
- requires_python: '>=3.10'
+ requires_python: '>=3.12'
- pypi: https://files.pythonhosted.org/packages/f2/98/7e6d147fd16a10a5f821db6e25f192265d6ecca3d82957a4fdd592cad49c/ratelim-0.1.6-py2.py3-none-any.whl
name: ratelim
version: 0.1.6
@@ -9139,10 +9152,10 @@ packages:
- pkg:pypi/terminado?source=hash-mapping
size: 22452
timestamp: 1710262728753
-- pypi: https://files.pythonhosted.org/packages/b5/fc/5e2988590ff2e0128eea6446806c904445a44e17256c67141573ea16b5a5/textual-6.11.0-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/84/38/47fab2a5fad163ca4851f7a20eb2442491cc63bf2756ec4ef161bc1461dd/textual-7.0.1-py3-none-any.whl
name: textual
- version: 6.11.0
- sha256: 9e663b73ed37123a9b13c16a0c85e09ef917a4cfded97814361ed5cccfa40f89
+ version: 7.0.1
+ sha256: f9b7d16fa9b640bfff2a2008bf31e3f2d4429dc85e07a9583be033840ed15174
requires_dist:
- markdown-it-py[linkify]>=2.1.0
- mdit-py-plugins
@@ -9429,10 +9442,10 @@ packages:
license: BSD-3-Clause
size: 508347
timestamp: 1765407086135
-- pypi: https://files.pythonhosted.org/packages/95/20/92e3083b0e854943015bc8a7866e284ead9efadf9bf6809e6fce3b7ded61/ultraplot-1.66.0-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/43/6c/b26831b890b37c09882f6406efd31441c8e512bf1efbc967b9d867c5e02b/ultraplot-1.70.0-py3-none-any.whl
name: ultraplot
- version: 1.66.0
- sha256: 87fecb897ca5c7d54b76ac81e5b8635be45d9c9d42d629469f1d283e6405f9e1
+ version: 1.70.0
+ sha256: 2b29d1b1e36bd6cf88458370825cfab2c62b9acab706a2cfa434660d7dc4bf74
requires_dist:
- numpy>=1.26.0
- matplotlib>=3.9,<3.11
@@ -9496,10 +9509,10 @@ packages:
- packaging
- narwhals>=1.42
requires_python: '>=3.9'
-- pypi: https://files.pythonhosted.org/packages/a7/6b/48f6d47a92eaf6f0dd235146307a7eb0d179b78d2faebc53aca3f1e49177/vl_convert_python-1.8.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+- pypi: https://files.pythonhosted.org/packages/6f/61/dc6f4a38cf1b8699f64c57d7f021ca42c39bfe782d8a6eaefb7e8418e925/vl_convert_python-1.9.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
name: vl-convert-python
- version: 1.8.0
- sha256: b51264998e8fcc43dbce801484a950cfe6513cdc4c46b20604ef50989855a617
+ version: 1.9.0
+ sha256: 849e6773a7e05d58ab215386b1065e7713f4846b9ac6b0d743bb3e1b20337231
requires_python: '>=3.7'
- pypi: https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl
name: watchdog
diff --git a/pyproject.toml b/pyproject.toml
index 6f51207..bae64a3 100755
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -66,7 +66,7 @@ dependencies = [
"pypalettes>=0.2.1,<0.3",
"ty>=0.0.2,<0.0.3",
"ruff>=0.14.9,<0.15",
- "pandas-stubs>=2.3.3.251201,<3",
+ "pandas-stubs>=2.3.3.251201,<3", "pytest>=9.0.2,<10",
]
[project.scripts]
diff --git a/scripts/recalculate_test_metrics.py b/scripts/recalculate_test_metrics.py
new file mode 100644
index 0000000..882a68c
--- /dev/null
+++ b/scripts/recalculate_test_metrics.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python
+"""Recalculate test metrics and confusion matrix for existing training results.
+
+This script loads previously trained models and recalculates test metrics
+and confusion matrices for training runs that were completed before these
+outputs were added to the training pipeline.
+"""
+
+import pickle
+from pathlib import Path
+
+import cupy as cp
+import numpy as np
+import toml
+import torch
+import xarray as xr
+from sklearn import set_config
+from sklearn.metrics import confusion_matrix
+
+from entropice.ml.dataset import DatasetEnsemble
+from entropice.utils.paths import RESULTS_DIR
+
+# Enable array_api_dispatch to handle CuPy/NumPy namespace properly
+set_config(array_api_dispatch=True)
+
+
+def recalculate_metrics(results_dir: Path):
+ """Recalculate test metrics and confusion matrix for a training result.
+
+ Args:
+ results_dir: Path to the results directory containing the trained model.
+
+ """
+ print(f"\nProcessing: {results_dir}")
+
+ # Load the search settings to get training configuration
+ settings_file = results_dir / "search_settings.toml"
+ if not settings_file.exists():
+ print(" ❌ Missing search_settings.toml, skipping...")
+ return
+
+ with open(settings_file) as f:
+ config = toml.load(f)
+ settings = config["settings"]
+
+ # Check if metrics already exist
+ test_metrics_file = results_dir / "test_metrics.toml"
+ cm_file = results_dir / "confusion_matrix.nc"
+
+ # if test_metrics_file.exists() and cm_file.exists():
+ # print(" ✓ Metrics already exist, skipping...")
+ # return
+
+ # Load the best estimator
+ best_model_file = results_dir / "best_estimator_model.pkl"
+ if not best_model_file.exists():
+ print(" ❌ Missing best_estimator_model.pkl, skipping...")
+ return
+
+ print(f" Loading best estimator from {best_model_file.name}...")
+ with open(best_model_file, "rb") as f:
+ best_estimator = pickle.load(f)
+
+ # Recreate the dataset ensemble
+ print(" Recreating training dataset...")
+ dataset_ensemble = DatasetEnsemble(
+ grid=settings["grid"],
+ level=settings["level"],
+ target=settings["target"],
+ members=settings.get(
+ "members",
+ [
+ "AlphaEarth",
+ "ArcticDEM",
+ "ERA5-yearly",
+ "ERA5-seasonal",
+ "ERA5-shoulder",
+ ],
+ ),
+ dimension_filters=settings.get("dimension_filters", {}),
+ variable_filters=settings.get("variable_filters", {}),
+ filter_target=settings.get("filter_target", False),
+ add_lonlat=settings.get("add_lonlat", True),
+ )
+
+ task = settings["task"]
+ model = settings["model"]
+ device = "torch" if model in ["espa"] else "cuda"
+
+ # Create training data
+ training_data = dataset_ensemble.create_cat_training_dataset(task=task, device=device)
+
+ # Prepare test data - match training.py's approach
+ print(" Preparing test data...")
+ # For XGBoost with CuPy arrays, convert y_test to CPU (same as training.py)
+ y_test = (
+ training_data.y.test.get()
+ if model == "xgboost" and isinstance(training_data.y.test, cp.ndarray)
+ else training_data.y.test
+ )
+
+ # Compute predictions on the test set (use original device data)
+ print(" Computing predictions on test set...")
+ y_pred = best_estimator.predict(training_data.X.test)
+
+ # Use torch
+ y_pred = torch.as_tensor(y_pred, device="cuda")
+ y_test = torch.as_tensor(y_test, device="cuda")
+
+ # Compute metrics manually to avoid device issues
+ print(" Computing test metrics...")
+ from sklearn.metrics import (
+ accuracy_score,
+ f1_score,
+ jaccard_score,
+ precision_score,
+ recall_score,
+ )
+
+ test_metrics = {}
+ if task == "binary":
+ test_metrics["accuracy"] = float(accuracy_score(y_test, y_pred))
+ test_metrics["recall"] = float(recall_score(y_test, y_pred))
+ test_metrics["precision"] = float(precision_score(y_test, y_pred))
+ test_metrics["f1"] = float(f1_score(y_test, y_pred))
+ test_metrics["jaccard"] = float(jaccard_score(y_test, y_pred))
+ else:
+ test_metrics["accuracy"] = float(accuracy_score(y_test, y_pred))
+ test_metrics["f1_macro"] = float(f1_score(y_test, y_pred, average="macro"))
+ test_metrics["f1_weighted"] = float(f1_score(y_test, y_pred, average="weighted"))
+ test_metrics["precision_macro"] = float(precision_score(y_test, y_pred, average="macro", zero_division=0))
+ test_metrics["precision_weighted"] = float(precision_score(y_test, y_pred, average="weighted", zero_division=0))
+ test_metrics["recall_macro"] = float(recall_score(y_test, y_pred, average="macro"))
+ test_metrics["jaccard_micro"] = float(jaccard_score(y_test, y_pred, average="micro"))
+ test_metrics["jaccard_macro"] = float(jaccard_score(y_test, y_pred, average="macro"))
+ test_metrics["jaccard_weighted"] = float(jaccard_score(y_test, y_pred, average="weighted"))
+
+ # Get confusion matrix
+ print(" Computing confusion matrix...")
+ labels = list(range(len(training_data.y.labels)))
+ labels = torch.as_tensor(np.array(labels), device="cuda")
+ print(" Device of y_test:", getattr(training_data.y.test, "device", "cpu"))
+ print(" Device of y_pred:", getattr(y_pred, "device", "cpu"))
+ print(" Device of labels:", getattr(labels, "device", "cpu"))
+ cm = confusion_matrix(y_test, y_pred, labels=labels)
+ cm = cm.cpu().numpy()
+ labels = labels.cpu().numpy()
+ label_names = [training_data.y.labels[i] for i in range(len(training_data.y.labels))]
+ cm_xr = xr.DataArray(
+ cm,
+ dims=["true_label", "predicted_label"],
+ coords={"true_label": label_names, "predicted_label": label_names},
+ name="confusion_matrix",
+ )
+
+ # Store the test metrics
+ if not test_metrics_file.exists():
+ print(f" Storing test metrics to {test_metrics_file.name}...")
+ with open(test_metrics_file, "w") as f:
+ toml.dump({"test_metrics": test_metrics}, f)
+ else:
+ print(" ✓ Test metrics already exist")
+
+ # Store the confusion matrix
+ if True:
+ # if not cm_file.exists():
+ print(f" Storing confusion matrix to {cm_file.name}...")
+ cm_xr.to_netcdf(cm_file, engine="h5netcdf")
+ else:
+ print(" ✓ Confusion matrix already exists")
+
+ print(" ✓ Done!")
+
+
+def main():
+ """Find all training results and recalculate metrics for those missing them."""
+ print("Searching for training results directories...")
+
+ # Find all results directories
+ results_dirs = sorted([d for d in RESULTS_DIR.glob("*") if d.is_dir()])
+
+ print(f"Found {len(results_dirs)} results directories.\n")
+
+ for results_dir in results_dirs:
+ recalculate_metrics(results_dir)
+ # try:
+ # except Exception as e:
+ # print(f" ❌ Error processing {results_dir.name}: {e}")
+ # continue
+
+ print("\n✅ All done!")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/rechunk_zarr.py b/scripts/rechunk_zarr.py
new file mode 100644
index 0000000..664ccc9
--- /dev/null
+++ b/scripts/rechunk_zarr.py
@@ -0,0 +1,58 @@
+import xarray as xr
+import zarr
+from rich import print
+import dask.distributed as dd
+
+from entropice.utils.paths import get_era5_stores
+import entropice.utils.codecs
+
+def print_info(daily_raw = None, show_vars: bool = True):
+ if daily_raw is None:
+ daily_store = get_era5_stores("daily")
+ daily_raw = xr.open_zarr(daily_store, consolidated=False)
+ print("=== Daily INFO ===")
+ print(f" Dims: {daily_raw.sizes}")
+ numchunks = 1
+ chunksizes = {}
+ approxchunksize = 4 # 4 Bytes = float32
+ for d, cs in daily_raw.chunksizes.items():
+ numchunks *= len(cs)
+ chunksizes[d] = max(cs)
+ approxchunksize *= max(cs)
+    approxchunksize /= 1e6  # bytes -> MB (1 MB = 1e6 bytes; was 10e6, i.e. 1e7)
+ print(f" Chunks: {chunksizes} (~{approxchunksize:.2f}MB) => {numchunks} total")
+ print(f" Encoding: {daily_raw.encoding}")
+ if show_vars:
+ print(" Variables:")
+ for var in daily_raw.data_vars:
+ da = daily_raw[var]
+ print(f" {var} Encoding:")
+ print(da.encoding)
+ print("")
+
+def rechunk():
+ daily_store = get_era5_stores("daily")
+ daily_raw = xr.open_zarr(daily_store, consolidated=False)
+ print_info(daily_raw, False)
+ daily_raw = daily_raw.chunk({
+ "time": 120,
+        "latitude": -1,  # -1 = single chunk spanning the full extent (337)
+ "longitude": -1 # Should be 3600
+ })
+ print_info(daily_raw, False)
+
+ encoding = entropice.utils.codecs.from_ds(daily_raw)
+ daily_store_rechunked = daily_store.with_stem(f"{daily_store.stem}_rechunked")
+ daily_raw.to_zarr(daily_store_rechunked, mode="w", encoding=encoding, consolidated=False)
+
+
+
+if __name__ == "__main__":
+ with (
+ dd.LocalCluster(n_workers=1, threads_per_worker=10, memory_limit="100GB") as cluster,
+ dd.Client(cluster) as client,
+ ):
+ print(client)
+ print(client.dashboard_link)
+ rechunk()
+ print("Done.")
\ No newline at end of file
diff --git a/scripts/rerun_missing_inference.py b/scripts/rerun_missing_inference.py
new file mode 100644
index 0000000..cd463c1
--- /dev/null
+++ b/scripts/rerun_missing_inference.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+"""Rerun inference for training results that are missing predicted probabilities.
+
+This script searches through training result directories and identifies those that have
+a trained model but are missing inference results. It then loads the model and dataset
+configuration, reruns inference, and saves the results.
+"""
+
+import pickle
+from pathlib import Path
+
+import toml
+from rich.console import Console
+from rich.progress import track
+
+from entropice.ml.dataset import DatasetEnsemble
+from entropice.ml.inference import predict_proba
+from entropice.utils.paths import RESULTS_DIR
+
+console = Console()
+
+
+def find_incomplete_trainings() -> list[Path]:
+ """Find training result directories missing inference results.
+
+ Returns:
+ list[Path]: List of directories with trained models but missing predictions.
+
+ """
+ incomplete = []
+
+ if not RESULTS_DIR.exists():
+ console.print(f"[yellow]Results directory not found: {RESULTS_DIR}[/yellow]")
+ return incomplete
+
+ # Search for all training result directories
+ for result_dir in RESULTS_DIR.glob("*_cv*"):
+ if not result_dir.is_dir():
+ continue
+
+ model_file = result_dir / "best_estimator_model.pkl"
+ settings_file = result_dir / "search_settings.toml"
+ predictions_file = result_dir / "predicted_probabilities.parquet"
+
+ # Check if model and settings exist but predictions are missing
+ if model_file.exists() and settings_file.exists() and not predictions_file.exists():
+ incomplete.append(result_dir)
+
+ return incomplete
+
+
+def rerun_inference(result_dir: Path) -> bool:
+ """Rerun inference for a training result directory.
+
+ Args:
+ result_dir (Path): Path to the training result directory.
+
+ Returns:
+ bool: True if successful, False otherwise.
+
+ """
+ try:
+ console.print(f"\n[cyan]Processing: {result_dir.name}[/cyan]")
+
+ # Load settings
+ settings_file = result_dir / "search_settings.toml"
+ with open(settings_file) as f:
+ settings_data = toml.load(f)
+
+ settings = settings_data["settings"]
+
+ # Reconstruct DatasetEnsemble from settings
+ ensemble = DatasetEnsemble(
+ grid=settings["grid"],
+ level=settings["level"],
+ target=settings["target"],
+ members=settings["members"],
+ dimension_filters=settings.get("dimension_filters", {}),
+ variable_filters=settings.get("variable_filters", {}),
+ filter_target=settings.get("filter_target", False),
+ add_lonlat=settings.get("add_lonlat", True),
+ )
+
+ # Load trained model
+ model_file = result_dir / "best_estimator_model.pkl"
+ with open(model_file, "rb") as f:
+ clf = pickle.load(f)
+
+ console.print("[green]✓[/green] Loaded model and settings")
+
+ # Get class labels
+ classes = settings["classes"]
+
+ # Run inference
+ console.print("[yellow]Running inference...[/yellow]")
+ preds = predict_proba(ensemble, clf=clf, classes=classes)
+
+ # Save predictions
+ preds_file = result_dir / "predicted_probabilities.parquet"
+ preds.to_parquet(preds_file)
+
+ console.print(f"[green]✓[/green] Saved {len(preds)} predictions to {preds_file.name}")
+ return True
+
+ except Exception as e:
+ console.print(f"[red]✗ Error processing {result_dir.name}: {e}[/red]")
+ import traceback
+
+ console.print(f"[red]{traceback.format_exc()}[/red]")
+ return False
+
+
+def main():
+ """Rerun missing inferences for incomplete training results."""
+ console.print("[bold blue]Searching for incomplete training results...[/bold blue]")
+
+ incomplete_dirs = find_incomplete_trainings()
+
+ if not incomplete_dirs:
+ console.print("[green]No incomplete trainings found. All trainings have predictions![/green]")
+ return
+
+ console.print(f"[yellow]Found {len(incomplete_dirs)} training(s) missing predictions:[/yellow]")
+ for d in incomplete_dirs:
+ console.print(f" • {d.name}")
+
+ console.print(f"\n[bold]Processing {len(incomplete_dirs)} training result(s)...[/bold]\n")
+
+ successful = 0
+ failed = 0
+
+ for result_dir in track(incomplete_dirs, description="Rerunning inference"):
+ if rerun_inference(result_dir):
+ successful += 1
+ else:
+ failed += 1
+
+ console.print("\n[bold]Summary:[/bold]")
+ console.print(f" [green]Successful: {successful}[/green]")
+ console.print(f" [red]Failed: {failed}[/red]")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/src/entropice/dashboard/plots/hyperparameter_analysis.py b/src/entropice/dashboard/plots/hyperparameter_analysis.py
index 4c9100e..76ba038 100644
--- a/src/entropice/dashboard/plots/hyperparameter_analysis.py
+++ b/src/entropice/dashboard/plots/hyperparameter_analysis.py
@@ -10,9 +10,11 @@ import pandas as pd
import pydeck as pdk
import streamlit as st
+from entropice.dashboard.utils.class_ordering import get_ordered_classes
from entropice.dashboard.utils.colors import get_cmap, get_palette
from entropice.dashboard.utils.geometry import fix_hex_geometry
from entropice.ml.dataset import DatasetEnsemble
+from entropice.ml.training import TrainingSettings
def render_performance_summary(results: pd.DataFrame, refit_metric: str):
@@ -125,7 +127,7 @@ def render_performance_summary(results: pd.DataFrame, refit_metric: str):
)
-def render_parameter_distributions(results: pd.DataFrame, settings: dict | None = None):
+def render_parameter_distributions(results: pd.DataFrame, settings: TrainingSettings | None = None):
"""Render histograms of parameter distributions explored.
Args:
@@ -1152,15 +1154,18 @@ def render_top_configurations(results: pd.DataFrame, metric: str, top_n: int = 1
@st.fragment
-def render_confusion_matrix_map(result_path: Path, settings: dict):
- """Render 3D pydeck map showing confusion matrix results (TP, FP, TN, FN).
+def render_confusion_matrix_map(result_path: Path, settings: TrainingSettings):
+ """Render 3D pydeck map showing prediction results.
+
+ Uses true labels for elevation (height) and different shades of red for incorrect predictions
+ based on the predicted class.
Args:
result_path: Path to the training result directory.
settings: Settings dictionary containing grid, level, task, and target information.
"""
- st.subheader("🗺️ Confusion Matrix Spatial Distribution")
+ st.subheader("🗺️ Prediction Results Map")
# Load predicted probabilities
preds_file = result_path / "predicted_probabilities.parquet"
@@ -1190,62 +1195,41 @@ def render_confusion_matrix_map(result_path: Path, settings: dict):
st.error(f"Error loading training data: {e}")
return
- # Get the labeled cells (those with true labels)
- labeled_cells = training_data.dataset[training_data.dataset.index.isin(training_data.y.binned.index)]
+ # Get all cells from the complete dataset (not just test split)
+ # Use the full dataset which includes both train and test splits
+ all_cells = training_data.dataset.copy()
# Merge predictions with true labels
# Reset index to avoid ambiguity between index and column
- labeled_gdf = labeled_cells.copy()
- labeled_gdf = labeled_gdf.reset_index().rename(columns={"index": "cell_id"})
- labeled_gdf["true_class"] = training_data.y.binned.loc[labeled_cells.index].to_numpy()
+ labeled_gdf = all_cells.reset_index().rename(columns={"index": "cell_id"})
+ labeled_gdf["true_class"] = training_data.y.binned.loc[all_cells.index].to_numpy()
- # Merge with predictions - ensure we keep GeoDataFrame type
- merged_df = labeled_gdf.merge(preds_gdf[["cell_id", "predicted_class"]], on="cell_id", how="inner")
+ # Merge with predictions - use left join to keep all cells
+ merged_df = labeled_gdf.merge(preds_gdf[["cell_id", "predicted_class"]], on="cell_id", how="left")
merged = gpd.GeoDataFrame(merged_df, geometry="geometry", crs=labeled_gdf.crs)
+ # Mark which cells have predictions (test split) vs not (training split)
+ merged["in_test_split"] = merged["predicted_class"].notna()
+
+ # For cells without predictions (training split), use true class as predicted class for visualization
+ merged["predicted_class"] = merged["predicted_class"].fillna(merged["true_class"])
+
if len(merged) == 0:
st.warning("No matching predictions found for labeled cells.")
return
- # Determine confusion matrix category
- def get_confusion_category(row):
- true_label = row["true_class"]
- pred_label = row["predicted_class"]
+ # Mark correct vs incorrect predictions (only meaningful for test split)
+ merged["is_correct"] = merged["true_class"] == merged["predicted_class"]
- if task == "binary":
- # For binary classification
- if true_label == "RTS" and pred_label == "RTS":
- return "True Positive"
- elif true_label == "RTS" and pred_label == "No-RTS":
- return "False Negative"
- elif true_label == "No-RTS" and pred_label == "RTS":
- return "False Positive"
- else: # true_label == "No-RTS" and pred_label == "No-RTS"
- return "True Negative"
- else:
- # For multiclass (count/density)
- if true_label == pred_label:
- return "Correct"
- else:
- return "Incorrect"
-
- merged["confusion_category"] = merged.apply(get_confusion_category, axis=1)
+ # Get ordered class labels for the task
+ ordered_classes = get_ordered_classes(task)
# Create controls
- col1, col2 = st.columns([3, 1])
+ col1, col2, col3 = st.columns([2, 1, 1])
with col1:
- # Filter by confusion category
- if task == "binary":
- categories = [
- "All",
- "True Positive",
- "False Positive",
- "True Negative",
- "False Negative",
- ]
- else:
- categories = ["All", "Correct", "Incorrect"]
+ # Filter by prediction correctness and split
+ categories = ["All", "Test Split Only", "Training Split Only", "Correct (Test)", "Incorrect (Test)"]
selected_category = st.selectbox(
"Filter by Category",
@@ -1263,10 +1247,26 @@ def render_confusion_matrix_map(result_path: Path, settings: dict):
key="confusion_map_opacity",
)
+ with col3:
+ line_width = st.slider(
+ "Line Width",
+ min_value=0.5,
+ max_value=3.0,
+ value=1.0,
+ step=0.5,
+ key="confusion_map_line_width",
+ )
+
# Filter data if needed
- if selected_category != "All":
- display_gdf = merged[merged["confusion_category"] == selected_category].copy()
- else:
+ if selected_category == "Test Split Only":
+ display_gdf = merged[merged["in_test_split"]].copy()
+ elif selected_category == "Training Split Only":
+ display_gdf = merged[~merged["in_test_split"]].copy()
+ elif selected_category == "Correct (Test)":
+ display_gdf = merged[merged["is_correct"] & merged["in_test_split"]].copy()
+ elif selected_category == "Incorrect (Test)":
+ display_gdf = merged[~merged["is_correct"] & merged["in_test_split"]].copy()
+ else: # "All"
display_gdf = merged.copy()
if len(display_gdf) == 0:
@@ -1280,49 +1280,72 @@ def render_confusion_matrix_map(result_path: Path, settings: dict):
if grid == "hex":
display_gdf_wgs84["geometry"] = display_gdf_wgs84["geometry"].apply(fix_hex_geometry)
- # Assign colors based on confusion category
- if task == "binary":
- color_map = {
- "True Positive": [46, 204, 113], # Green
- "False Positive": [231, 76, 60], # Red
- "True Negative": [52, 152, 219], # Blue
- "False Negative": [241, 196, 15], # Yellow
- }
- else:
- color_map = {
- "Correct": [46, 204, 113], # Green
- "Incorrect": [231, 76, 60], # Red
- }
+ # Get red material colormap for incorrect predictions
+    red_cmap = get_cmap("red_predictions")  # red-shade palette for incorrect predictions
+ n_classes = len(ordered_classes)
- display_gdf_wgs84["fill_color"] = display_gdf_wgs84["confusion_category"].map(color_map)
+ # Assign colors based on correctness
+ def get_color(row):
+ if row["is_correct"]:
+ # Green for correct predictions
+ return [46, 204, 113]
+ else:
+ # Different shades of red for each predicted class (ordered)
+ pred_class = row["predicted_class"]
+ if pred_class in ordered_classes:
+ class_idx = ordered_classes.index(pred_class)
+ # Sample from red colormap based on class index
+ color_value = red_cmap(class_idx / max(n_classes - 1, 1))
+ return [int(color_value[0] * 255), int(color_value[1] * 255), int(color_value[2] * 255)]
+ else:
+ # Fallback red if class not found
+ return [231, 76, 60]
- # Add elevation based on confusion category (higher for errors)
- if task == "binary":
- elevation_map = {
- "True Positive": 0.8,
- "False Positive": 1.0,
- "True Negative": 0.3,
- "False Negative": 1.0,
- }
- else:
- elevation_map = {
- "Correct": 0.5,
- "Incorrect": 1.0,
- }
+ display_gdf_wgs84["fill_color"] = display_gdf_wgs84.apply(get_color, axis=1)
- display_gdf_wgs84["elevation"] = display_gdf_wgs84["confusion_category"].map(elevation_map)
+ # Add line color based on split: blue for test split, orange for training split
+ def get_line_color(row):
+ if row["in_test_split"]:
+ return [52, 152, 219] # Blue for test split
+ else:
+ return [230, 126, 34] # Orange for training split
+
+ display_gdf_wgs84["line_color"] = display_gdf_wgs84.apply(get_line_color, axis=1)
+
+ # Add elevation based on TRUE label (not predicted)
+ # Map each true class to a height based on its position in the ordered list
+ def get_elevation(row):
+ true_class = row["true_class"]
+ if true_class in ordered_classes:
+ class_idx = ordered_classes.index(true_class)
+ # Normalize to 0-1 range based on class position
+ return (class_idx + 1) / n_classes
+ else:
+ return 0.5 # Default elevation
+
+ display_gdf_wgs84["elevation"] = display_gdf_wgs84.apply(get_elevation, axis=1)
# Convert to GeoJSON format
geojson_data = []
for _, row in display_gdf_wgs84.iterrows():
+ # Determine split and status for tooltip
+ split_name = "Test Split" if row["in_test_split"] else "Training Split"
+ if row["in_test_split"]:
+ status = "✓ Correct" if row["is_correct"] else "✗ Incorrect"
+ else:
+ status = "(No prediction - training data)"
+
feature = {
"type": "Feature",
"geometry": row["geometry"].__geo_interface__,
"properties": {
"true_class": str(row["true_class"]),
- "predicted_class": str(row["predicted_class"]),
- "confusion_category": str(row["confusion_category"]),
+ "predicted_class": str(row["predicted_class"]) if row["in_test_split"] else "N/A",
+ "is_correct": bool(row["is_correct"]),
+ "split": split_name,
+ "status": status,
"fill_color": row["fill_color"],
+ "line_color": row["line_color"],
"elevation": float(row["elevation"]),
},
}
@@ -1338,8 +1361,8 @@ def render_confusion_matrix_map(result_path: Path, settings: dict):
extruded=True,
wireframe=False,
get_fill_color="properties.fill_color",
- get_line_color=[80, 80, 80],
- line_width_min_pixels=0.5,
+ get_line_color="properties.line_color",
+ line_width_min_pixels=line_width,
get_elevation="properties.elevation",
elevation_scale=500000,
pickable=True,
@@ -1353,9 +1376,10 @@ def render_confusion_matrix_map(result_path: Path, settings: dict):
layers=[layer],
initial_view_state=view_state,
tooltip={
- "html": "True Label: {true_class}
"
+ "html": "Split: {split}
"
+ "True Label: {true_class}
"
"Predicted Label: {predicted_class}
"
- "Category: {confusion_category}",
+ "Status: {status}",
"style": {"backgroundColor": "steelblue", "color": "white"},
},
map_style="https://basemaps.cartocdn.com/gl/dark-matter-gl-style/style.json",
@@ -1365,56 +1389,240 @@ def render_confusion_matrix_map(result_path: Path, settings: dict):
st.pydeck_chart(deck)
# Show statistics
- col1, col2, col3 = st.columns(3)
+ col1, col2, col3, col4 = st.columns(4)
with col1:
st.metric("Total Labeled Cells", len(merged))
- if task == "binary":
- with col2:
- tp = len(merged[merged["confusion_category"] == "True Positive"])
- fp = len(merged[merged["confusion_category"] == "False Positive"])
- tn = len(merged[merged["confusion_category"] == "True Negative"])
- fn = len(merged[merged["confusion_category"] == "False Negative"])
+ with col2:
+ test_count = len(merged[merged["in_test_split"]])
+ st.metric("Test Split", test_count)
- accuracy = (tp + tn) / len(merged) if len(merged) > 0 else 0
- st.metric("Accuracy", f"{accuracy:.2%}")
+ with col3:
+ train_count = len(merged[~merged["in_test_split"]])
+ st.metric("Training Split", train_count)
- with col3:
- precision = tp / (tp + fp) if (tp + fp) > 0 else 0
- recall = tp / (tp + fn) if (tp + fn) > 0 else 0
- f1 = 2 * (precision * recall) / (precision + recall) if (precision + recall) > 0 else 0
- st.metric("F1 Score", f"{f1:.3f}")
-
- # Show confusion matrix counts
- st.caption(f"TP: {tp} | FP: {fp} | TN: {tn} | FN: {fn}")
- else:
- with col2:
- correct = len(merged[merged["confusion_category"] == "Correct"])
- accuracy = correct / len(merged) if len(merged) > 0 else 0
- st.metric("Accuracy", f"{accuracy:.2%}")
-
- with col3:
- incorrect = len(merged[merged["confusion_category"] == "Incorrect"])
- st.metric("Incorrect", incorrect)
+ with col4:
+ test_cells = merged[merged["in_test_split"]]
+ if len(test_cells) > 0:
+ correct = len(test_cells[test_cells["is_correct"]])
+ accuracy = correct / len(test_cells)
+ st.metric("Test Accuracy", f"{accuracy:.2%}")
+ else:
+ st.metric("Test Accuracy", "N/A")
# Add legend
with st.expander("Legend", expanded=True):
- st.markdown("**Confusion Matrix Categories:**")
+ # Split indicators (border colors)
+ st.markdown("**Data Split (Border Color):**")
- for category, color in color_map.items():
- count = len(merged[merged["confusion_category"] == category])
- percentage = count / len(merged) * 100 if len(merged) > 0 else 0
+ test_count = len(merged[merged["in_test_split"]])
+ train_count = len(merged[~merged["in_test_split"]])
- st.markdown(
- f'