diff --git a/docs/examples/documentation_indexing.ipynb b/docs/examples/documentation_indexing.ipynb
index 28b54b3f7..a6aac5cb2 100644
--- a/docs/examples/documentation_indexing.ipynb
+++ b/docs/examples/documentation_indexing.ipynb
@@ -56,6 +56,7 @@
"source": [
"from glob import glob\n",
"from os import path\n",
+ "import warnings\n",
"\n",
"import numpy as np\n",
"\n",
@@ -179,9 +180,9 @@
" \"W\": c_grid_dimensions,\n",
"}\n",
"\n",
- "fieldsetC = parcels.FieldSet.from_nemo(\n",
- " filenames, variables, dimensions, netcdf_decodewarning=False\n",
- ")"
+ "with warnings.catch_warnings():\n",
+ " warnings.simplefilter(\"ignore\", parcels.FileWarning)\n",
+ " fieldsetC = parcels.FieldSet.from_nemo(filenames, variables, dimensions)"
]
},
{
@@ -189,7 +190,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "Note by the way, that we used `netcdf_decodewarning=False` in the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`.\n"
+ "Note by the way, that we used `warnings.catch_warnings()` with `warnings.simplefilter(\"ignore\", parcels.FileWarning)` to wrap the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`.\n"
]
},
{
@@ -293,7 +294,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.6"
+ "version": "3.12.4"
}
},
"nbformat": 4,
diff --git a/docs/examples/tutorial_nemo_3D.ipynb b/docs/examples/tutorial_nemo_3D.ipynb
index 240435b6d..abf8ba6c4 100644
--- a/docs/examples/tutorial_nemo_3D.ipynb
+++ b/docs/examples/tutorial_nemo_3D.ipynb
@@ -54,15 +54,16 @@
"source": [
"from datetime import timedelta\n",
"from glob import glob\n",
+ "import warnings\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import xarray as xr\n",
"\n",
"import parcels\n",
- "from parcels import logger\n",
+ "from parcels import FileWarning\n",
"\n",
"# Add a filter for the xarray decoding warning\n",
- "logger.addFilter(parcels.XarrayDecodedFilter())\n",
+ "warnings.simplefilter(\"ignore\", FileWarning)\n",
"\n",
"example_dataset_folder = parcels.download_example_dataset(\n",
" \"NemoNorthSeaORCA025-N006_data\"\n",
@@ -234,7 +235,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.12.4"
+ "version": "3.12.5"
}
},
"nbformat": 4,
diff --git a/docs/examples/tutorial_timestamps.ipynb b/docs/examples/tutorial_timestamps.ipynb
index 3217d77e1..ae7c41347 100644
--- a/docs/examples/tutorial_timestamps.ipynb
+++ b/docs/examples/tutorial_timestamps.ipynb
@@ -15,6 +15,7 @@
"outputs": [],
"source": [
"from glob import glob\n",
+ "import warnings\n",
"\n",
"import numpy as np\n",
"\n",
@@ -49,53 +50,56 @@
},
"outputs": [
{
- "name": "stdout",
+ "name": "stderr",
"output_type": "stream",
"text": [
- "WARNING: File /Users/erik/Library/Caches/parcels/WOA_data/woa18_decav_t10_04.nc could not be decoded properly by xarray (version 2023.9.0). It will be opened with no decoding. Filling values might be wrongly parsed.\n"
+ "C:\\Users\\asche\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\field.py:502: FileWarning: File C:\\Users\\asche\\AppData\\Local\\parcels\\parcels\\Cache\\WOA_data\\woa18_decav_t01_04.nc could not be decoded properly by xarray (version 2024.6.0). It will be opened with no decoding. Filling values might be wrongly parsed.\n",
+ " with _grid_fb_class(\n",
+ "C:\\Users\\asche\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\field.py:330: FileWarning: File C:\\Users\\asche\\AppData\\Local\\parcels\\parcels\\Cache\\WOA_data\\woa18_decav_t01_04.nc could not be decoded properly by xarray (version 2024.6.0). It will be opened with no decoding. Filling values might be wrongly parsed.\n",
+ " with _grid_fb_class(\n"
]
},
{
"ename": "RuntimeError",
- "evalue": "Xarray could not convert the calendar. If youre using from_netcdf, try using the timestamps keyword in the construction of your Field. See also the tutorial at https://docs.oceanparcels.org/en/latest/examples/tutorial_timestamps.html",
+ "evalue": "Xarray could not convert the calendar. If you're using from_netcdf, try using the timestamps keyword in the construction of your Field. See also the tutorial at https://docs.oceanparcels.org/en/latest/examples/tutorial_timestamps.html",
"output_type": "error",
"traceback": [
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
- "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:319\u001b[0m, in \u001b[0;36mdecode_cf_datetime\u001b[0;34m(num_dates, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 318\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 319\u001b[0m dates \u001b[39m=\u001b[39m _decode_datetime_with_pandas(flat_num_dates, units, calendar)\n\u001b[1;32m 320\u001b[0m \u001b[39mexcept\u001b[39;00m (\u001b[39mKeyError\u001b[39;00m, OutOfBoundsDatetime, OutOfBoundsTimedelta, \u001b[39mOverflowError\u001b[39;00m):\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:253\u001b[0m, in \u001b[0;36m_decode_datetime_with_pandas\u001b[0;34m(flat_num_dates, units, calendar)\u001b[0m\n\u001b[1;32m 252\u001b[0m time_units, ref_date \u001b[39m=\u001b[39m _unpack_netcdf_time_units(units)\n\u001b[0;32m--> 253\u001b[0m time_units \u001b[39m=\u001b[39m _netcdf_to_numpy_timeunit(time_units)\n\u001b[1;32m 254\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m 255\u001b[0m \u001b[39m# TODO: the strict enforcement of nanosecond precision Timestamps can be\u001b[39;00m\n\u001b[1;32m 256\u001b[0m \u001b[39m# relaxed when addressing GitHub issue #7493.\u001b[39;00m\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:115\u001b[0m, in \u001b[0;36m_netcdf_to_numpy_timeunit\u001b[0;34m(units)\u001b[0m\n\u001b[1;32m 114\u001b[0m units \u001b[39m=\u001b[39m \u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39m{\u001b[39;00munits\u001b[39m}\u001b[39;00m\u001b[39ms\u001b[39m\u001b[39m\"\u001b[39m\n\u001b[0;32m--> 115\u001b[0m \u001b[39mreturn\u001b[39;00m {\n\u001b[1;32m 116\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mnanoseconds\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mns\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 117\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mmicroseconds\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mus\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 118\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mmilliseconds\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mms\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 119\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mseconds\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39ms\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 120\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mminutes\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mm\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 121\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mhours\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mh\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 122\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mdays\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mD\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 123\u001b[0m }[units]\n",
- "\u001b[0;31mKeyError\u001b[0m: 'months'",
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:322\u001b[0m, in \u001b[0;36mdecode_cf_datetime\u001b[1;34m(num_dates, units, calendar, use_cftime)\u001b[0m\n\u001b[0;32m 321\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 322\u001b[0m dates \u001b[38;5;241m=\u001b[39m \u001b[43m_decode_datetime_with_pandas\u001b[49m\u001b[43m(\u001b[49m\u001b[43mflat_num_dates\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 323\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mKeyError\u001b[39;00m, OutOfBoundsDatetime, OutOfBoundsTimedelta, \u001b[38;5;167;01mOverflowError\u001b[39;00m):\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:256\u001b[0m, in \u001b[0;36m_decode_datetime_with_pandas\u001b[1;34m(flat_num_dates, units, calendar)\u001b[0m\n\u001b[0;32m 255\u001b[0m time_units, ref_date \u001b[38;5;241m=\u001b[39m _unpack_netcdf_time_units(units)\n\u001b[1;32m--> 256\u001b[0m time_units \u001b[38;5;241m=\u001b[39m \u001b[43m_netcdf_to_numpy_timeunit\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtime_units\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 258\u001b[0m \u001b[38;5;66;03m# TODO: the strict enforcement of nanosecond precision Timestamps can be\u001b[39;00m\n\u001b[0;32m 259\u001b[0m \u001b[38;5;66;03m# relaxed when addressing GitHub issue #7493.\u001b[39;00m\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:118\u001b[0m, in \u001b[0;36m_netcdf_to_numpy_timeunit\u001b[1;34m(units)\u001b[0m\n\u001b[0;32m 117\u001b[0m units \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00munits\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124ms\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m--> 118\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m{\u001b[49m\n\u001b[0;32m 119\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mnanoseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mns\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 120\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmicroseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mus\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 121\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmilliseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mms\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 122\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43ms\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 123\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mminutes\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mm\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 124\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mhours\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mh\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 125\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdays\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mD\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 126\u001b[0m \u001b[43m\u001b[49m\u001b[43m}\u001b[49m\u001b[43m[\u001b[49m\u001b[43munits\u001b[49m\u001b[43m]\u001b[49m\n",
+ "\u001b[1;31mKeyError\u001b[0m: 'months'",
"\nDuring handling of the above exception, another exception occurred:\n",
- "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:213\u001b[0m, in \u001b[0;36m_decode_cf_datetime_dtype\u001b[0;34m(data, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 212\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 213\u001b[0m result \u001b[39m=\u001b[39m decode_cf_datetime(example_value, units, calendar, use_cftime)\n\u001b[1;32m 214\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mException\u001b[39;00m:\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:321\u001b[0m, in \u001b[0;36mdecode_cf_datetime\u001b[0;34m(num_dates, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 320\u001b[0m \u001b[39mexcept\u001b[39;00m (\u001b[39mKeyError\u001b[39;00m, OutOfBoundsDatetime, OutOfBoundsTimedelta, \u001b[39mOverflowError\u001b[39;00m):\n\u001b[0;32m--> 321\u001b[0m dates \u001b[39m=\u001b[39m _decode_datetime_with_cftime(\n\u001b[1;32m 322\u001b[0m flat_num_dates\u001b[39m.\u001b[39;49mastype(\u001b[39mfloat\u001b[39;49m), units, calendar\n\u001b[1;32m 323\u001b[0m )\n\u001b[1;32m 325\u001b[0m \u001b[39mif\u001b[39;00m (\n\u001b[1;32m 326\u001b[0m dates[np\u001b[39m.\u001b[39mnanargmin(num_dates)]\u001b[39m.\u001b[39myear \u001b[39m<\u001b[39m \u001b[39m1678\u001b[39m\n\u001b[1;32m 327\u001b[0m \u001b[39mor\u001b[39;00m dates[np\u001b[39m.\u001b[39mnanargmax(num_dates)]\u001b[39m.\u001b[39myear \u001b[39m>\u001b[39m\u001b[39m=\u001b[39m \u001b[39m2262\u001b[39m\n\u001b[1;32m 328\u001b[0m ):\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:237\u001b[0m, in \u001b[0;36m_decode_datetime_with_cftime\u001b[0;34m(num_dates, units, calendar)\u001b[0m\n\u001b[1;32m 235\u001b[0m \u001b[39mif\u001b[39;00m num_dates\u001b[39m.\u001b[39msize \u001b[39m>\u001b[39m \u001b[39m0\u001b[39m:\n\u001b[1;32m 236\u001b[0m \u001b[39mreturn\u001b[39;00m np\u001b[39m.\u001b[39masarray(\n\u001b[0;32m--> 237\u001b[0m cftime\u001b[39m.\u001b[39;49mnum2date(num_dates, units, calendar, only_use_cftime_datetimes\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m)\n\u001b[1;32m 238\u001b[0m )\n\u001b[1;32m 239\u001b[0m \u001b[39melse\u001b[39;00m:\n",
- "File \u001b[0;32msrc/cftime/_cftime.pyx:580\u001b[0m, in \u001b[0;36mcftime._cftime.num2date\u001b[0;34m()\u001b[0m\n",
- "File \u001b[0;32msrc/cftime/_cftime.pyx:98\u001b[0m, in \u001b[0;36mcftime._cftime._dateparse\u001b[0;34m()\u001b[0m\n",
- "\u001b[0;31mValueError\u001b[0m: 'months since' units only allowed for '360_day' calendar",
+ "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:216\u001b[0m, in \u001b[0;36m_decode_cf_datetime_dtype\u001b[1;34m(data, units, calendar, use_cftime)\u001b[0m\n\u001b[0;32m 215\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 216\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mdecode_cf_datetime\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexample_value\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 217\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:324\u001b[0m, in \u001b[0;36mdecode_cf_datetime\u001b[1;34m(num_dates, units, calendar, use_cftime)\u001b[0m\n\u001b[0;32m 323\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mKeyError\u001b[39;00m, OutOfBoundsDatetime, OutOfBoundsTimedelta, \u001b[38;5;167;01mOverflowError\u001b[39;00m):\n\u001b[1;32m--> 324\u001b[0m dates \u001b[38;5;241m=\u001b[39m \u001b[43m_decode_datetime_with_cftime\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 325\u001b[0m \u001b[43m \u001b[49m\u001b[43mflat_num_dates\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mastype\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mfloat\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\n\u001b[0;32m 326\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 328\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 329\u001b[0m dates[np\u001b[38;5;241m.\u001b[39mnanargmin(num_dates)]\u001b[38;5;241m.\u001b[39myear \u001b[38;5;241m<\u001b[39m \u001b[38;5;241m1678\u001b[39m\n\u001b[0;32m 330\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m dates[np\u001b[38;5;241m.\u001b[39mnanargmax(num_dates)]\u001b[38;5;241m.\u001b[39myear \u001b[38;5;241m>\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m2262\u001b[39m\n\u001b[0;32m 331\u001b[0m ):\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:240\u001b[0m, in \u001b[0;36m_decode_datetime_with_cftime\u001b[1;34m(num_dates, units, calendar)\u001b[0m\n\u001b[0;32m 238\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m num_dates\u001b[38;5;241m.\u001b[39msize \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[0;32m 239\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m np\u001b[38;5;241m.\u001b[39masarray(\n\u001b[1;32m--> 240\u001b[0m \u001b[43mcftime\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum2date\u001b[49m\u001b[43m(\u001b[49m\u001b[43mnum_dates\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43monly_use_cftime_datetimes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[0;32m 241\u001b[0m )\n\u001b[0;32m 242\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n",
+ "File \u001b[1;32msrc\\\\cftime\\\\_cftime.pyx:587\u001b[0m, in \u001b[0;36mcftime._cftime.num2date\u001b[1;34m()\u001b[0m\n",
+ "File \u001b[1;32msrc\\\\cftime\\\\_cftime.pyx:101\u001b[0m, in \u001b[0;36mcftime._cftime._dateparse\u001b[1;34m()\u001b[0m\n",
+ "\u001b[1;31mValueError\u001b[0m: 'months since' units only allowed for '360_day' calendar",
"\nDuring handling of the above exception, another exception occurred:\n",
- "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/conventions.py:428\u001b[0m, in \u001b[0;36mdecode_cf_variables\u001b[0;34m(variables, attributes, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[1;32m 427\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 428\u001b[0m new_vars[k] \u001b[39m=\u001b[39m decode_cf_variable(\n\u001b[1;32m 429\u001b[0m k,\n\u001b[1;32m 430\u001b[0m v,\n\u001b[1;32m 431\u001b[0m concat_characters\u001b[39m=\u001b[39;49mconcat_characters,\n\u001b[1;32m 432\u001b[0m mask_and_scale\u001b[39m=\u001b[39;49mmask_and_scale,\n\u001b[1;32m 433\u001b[0m decode_times\u001b[39m=\u001b[39;49mdecode_times,\n\u001b[1;32m 434\u001b[0m stack_char_dim\u001b[39m=\u001b[39;49mstack_char_dim,\n\u001b[1;32m 435\u001b[0m use_cftime\u001b[39m=\u001b[39;49muse_cftime,\n\u001b[1;32m 436\u001b[0m decode_timedelta\u001b[39m=\u001b[39;49mdecode_timedelta,\n\u001b[1;32m 437\u001b[0m )\n\u001b[1;32m 438\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mException\u001b[39;00m \u001b[39mas\u001b[39;00m e:\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/conventions.py:279\u001b[0m, in \u001b[0;36mdecode_cf_variable\u001b[0;34m(name, var, concat_characters, mask_and_scale, decode_times, decode_endianness, stack_char_dim, use_cftime, decode_timedelta)\u001b[0m\n\u001b[1;32m 278\u001b[0m \u001b[39mif\u001b[39;00m decode_times:\n\u001b[0;32m--> 279\u001b[0m var \u001b[39m=\u001b[39m times\u001b[39m.\u001b[39;49mCFDatetimeCoder(use_cftime\u001b[39m=\u001b[39;49muse_cftime)\u001b[39m.\u001b[39;49mdecode(var, name\u001b[39m=\u001b[39;49mname)\n\u001b[1;32m 281\u001b[0m \u001b[39mif\u001b[39;00m decode_endianness \u001b[39mand\u001b[39;00m \u001b[39mnot\u001b[39;00m var\u001b[39m.\u001b[39mdtype\u001b[39m.\u001b[39misnative:\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:831\u001b[0m, in \u001b[0;36mCFDatetimeCoder.decode\u001b[0;34m(self, variable, name)\u001b[0m\n\u001b[1;32m 830\u001b[0m calendar \u001b[39m=\u001b[39m pop_to(attrs, encoding, \u001b[39m\"\u001b[39m\u001b[39mcalendar\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m--> 831\u001b[0m dtype \u001b[39m=\u001b[39m _decode_cf_datetime_dtype(data, units, calendar, \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49muse_cftime)\n\u001b[1;32m 832\u001b[0m transform \u001b[39m=\u001b[39m partial(\n\u001b[1;32m 833\u001b[0m decode_cf_datetime,\n\u001b[1;32m 834\u001b[0m units\u001b[39m=\u001b[39munits,\n\u001b[1;32m 835\u001b[0m calendar\u001b[39m=\u001b[39mcalendar,\n\u001b[1;32m 836\u001b[0m use_cftime\u001b[39m=\u001b[39m\u001b[39mself\u001b[39m\u001b[39m.\u001b[39muse_cftime,\n\u001b[1;32m 837\u001b[0m )\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/coding/times.py:223\u001b[0m, in \u001b[0;36m_decode_cf_datetime_dtype\u001b[0;34m(data, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 218\u001b[0m msg \u001b[39m=\u001b[39m (\n\u001b[1;32m 219\u001b[0m \u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39munable to decode time units \u001b[39m\u001b[39m{\u001b[39;00munits\u001b[39m!r}\u001b[39;00m\u001b[39m with \u001b[39m\u001b[39m{\u001b[39;00mcalendar_msg\u001b[39m!r}\u001b[39;00m\u001b[39m. Try \u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 220\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mopening your dataset with decode_times=False or installing cftime \u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 221\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mif it is not installed.\u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 222\u001b[0m )\n\u001b[0;32m--> 223\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mValueError\u001b[39;00m(msg)\n\u001b[1;32m 224\u001b[0m \u001b[39melse\u001b[39;00m:\n",
- "\u001b[0;31mValueError\u001b[0m: unable to decode time units 'months since 1955-01-01 00:00:00' with 'the default calendar'. Try opening your dataset with decode_times=False or installing cftime if it is not installed.",
+ "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\conventions.py:440\u001b[0m, in \u001b[0;36mdecode_cf_variables\u001b[1;34m(variables, attributes, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[0;32m 439\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 440\u001b[0m new_vars[k] \u001b[38;5;241m=\u001b[39m \u001b[43mdecode_cf_variable\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 441\u001b[0m \u001b[43m \u001b[49m\u001b[43mk\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 442\u001b[0m \u001b[43m \u001b[49m\u001b[43mv\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 443\u001b[0m \u001b[43m \u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconcat_characters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 444\u001b[0m \u001b[43m \u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 445\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecode_times\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 446\u001b[0m \u001b[43m \u001b[49m\u001b[43mstack_char_dim\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstack_char_dim\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 447\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 448\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 449\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 450\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\conventions.py:291\u001b[0m, in \u001b[0;36mdecode_cf_variable\u001b[1;34m(name, var, concat_characters, mask_and_scale, decode_times, decode_endianness, stack_char_dim, use_cftime, decode_timedelta)\u001b[0m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m decode_times:\n\u001b[1;32m--> 291\u001b[0m var \u001b[38;5;241m=\u001b[39m \u001b[43mtimes\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mCFDatetimeCoder\u001b[49m\u001b[43m(\u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdecode\u001b[49m\u001b[43m(\u001b[49m\u001b[43mvar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 293\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m decode_endianness \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m var\u001b[38;5;241m.\u001b[39mdtype\u001b[38;5;241m.\u001b[39misnative:\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:987\u001b[0m, in \u001b[0;36mCFDatetimeCoder.decode\u001b[1;34m(self, variable, name)\u001b[0m\n\u001b[0;32m 986\u001b[0m calendar \u001b[38;5;241m=\u001b[39m pop_to(attrs, encoding, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcalendar\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 987\u001b[0m dtype \u001b[38;5;241m=\u001b[39m \u001b[43m_decode_cf_datetime_dtype\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 988\u001b[0m transform \u001b[38;5;241m=\u001b[39m partial(\n\u001b[0;32m 989\u001b[0m decode_cf_datetime,\n\u001b[0;32m 990\u001b[0m units\u001b[38;5;241m=\u001b[39munits,\n\u001b[0;32m 991\u001b[0m calendar\u001b[38;5;241m=\u001b[39mcalendar,\n\u001b[0;32m 992\u001b[0m use_cftime\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39muse_cftime,\n\u001b[0;32m 993\u001b[0m )\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\coding\\times.py:226\u001b[0m, in \u001b[0;36m_decode_cf_datetime_dtype\u001b[1;34m(data, units, calendar, use_cftime)\u001b[0m\n\u001b[0;32m 221\u001b[0m msg \u001b[38;5;241m=\u001b[39m (\n\u001b[0;32m 222\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124munable to decode time units \u001b[39m\u001b[38;5;132;01m{\u001b[39;00munits\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m with \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcalendar_msg\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m. Try \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 223\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mopening your dataset with decode_times=False or installing cftime \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 224\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mif it is not installed.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 225\u001b[0m )\n\u001b[1;32m--> 226\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(msg)\n\u001b[0;32m 227\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n",
+ "\u001b[1;31mValueError\u001b[0m: unable to decode time units 'months since 1955-01-01 00:00:00' with 'the default calendar'. Try opening your dataset with decode_times=False or installing cftime if it is not installed.",
+ "\nThe above exception was the direct cause of the following exception:\n",
+ "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
+ "File \u001b[1;32m~\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\tools\\converters.py:281\u001b[0m, in \u001b[0;36mconvert_xarray_time_units\u001b[1;34m(ds, time)\u001b[0m\n\u001b[0;32m 280\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 281\u001b[0m da2 \u001b[38;5;241m=\u001b[39m \u001b[43mxr\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdecode_cf\u001b[49m\u001b[43m(\u001b[49m\u001b[43mda2\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 282\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m:\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\conventions.py:581\u001b[0m, in \u001b[0;36mdecode_cf\u001b[1;34m(obj, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[0;32m 579\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcan only decode Dataset or DataStore objects\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 581\u001b[0m \u001b[38;5;28mvars\u001b[39m, attrs, coord_names \u001b[38;5;241m=\u001b[39m \u001b[43mdecode_cf_variables\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 582\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mvars\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 583\u001b[0m \u001b[43m \u001b[49m\u001b[43mattrs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 584\u001b[0m \u001b[43m \u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 585\u001b[0m \u001b[43m \u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 586\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 587\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_coords\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 588\u001b[0m \u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 589\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 590\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 591\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 592\u001b[0m ds \u001b[38;5;241m=\u001b[39m Dataset(\u001b[38;5;28mvars\u001b[39m, attrs\u001b[38;5;241m=\u001b[39mattrs)\n",
+ "File \u001b[1;32mc:\\Users\\asche\\miniconda3\\envs\\parcels_dev\\Lib\\site-packages\\xarray\\conventions.py:451\u001b[0m, in \u001b[0;36mdecode_cf_variables\u001b[1;34m(variables, attributes, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[0;32m 450\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m--> 451\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mtype\u001b[39m(e)(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFailed to decode variable \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mk\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00me\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01me\u001b[39;00m\n\u001b[0;32m 452\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m decode_coords \u001b[38;5;129;01min\u001b[39;00m [\u001b[38;5;28;01mTrue\u001b[39;00m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcoordinates\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mall\u001b[39m\u001b[38;5;124m\"\u001b[39m]:\n",
+ "\u001b[1;31mValueError\u001b[0m: Failed to decode variable 'time': unable to decode time units 'months since 1955-01-01 00:00:00' with 'the default calendar'. Try opening your dataset with decode_times=False or installing cftime if it is not installed.",
"\nDuring handling of the above exception, another exception occurred:\n",
- "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
- "File \u001b[0;32m~/Codes/ParcelsCode/parcels/tools/converters.py:266\u001b[0m, in \u001b[0;36mconvert_xarray_time_units\u001b[0;34m(ds, time)\u001b[0m\n\u001b[1;32m 265\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 266\u001b[0m da2 \u001b[39m=\u001b[39m xr\u001b[39m.\u001b[39;49mdecode_cf(da2)\n\u001b[1;32m 267\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mValueError\u001b[39;00m:\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/conventions.py:569\u001b[0m, in \u001b[0;36mdecode_cf\u001b[0;34m(obj, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[1;32m 567\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mTypeError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mcan only decode Dataset or DataStore objects\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m--> 569\u001b[0m \u001b[39mvars\u001b[39m, attrs, coord_names \u001b[39m=\u001b[39m decode_cf_variables(\n\u001b[1;32m 570\u001b[0m \u001b[39mvars\u001b[39;49m,\n\u001b[1;32m 571\u001b[0m attrs,\n\u001b[1;32m 572\u001b[0m concat_characters,\n\u001b[1;32m 573\u001b[0m mask_and_scale,\n\u001b[1;32m 574\u001b[0m decode_times,\n\u001b[1;32m 575\u001b[0m decode_coords,\n\u001b[1;32m 576\u001b[0m drop_variables\u001b[39m=\u001b[39;49mdrop_variables,\n\u001b[1;32m 577\u001b[0m use_cftime\u001b[39m=\u001b[39;49muse_cftime,\n\u001b[1;32m 578\u001b[0m decode_timedelta\u001b[39m=\u001b[39;49mdecode_timedelta,\n\u001b[1;32m 579\u001b[0m )\n\u001b[1;32m 580\u001b[0m ds \u001b[39m=\u001b[39m Dataset(\u001b[39mvars\u001b[39m, attrs\u001b[39m=\u001b[39mattrs)\n",
- "File \u001b[0;32m~/miniconda3/envs/parcels/lib/python3.11/site-packages/xarray/conventions.py:439\u001b[0m, in \u001b[0;36mdecode_cf_variables\u001b[0;34m(variables, attributes, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[1;32m 438\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mException\u001b[39;00m \u001b[39mas\u001b[39;00m e:\n\u001b[0;32m--> 439\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mtype\u001b[39m(e)(\u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mFailed to decode variable \u001b[39m\u001b[39m{\u001b[39;00mk\u001b[39m!r}\u001b[39;00m\u001b[39m: \u001b[39m\u001b[39m{\u001b[39;00me\u001b[39m}\u001b[39;00m\u001b[39m\"\u001b[39m)\n\u001b[1;32m 440\u001b[0m \u001b[39mif\u001b[39;00m decode_coords \u001b[39min\u001b[39;00m [\u001b[39mTrue\u001b[39;00m, \u001b[39m\"\u001b[39m\u001b[39mcoordinates\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39m\"\u001b[39m\u001b[39mall\u001b[39m\u001b[39m\"\u001b[39m]:\n",
- "\u001b[0;31mValueError\u001b[0m: Failed to decode variable 'time': unable to decode time units 'months since 1955-01-01 00:00:00' with 'the default calendar'. Try opening your dataset with decode_times=False or installing cftime if it is not installed.",
- "\nDuring handling of the above exception, another exception occurred:\n",
- "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)",
- "\u001b[1;32m/Users/erik/Codes/ParcelsCode/docs/examples/tutorial_timestamps.ipynb Cell 5\u001b[0m line \u001b[0;36m2\n\u001b[1;32m 1\u001b[0m example_dataset_folder \u001b[39m=\u001b[39m download_example_dataset(\u001b[39m\"\u001b[39m\u001b[39mWOA_data\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m----> 2\u001b[0m tempfield \u001b[39m=\u001b[39m Field\u001b[39m.\u001b[39;49mfrom_netcdf(\n\u001b[1;32m 3\u001b[0m glob(\u001b[39mf\u001b[39;49m\u001b[39m\"\u001b[39;49m\u001b[39m{\u001b[39;49;00mexample_dataset_folder\u001b[39m}\u001b[39;49;00m\u001b[39m/woa18_decav_*_04.nc\u001b[39;49m\u001b[39m\"\u001b[39;49m),\n\u001b[1;32m 4\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mt_an\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 5\u001b[0m {\u001b[39m\"\u001b[39;49m\u001b[39mlon\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mlon\u001b[39;49m\u001b[39m\"\u001b[39;49m, \u001b[39m\"\u001b[39;49m\u001b[39mlat\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mlat\u001b[39;49m\u001b[39m\"\u001b[39;49m, \u001b[39m\"\u001b[39;49m\u001b[39mtime\u001b[39;49m\u001b[39m\"\u001b[39;49m: \u001b[39m\"\u001b[39;49m\u001b[39mtime\u001b[39;49m\u001b[39m\"\u001b[39;49m},\n\u001b[1;32m 6\u001b[0m )\n",
- "File \u001b[0;32m~/Codes/ParcelsCode/parcels/field.py:457\u001b[0m, in \u001b[0;36mField.from_netcdf\u001b[0;34m(cls, filenames, variable, dimensions, indices, grid, mesh, timestamps, allow_time_extrapolation, time_periodic, deferred_load, **kwargs)\u001b[0m\n\u001b[1;32m 452\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mRuntimeError\u001b[39;00m(\u001b[39m'\u001b[39m\u001b[39mMultiple files given but no time dimension specified\u001b[39m\u001b[39m'\u001b[39m)\n\u001b[1;32m 454\u001b[0m \u001b[39mif\u001b[39;00m grid \u001b[39mis\u001b[39;00m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m 455\u001b[0m \u001b[39m# Concatenate time variable to determine overall dimension\u001b[39;00m\n\u001b[1;32m 456\u001b[0m \u001b[39m# across multiple files\u001b[39;00m\n\u001b[0;32m--> 457\u001b[0m time, time_origin, timeslices, dataFiles \u001b[39m=\u001b[39m \u001b[39mcls\u001b[39;49m\u001b[39m.\u001b[39;49mcollect_timeslices(timestamps, data_filenames,\n\u001b[1;32m 458\u001b[0m _grid_fb_class, dimensions,\n\u001b[1;32m 459\u001b[0m indices, netcdf_engine, netcdf_decodewarning)\n\u001b[1;32m 460\u001b[0m grid \u001b[39m=\u001b[39m Grid\u001b[39m.\u001b[39mcreate_grid(lon, lat, depth, time, time_origin\u001b[39m=\u001b[39mtime_origin, mesh\u001b[39m=\u001b[39mmesh)\n\u001b[1;32m 461\u001b[0m grid\u001b[39m.\u001b[39mtimeslices \u001b[39m=\u001b[39m timeslices\n",
- "File \u001b[0;32m~/Codes/ParcelsCode/parcels/field.py:290\u001b[0m, in \u001b[0;36mField.collect_timeslices\u001b[0;34m(timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning)\u001b[0m\n\u001b[1;32m 287\u001b[0m \u001b[39mfor\u001b[39;00m fname \u001b[39min\u001b[39;00m data_filenames:\n\u001b[1;32m 288\u001b[0m \u001b[39mwith\u001b[39;00m _grid_fb_class(fname, dimensions, indices, netcdf_engine\u001b[39m=\u001b[39mnetcdf_engine,\n\u001b[1;32m 289\u001b[0m netcdf_decodewarning\u001b[39m=\u001b[39mnetcdf_decodewarning) \u001b[39mas\u001b[39;00m filebuffer:\n\u001b[0;32m--> 290\u001b[0m ftime \u001b[39m=\u001b[39m filebuffer\u001b[39m.\u001b[39;49mtime\n\u001b[1;32m 291\u001b[0m timeslices\u001b[39m.\u001b[39mappend(ftime)\n\u001b[1;32m 292\u001b[0m dataFiles\u001b[39m.\u001b[39mappend([fname] \u001b[39m*\u001b[39m \u001b[39mlen\u001b[39m(ftime))\n",
- "File \u001b[0;32m~/Codes/ParcelsCode/parcels/fieldfilebuffer.py:215\u001b[0m, in \u001b[0;36mNetcdfFileBuffer.time\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 213\u001b[0m \u001b[39m@property\u001b[39m\n\u001b[1;32m 214\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mtime\u001b[39m(\u001b[39mself\u001b[39m):\n\u001b[0;32m--> 215\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtime_access()\n",
- "File \u001b[0;32m~/Codes/ParcelsCode/parcels/fieldfilebuffer.py:225\u001b[0m, in \u001b[0;36mNetcdfFileBuffer.time_access\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 222\u001b[0m \u001b[39mreturn\u001b[39;00m np\u001b[39m.\u001b[39marray([\u001b[39mNone\u001b[39;00m])\n\u001b[1;32m 224\u001b[0m time_da \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdataset[\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdimensions[\u001b[39m'\u001b[39m\u001b[39mtime\u001b[39m\u001b[39m'\u001b[39m]]\n\u001b[0;32m--> 225\u001b[0m convert_xarray_time_units(time_da, \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mdimensions[\u001b[39m'\u001b[39;49m\u001b[39mtime\u001b[39;49m\u001b[39m'\u001b[39;49m])\n\u001b[1;32m 226\u001b[0m time \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39marray([time_da[\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdimensions[\u001b[39m'\u001b[39m\u001b[39mtime\u001b[39m\u001b[39m'\u001b[39m]]\u001b[39m.\u001b[39mdata]) \u001b[39mif\u001b[39;00m \u001b[39mlen\u001b[39m(time_da\u001b[39m.\u001b[39mshape) \u001b[39m==\u001b[39m \u001b[39m0\u001b[39m \u001b[39melse\u001b[39;00m np\u001b[39m.\u001b[39marray(time_da[\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdimensions[\u001b[39m'\u001b[39m\u001b[39mtime\u001b[39m\u001b[39m'\u001b[39m]])\n\u001b[1;32m 227\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39misinstance\u001b[39m(time[\u001b[39m0\u001b[39m], datetime\u001b[39m.\u001b[39mdatetime):\n",
- "File \u001b[0;32m~/Codes/ParcelsCode/parcels/tools/converters.py:268\u001b[0m, in \u001b[0;36mconvert_xarray_time_units\u001b[0;34m(ds, time)\u001b[0m\n\u001b[1;32m 266\u001b[0m da2 \u001b[39m=\u001b[39m xr\u001b[39m.\u001b[39mdecode_cf(da2)\n\u001b[1;32m 267\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mValueError\u001b[39;00m:\n\u001b[0;32m--> 268\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mRuntimeError\u001b[39;00m(\u001b[39m'\u001b[39m\u001b[39mXarray could not convert the calendar. If you\u001b[39m\u001b[39m'\u001b[39m\u001b[39m'\u001b[39m\u001b[39mre using from_netcdf, \u001b[39m\u001b[39m'\u001b[39m\n\u001b[1;32m 269\u001b[0m \u001b[39m'\u001b[39m\u001b[39mtry using the timestamps keyword in the construction of your Field. \u001b[39m\u001b[39m'\u001b[39m\n\u001b[1;32m 270\u001b[0m \u001b[39m'\u001b[39m\u001b[39mSee also the tutorial at https://docs.oceanparcels.org/en/latest/\u001b[39m\u001b[39m'\u001b[39m\n\u001b[1;32m 271\u001b[0m \u001b[39m'\u001b[39m\u001b[39mexamples/tutorial_timestamps.html\u001b[39m\u001b[39m'\u001b[39m)\n\u001b[1;32m 272\u001b[0m ds[time] \u001b[39m=\u001b[39m da2[time]\n",
- "\u001b[0;31mRuntimeError\u001b[0m: Xarray could not convert the calendar. If youre using from_netcdf, try using the timestamps keyword in the construction of your Field. See also the tutorial at https://docs.oceanparcels.org/en/latest/examples/tutorial_timestamps.html"
+ "\u001b[1;31mRuntimeError\u001b[0m Traceback (most recent call last)",
+ "Cell \u001b[1;32mIn[2], line 2\u001b[0m\n\u001b[0;32m 1\u001b[0m example_dataset_folder \u001b[38;5;241m=\u001b[39m parcels\u001b[38;5;241m.\u001b[39mdownload_example_dataset(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mWOA_data\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m----> 2\u001b[0m tempfield \u001b[38;5;241m=\u001b[39m \u001b[43mparcels\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mField\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfrom_netcdf\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 3\u001b[0m \u001b[43m \u001b[49m\u001b[43mglob\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43mf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43mexample_dataset_folder\u001b[49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[38;5;124;43m/woa18_decav_*_04.nc\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 4\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mt_an\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 5\u001b[0m \u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlon\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlon\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlat\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlat\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtime\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtime\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 6\u001b[0m \u001b[43m)\u001b[49m\n",
+ "File \u001b[1;32m~\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\field.py:540\u001b[0m, in \u001b[0;36mField.from_netcdf\u001b[1;34m(cls, filenames, variable, dimensions, indices, grid, mesh, timestamps, allow_time_extrapolation, time_periodic, deferred_load, **kwargs)\u001b[0m\n\u001b[0;32m 535\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMultiple files given but no time dimension specified\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 537\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m grid \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 538\u001b[0m \u001b[38;5;66;03m# Concatenate time variable to determine overall dimension\u001b[39;00m\n\u001b[0;32m 539\u001b[0m \u001b[38;5;66;03m# across multiple files\u001b[39;00m\n\u001b[1;32m--> 540\u001b[0m time, time_origin, timeslices, dataFiles \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mcls\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcollect_timeslices\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 541\u001b[0m \u001b[43m \u001b[49m\u001b[43mtimestamps\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdata_filenames\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m_grid_fb_class\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdimensions\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mindices\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnetcdf_engine\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnetcdf_decodewarning\u001b[49m\n\u001b[0;32m 542\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 543\u001b[0m grid \u001b[38;5;241m=\u001b[39m Grid\u001b[38;5;241m.\u001b[39mcreate_grid(lon, lat, depth, time, time_origin\u001b[38;5;241m=\u001b[39mtime_origin, mesh\u001b[38;5;241m=\u001b[39mmesh)\n\u001b[0;32m 544\u001b[0m grid\u001b[38;5;241m.\u001b[39mtimeslices \u001b[38;5;241m=\u001b[39m timeslices\n",
+ "File \u001b[1;32m~\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\field.py:333\u001b[0m, in \u001b[0;36mField.collect_timeslices\u001b[1;34m(timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning)\u001b[0m\n\u001b[0;32m 329\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m fname \u001b[38;5;129;01min\u001b[39;00m data_filenames:\n\u001b[0;32m 330\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m _grid_fb_class(\n\u001b[0;32m 331\u001b[0m fname, dimensions, indices, netcdf_engine\u001b[38;5;241m=\u001b[39mnetcdf_engine, netcdf_decodewarning\u001b[38;5;241m=\u001b[39mnetcdf_decodewarning\n\u001b[0;32m 332\u001b[0m ) \u001b[38;5;28;01mas\u001b[39;00m filebuffer:\n\u001b[1;32m--> 333\u001b[0m ftime \u001b[38;5;241m=\u001b[39m \u001b[43mfilebuffer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtime\u001b[49m\n\u001b[0;32m 334\u001b[0m timeslices\u001b[38;5;241m.\u001b[39mappend(ftime)\n\u001b[0;32m 335\u001b[0m dataFiles\u001b[38;5;241m.\u001b[39mappend([fname] \u001b[38;5;241m*\u001b[39m \u001b[38;5;28mlen\u001b[39m(ftime))\n",
+ "File \u001b[1;32m~\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\fieldfilebuffer.py:221\u001b[0m, in \u001b[0;36mNetcdfFileBuffer.time\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 219\u001b[0m \u001b[38;5;129m@property\u001b[39m\n\u001b[0;32m 220\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mtime\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[1;32m--> 221\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtime_access\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
+ "File \u001b[1;32m~\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\fieldfilebuffer.py:231\u001b[0m, in \u001b[0;36mNetcdfFileBuffer.time_access\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 228\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m np\u001b[38;5;241m.\u001b[39marray([\u001b[38;5;28;01mNone\u001b[39;00m])\n\u001b[0;32m 230\u001b[0m time_da \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdimensions[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m\"\u001b[39m]]\n\u001b[1;32m--> 231\u001b[0m \u001b[43mconvert_xarray_time_units\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtime_da\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdimensions\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtime\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 232\u001b[0m time \u001b[38;5;241m=\u001b[39m (\n\u001b[0;32m 233\u001b[0m np\u001b[38;5;241m.\u001b[39marray([time_da[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdimensions[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m\"\u001b[39m]]\u001b[38;5;241m.\u001b[39mdata])\n\u001b[0;32m 234\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(time_da\u001b[38;5;241m.\u001b[39mshape) \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 235\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m np\u001b[38;5;241m.\u001b[39marray(time_da[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdimensions[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m\"\u001b[39m]])\n\u001b[0;32m 236\u001b[0m )\n\u001b[0;32m 237\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(time[\u001b[38;5;241m0\u001b[39m], datetime\u001b[38;5;241m.\u001b[39mdatetime):\n",
+ "File \u001b[1;32m~\\Desktop\\po-code\\parcels_dev\\parcels\\parcels\\tools\\converters.py:283\u001b[0m, in \u001b[0;36mconvert_xarray_time_units\u001b[1;34m(ds, time)\u001b[0m\n\u001b[0;32m 281\u001b[0m da2 \u001b[38;5;241m=\u001b[39m xr\u001b[38;5;241m.\u001b[39mdecode_cf(da2)\n\u001b[0;32m 282\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m:\n\u001b[1;32m--> 283\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 284\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mXarray could not convert the calendar. If you\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mre using from_netcdf, \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 285\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtry using the timestamps keyword in the construction of your Field. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 286\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSee also the tutorial at https://docs.oceanparcels.org/en/latest/examples/tutorial_timestamps.html\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 287\u001b[0m )\n\u001b[0;32m 288\u001b[0m ds[time] \u001b[38;5;241m=\u001b[39m da2[time]\n",
+ "\u001b[1;31mRuntimeError\u001b[0m: Xarray could not convert the calendar. If you're using from_netcdf, try using the timestamps keyword in the construction of your Field. See also the tutorial at https://docs.oceanparcels.org/en/latest/examples/tutorial_timestamps.html"
]
}
],
@@ -141,13 +145,14 @@
"metadata": {},
"outputs": [],
"source": [
- "tempfield = parcels.Field.from_netcdf(\n",
- " glob(f\"{example_dataset_folder}/woa18_decav_*_04.nc\"),\n",
- " \"t_an\",\n",
- " {\"lon\": \"lon\", \"lat\": \"lat\", \"time\": \"time\"},\n",
- " netcdf_decodewarning=False,\n",
- " timestamps=timestamps,\n",
- ")"
+ "with warnings.catch_warnings():\n",
+ " warnings.simplefilter(\"ignore\", parcels.FileWarning)\n",
+ " tempfield = parcels.Field.from_netcdf(\n",
+ " glob(f\"{example_dataset_folder}/woa18_decav_*_04.nc\"),\n",
+ " \"t_an\",\n",
+ " {\"lon\": \"lon\", \"lat\": \"lat\", \"time\": \"time\"},\n",
+ " timestamps=timestamps,\n",
+ " )"
]
},
{
@@ -163,7 +168,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "Furthermore, note that we used `netcdf_decodewarning=False` in the `FieldSet.from_netcdf()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`."
+ "Furthermore, note that we used `warnings.catch_warnings()` with `warnings.simplefilter(\"ignore\", parcels.FileWarning)` to wrap the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`."
]
}
],
@@ -183,7 +188,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.6"
+ "version": "3.12.4"
}
},
"nbformat": 4,
diff --git a/docs/reference/misc.rst b/docs/reference/misc.rst
index 55a427b8b..a8a195f78 100644
--- a/docs/reference/misc.rst
+++ b/docs/reference/misc.rst
@@ -29,6 +29,13 @@ parcels.tools.loggers module
:members:
:undoc-members:
+parcels.tools.warnings module
+-----------------------------
+
+.. automodule:: parcels.tools.warnings
+ :members:
+ :undoc-members:
+
parcels.tools.exampledata_utils module
--------------------------------------
diff --git a/parcels/compilation/codegenerator.py b/parcels/compilation/codegenerator.py
index 921b422cb..e869bad19 100644
--- a/parcels/compilation/codegenerator.py
+++ b/parcels/compilation/codegenerator.py
@@ -2,6 +2,7 @@
import collections
import math
import random
+import warnings
from abc import ABC
from copy import copy
@@ -10,8 +11,8 @@
from parcels.field import Field, NestedField, VectorField
from parcels.grid import Grid
from parcels.particle import JITParticle
-from parcels.tools.loggers import logger
from parcels.tools.statuscodes import StatusCode
+from parcels.tools.warnings import KernelWarning
class IntrinsicNode(ast.AST):
@@ -178,9 +179,11 @@ def __init__(self, obj, attr):
class ParticleXiYiZiTiAttributeNode(IntrinsicNode):
def __init__(self, obj, attr):
- logger.warning_once(
+ warnings.warn(
f"Be careful when sampling particle.{attr}, as this is updated in the kernel loop. "
- "Best to place the sampling statement before advection."
+ "Best to place the sampling statement before advection.",
+ KernelWarning,
+ stacklevel=2,
)
self.obj = obj.ccode
self.attr = attr
@@ -309,8 +312,10 @@ def visit_Subscript(self, node):
def visit_AugAssign(self, node):
node.target = self.visit(node.target)
if isinstance(node.target, ParticleAttributeNode) and node.target.attr in ["lon", "lat", "depth", "time"]:
- logger.warning_once(
- "Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc."
+ warnings.warn(
+ "Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc.",
+ KernelWarning,
+ stacklevel=2,
)
node.op = self.visit(node.op)
node.value = self.visit(node.value)
@@ -439,7 +444,11 @@ def generate(self, py_ast, funcvars: list[str]):
for kvar in funcvars:
if kvar in used_vars + ["particle_dlon", "particle_dlat", "particle_ddepth"]:
if kvar not in ["particle", "fieldset", "time", "particle_dlon", "particle_dlat", "particle_ddepth"]:
- logger.warning(kvar + " declared in multiple Kernels")
+ warnings.warn(
+ kvar + " declared in multiple Kernels",
+ KernelWarning,
+ stacklevel=2,
+ )
funcvars_copy.remove(kvar)
else:
used_vars.append(kvar)
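
Replacing `logger.warning_once` with plain `warnings.warn` keeps the once-per-site behaviour, because the stdlib's default filter action reports a given (category, module, line) combination only once per session. A self-contained sketch with a stand-in `KernelWarning` (the real class lives in `parcels.tools.warnings`); `stacklevel=2` makes the report point at the caller, which is why the code-generator checks above use it:

```python
import warnings


class KernelWarning(UserWarning):
    """Stand-in for parcels.tools.warnings.KernelWarning."""


def check_kernel():
    # stacklevel=2 attributes the warning to check_kernel's caller,
    # i.e. the user's code rather than this helper.
    warnings.warn("kvar declared in multiple Kernels", KernelWarning, stacklevel=2)


for _ in range(3):
    check_kernel()  # printed once: repeats from the same call site are deduplicated
```
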
diff --git a/parcels/field.py b/parcels/field.py
index aa36fad46..84d59ff5d 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -1,6 +1,7 @@
import collections
import datetime
import math
+import warnings
from ctypes import POINTER, Structure, c_float, c_int, pointer
from pathlib import Path
from typing import TYPE_CHECKING, Iterable, Type
@@ -18,7 +19,6 @@
UnitConverter,
unitconverters_map,
)
-from parcels.tools.loggers import logger
from parcels.tools.statuscodes import (
AllParcelsErrorCodes,
FieldOutOfBoundError,
@@ -26,6 +26,7 @@
FieldSamplingError,
TimeExtrapolationError,
)
+from parcels.tools.warnings import FieldSetWarning, _deprecated_param_netcdf_decodewarning
from .fieldfilebuffer import (
DaskFileBuffer,
@@ -163,6 +164,10 @@ def __init__(
to_write=False,
**kwargs,
):
+ if "netcdf_decodewarning" in kwargs:
+ _deprecated_param_netcdf_decodewarning()
+ kwargs.pop("netcdf_decodewarning")
+
if not isinstance(name, tuple):
self.name = name
self.filebuffername = name
@@ -211,8 +216,10 @@ def __init__(
GridType.RectilinearSGrid,
GridType.CurvilinearSGrid,
]:
- logger.warning_once( # type: ignore
- "General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal."
+ warnings.warn(
+ "General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal.",
+ FieldSetWarning,
+ stacklevel=2,
)
self.fieldset: "FieldSet" | None = None
@@ -223,9 +230,10 @@ def __init__(
self.time_periodic = time_periodic
if self.time_periodic is not False and self.allow_time_extrapolation:
- logger.warning_once( # type: ignore
- "allow_time_extrapolation and time_periodic cannot be used together.\n \
- allow_time_extrapolation is set to False"
+ warnings.warn(
+ "allow_time_extrapolation and time_periodic cannot be used together. allow_time_extrapolation is set to False",
+ FieldSetWarning,
+ stacklevel=2,
)
self.allow_time_extrapolation = False
if self.time_periodic is True:
@@ -275,9 +283,8 @@ def __init__(
self.dataFiles = np.append(self.dataFiles, self.dataFiles[0])
self._field_fb_class = kwargs.pop("FieldFileBuffer", None)
self.netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
- self.netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)
- self.loaded_time_indices: Iterable[int] = []
- self.creation_log: str = kwargs.pop("creation_log", "")
+ self.loaded_time_indices: Iterable[int] = [] # type: ignore
+ self.creation_log = kwargs.pop("creation_log", "")
self.chunksize = kwargs.pop("chunksize", None)
self.netcdf_chunkdims_name_map = kwargs.pop("chunkdims_name_map", None)
self.grid.depth_field = kwargs.pop("depth_field", None)
@@ -315,8 +322,10 @@ def get_dim_filenames(cls, filenames, dim):
@staticmethod
def collect_timeslices(
- timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning=True
+ timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning=None
):
+ if netcdf_decodewarning is not None:
+ _deprecated_param_netcdf_decodewarning()
if timestamps is not None:
dataFiles = []
for findex in range(len(data_filenames)):
@@ -329,9 +338,7 @@ def collect_timeslices(
timeslices = []
dataFiles = []
for fname in data_filenames:
- with _grid_fb_class(
- fname, dimensions, indices, netcdf_engine=netcdf_engine, netcdf_decodewarning=netcdf_decodewarning
- ) as filebuffer:
+ with _grid_fb_class(fname, dimensions, indices, netcdf_engine=netcdf_engine) as filebuffer:
ftime = filebuffer.time
timeslices.append(ftime)
dataFiles.append([fname] * len(ftime))
@@ -408,7 +415,7 @@ def from_netcdf(
chunksize :
size of the chunks in dask loading
netcdf_decodewarning : bool
- Whether to show a warning id there is a problem decoding the netcdf files.
- Default is True, but in some cases where these warnings are expected, it may be useful to silence them
- by setting netcdf_decodewarning=False.
+ (DEPRECATED - v3.1.0) This parameter no longer has any effect. To silence expected
+ decoding warnings, filter parcels.FileWarning via the warnings module instead,
+ e.g. warnings.filterwarnings("ignore", category=parcels.FileWarning).
grid :
@@ -423,6 +430,10 @@ def from_netcdf(
* `Timestamps <../examples/tutorial_timestamps.ipynb>`__
"""
+ if "netcdf_decodewarning" in kwargs:
+ _deprecated_param_netcdf_decodewarning()
+ kwargs.pop("netcdf_decodewarning")
+
# Ensure the timestamps array is compatible with the user-provided datafiles.
if timestamps is not None:
if isinstance(filenames, list):
@@ -475,7 +486,6 @@ def from_netcdf(
depth_filename = depth_filename[0]
netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
- netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)
indices = {} if indices is None else indices.copy()
for ind in indices:
@@ -498,9 +508,7 @@ def from_netcdf(
_grid_fb_class = NetcdfFileBuffer
- with _grid_fb_class(
- lonlat_filename, dimensions, indices, netcdf_engine, netcdf_decodewarning=netcdf_decodewarning
- ) as filebuffer:
+ with _grid_fb_class(lonlat_filename, dimensions, indices, netcdf_engine) as filebuffer:
lon, lat = filebuffer.lonlat
indices = filebuffer.indices
# Check if parcels_mesh has been explicitly set in file
@@ -514,7 +522,6 @@ def from_netcdf(
indices,
netcdf_engine,
interp_method=interp_method,
- netcdf_decodewarning=netcdf_decodewarning,
) as filebuffer:
filebuffer.name = filebuffer.parse_name(variable[1])
if dimensions["depth"] == "not_yet_set":
@@ -537,7 +544,7 @@ def from_netcdf(
# Concatenate time variable to determine overall dimension
# across multiple files
time, time_origin, timeslices, dataFiles = cls.collect_timeslices(
- timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning
+ timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine
)
grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh)
grid.timeslices = timeslices
@@ -546,7 +553,7 @@ def from_netcdf(
# ==== means: the field has a shared grid, but may have different data files, so we need to collect the
# ==== correct file time series again.
_, _, _, dataFiles = cls.collect_timeslices(
- timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning
+ timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine
)
kwargs["dataFiles"] = dataFiles
@@ -554,7 +561,9 @@ def from_netcdf(
grid.chunksize = chunksize
if "time" in indices:
- logger.warning_once("time dimension in indices is not necessary anymore. It is then ignored.") # type: ignore
+ warnings.warn(
+ "time dimension in indices is not necessary anymore. It is then ignored.", FieldSetWarning, stacklevel=2
+ )
if "full_load" in kwargs: # for backward compatibility with Parcels < v2.0.0
deferred_load = not kwargs["full_load"]
@@ -587,7 +596,6 @@ def from_netcdf(
interp_method=interp_method,
data_full_zdim=data_full_zdim,
chunksize=chunksize,
- netcdf_decodewarning=netcdf_decodewarning,
) as filebuffer:
# If Field.from_netcdf is called directly, it may not have a 'data' dimension
# In that case, assume that 'name' is the data dimension
@@ -632,7 +640,6 @@ def from_netcdf(
kwargs["indices"] = indices
kwargs["time_periodic"] = time_periodic
kwargs["netcdf_engine"] = netcdf_engine
- kwargs["netcdf_decodewarning"] = netcdf_decodewarning
return cls(
variable,
@@ -820,16 +827,13 @@ def calc_cell_edge_sizes(self):
self.grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, lon, lat, self.grid.depth[0])
self.cell_edge_sizes = self.grid.cell_edge_sizes
else:
- logger.error(
+ raise ValueError(
(
- "Field.cell_edge_sizes() not implemented for ",
- self.grid.gtype,
- "grids.",
- "You can provide Field.grid.cell_edge_sizes yourself",
- "by in e.g. NEMO using the e1u fields etc from the mesh_mask.nc file",
+ f"Field.cell_edge_sizes() not implemented for {self.grid.gtype} grids. "
+ "You can provide Field.grid.cell_edge_sizes yourself by in, e.g., "
+ "NEMO using the e1u fields etc from the mesh_mask.nc file."
)
)
- exit(-1)
def cell_areas(self):
"""Method to calculate cell sizes based on cell_edge_sizes.
@@ -1347,8 +1351,10 @@ def time_index(self, time):
def _check_velocitysampling(self):
if self.name in ["U", "V", "W"]:
- logger.warning_once(
- "Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully"
+ warnings.warn(
+ "Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully",
+ RuntimeWarning,
+ stacklevel=2,
)
def __getitem__(self, key):
@@ -1653,7 +1659,6 @@ def computeTimeChunk(self, data, tindex):
cast_data_dtype=self.cast_data_dtype,
rechunk_callback_fields=rechunk_callback_fields,
chunkdims_name_map=self.netcdf_chunkdims_name_map,
- netcdf_decodewarning=self.netcdf_decodewarning,
)
filebuffer.__enter__()
time_data = filebuffer.time
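
The shim above keeps `netcdf_decodewarning` accepted but inert, so existing scripts keep running while being nudged to migrate. The intended before/after, sketched with placeholder arguments (`files`, `var`, `dims` are illustrative only):

    import warnings

    import parcels

    # Before (v3.0.x): suppression via a Parcels-specific kwarg.
    #   field = parcels.Field.from_netcdf(files, var, dims, netcdf_decodewarning=False)
    # After (v3.1.0): the kwarg only triggers a DeprecationWarning; use the
    # stdlib filter instead.
    warnings.filterwarnings("ignore", category=parcels.FileWarning)
    # field = parcels.Field.from_netcdf(files, var, dims)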
diff --git a/parcels/fieldfilebuffer.py b/parcels/fieldfilebuffer.py
index c3b45fc51..fcf13b392 100644
--- a/parcels/fieldfilebuffer.py
+++ b/parcels/fieldfilebuffer.py
@@ -1,5 +1,6 @@
import datetime
import math
+import warnings
import dask.array as da
import numpy as np
@@ -11,8 +12,8 @@
from parcels._typing import InterpMethodOption
from parcels.tools.converters import convert_xarray_time_units
-from parcels.tools.loggers import logger
from parcels.tools.statuscodes import DaskChunkingError
+from parcels.tools.warnings import FileWarning
class _FileBuffer:
@@ -45,7 +46,6 @@ class NetcdfFileBuffer(_FileBuffer):
def __init__(self, *args, **kwargs):
self.lib = np
self.netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
- self.netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)
super().__init__(*args, **kwargs)
def __enter__(self):
@@ -56,11 +56,12 @@ def __enter__(self):
self.dataset = xr.open_dataset(str(self.filename), decode_cf=True, engine=self.netcdf_engine)
self.dataset["decoded"] = True
except:
- if self.netcdf_decodewarning:
- logger.warning_once(
- f"File {self.filename} could not be decoded properly by xarray (version {xr.__version__}). "
- "It will be opened with no decoding. Filling values might be wrongly parsed."
- )
+ warnings.warn(
+ f"File {self.filename} could not be decoded properly by xarray (version {xr.__version__}). "
+ "It will be opened with no decoding. Filling values might be wrongly parsed.",
+ FileWarning,
+ stacklevel=2,
+ )
self.dataset = xr.open_dataset(str(self.filename), decode_cf=False, engine=self.netcdf_engine)
self.dataset["decoded"] = False
@@ -336,8 +337,11 @@ def __enter__(self):
)
self.dataset["decoded"] = True
except:
- logger.warning_once(
- f"File {self.filename} could not be decoded properly by xarray (version {xr.__version__}). It will be opened with no decoding. Filling values might be wrongly parsed."
+ warnings.warn(
+ f"File {self.filename} could not be decoded properly by xarray (version {xr.__version__}). "
+ "It will be opened with no decoding. Filling values might be wrongly parsed.",
+ FileWarning,
+ stacklevel=2,
)
if self.lock_file:
self.dataset = xr.open_dataset(
@@ -740,9 +744,11 @@ def _get_initial_chunk_dictionary(self):
if predefined_cap is not None:
chunk_cap = da_utils.parse_bytes(predefined_cap)
else:
- logger.info_once(
- "Unable to locate chunking hints from dask, thus estimating the max. chunk size heuristically."
- "Please consider defining the 'chunk-size' for 'array' in your local dask configuration file (see https://docs.oceanparcels.org/en/latest/examples/documentation_MPI.html#Chunking-the-FieldSet-with-dask and https://docs.dask.org)."
+ warnings.warn(
+ "Unable to locate chunking hints from dask, thus estimating the max. chunk size heuristically. "
+ "Please consider defining the 'chunk-size' for 'array' in your local dask configuration file (see https://docs.oceanparcels.org/en/latest/examples/documentation_MPI.html#Chunking-the-FieldSet-with-dask and https://docs.dask.org).",
+ FileWarning,
+ stacklevel=2,
)
loni, lonname, lonvalue = self._is_dimension_in_dataset("lon")
lati, latname, latvalue = self._is_dimension_in_dataset("lat")
@@ -779,8 +785,10 @@ def _get_initial_chunk_dictionary(self):
if isinstance(self.chunksize, dict):
self.chunksize = init_chunk_dict
except:
- logger.warning(
- f"Chunking with init_chunk_dict = {init_chunk_dict} failed - Executing Dask chunking 'failsafe'..."
+ warnings.warn(
+ f"Chunking with init_chunk_dict = {init_chunk_dict} failed - Executing Dask chunking 'failsafe'...",
+ FileWarning,
+ stacklevel=2,
)
self.autochunkingfailed = True
if not self.autochunkingfailed:
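
The chunking-hint message above points users at dask's configuration; done programmatically rather than via a config file, that looks roughly like this (128MiB is an illustrative value, not a recommendation from this changeset):

    import dask

    # An explicit chunk-size hint lets _get_initial_chunk_dictionary skip its
    # heuristic estimate, and with it the FileWarning it now emits.
    dask.config.set({"array.chunk-size": "128MiB"})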
diff --git a/parcels/fieldset.py b/parcels/fieldset.py
index 16b690f5c..9a5fdf3ba 100644
--- a/parcels/fieldset.py
+++ b/parcels/fieldset.py
@@ -1,6 +1,7 @@
import importlib.util
import os
import sys
+import warnings
from copy import deepcopy
from glob import glob
@@ -16,6 +17,7 @@
from parcels.tools.converters import TimeConverter, convert_xarray_time_units
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import TimeExtrapolationError
+from parcels.tools.warnings import FieldSetWarning
__all__ = ["FieldSet"]
@@ -435,7 +437,11 @@ def from_netcdf(
"""
# Ensure that times are not provided both in netcdf file and in 'timestamps'.
if timestamps is not None and "time" in dimensions:
- logger.warning_once("Time already provided, defaulting to dimensions['time'] over timestamps.") # type: ignore
+ warnings.warn(
+ "Time already provided, defaulting to dimensions['time'] over timestamps.",
+ FieldSetWarning,
+ stacklevel=2,
+ )
timestamps = None
fields: dict[str, Field] = {}
@@ -909,8 +915,10 @@ def from_pop(
if hasattr(fieldset, "W"):
if depth_units == "m":
fieldset.W.set_scaling_factor(-0.01) # cm/s to m/s and change the W direction
- logger.warning_once( # type: ignore
- "Parcels assumes depth in POP output to be in 'm'. Use depth_units='cm' if the output depth is in 'cm'."
+ warnings.warn(
+ "Parcels assumes depth in POP output to be in 'm'. Use depth_units='cm' if the output depth is in 'cm'.",
+ FieldSetWarning,
+ stacklevel=2,
)
elif depth_units == "cm":
fieldset.W.set_scaling_factor(-1.0) # change the W direction but keep W in cm/s because depth is in cm
diff --git a/parcels/grid.py b/parcels/grid.py
index 27cb3a285..02763fdf9 100644
--- a/parcels/grid.py
+++ b/parcels/grid.py
@@ -1,4 +1,5 @@
import functools
+import warnings
from ctypes import POINTER, Structure, c_double, c_float, c_int, c_void_p, cast, pointer
from enum import IntEnum
@@ -7,7 +8,7 @@
from parcels._typing import Mesh
from parcels.tools.converters import TimeConverter
-from parcels.tools.loggers import logger
+from parcels.tools.warnings import FieldSetWarning
__all__ = [
"GridType",
@@ -344,9 +345,11 @@ def __init__(self, lon, lat, time, time_origin, mesh: Mesh):
if self.ydim > 1 and self.lat[-1] < self.lat[0]:
self.lat = np.flip(self.lat, axis=0)
self.lat_flipped = True
- logger.warning_once( # type: ignore
+ warnings.warn(
"Flipping lat data from North-South to South-North. "
- "Note that this may lead to wrong sign for meridional velocity, so tread very carefully"
+ "Note that this may lead to wrong sign for meridional velocity, so tread very carefully",
+ FieldSetWarning,
+ stacklevel=2,
)
def add_periodic_halo(self, zonal, meridional, halosize=5):
@@ -365,8 +368,12 @@ def add_periodic_halo(self, zonal, meridional, halosize=5):
if zonal:
lonshift = self.lon[-1] - 2 * self.lon[0] + self.lon[1]
if not np.allclose(self.lon[1] - self.lon[0], self.lon[-1] - self.lon[-2]):
- logger.warning_once(
- "The zonal halo is located at the east and west of current grid, with a dx = lon[1]-lon[0] between the last nodes of the original grid and the first ones of the halo. In your grid, lon[1]-lon[0] != lon[-1]-lon[-2]. Is the halo computed as you expect?"
+ warnings.warn(
+ "The zonal halo is located at the east and west of current grid, "
+ "with a dx = lon[1]-lon[0] between the last nodes of the original grid and the first ones of the halo. "
+ "In your grid, lon[1]-lon[0] != lon[-1]-lon[-2]. Is the halo computed as you expect?",
+ FieldSetWarning,
+ stacklevel=2,
)
self.lon = np.concatenate((self.lon[-halosize:] - lonshift, self.lon, self.lon[0:halosize] + lonshift))
self.xdim = self.lon.size
@@ -374,8 +381,12 @@ def add_periodic_halo(self, zonal, meridional, halosize=5):
self.zonal_halo = halosize
if meridional:
if not np.allclose(self.lat[1] - self.lat[0], self.lat[-1] - self.lat[-2]):
- logger.warning_once(
- "The meridional halo is located at the north and south of current grid, with a dy = lat[1]-lat[0] between the last nodes of the original grid and the first ones of the halo. In your grid, lat[1]-lat[0] != lat[-1]-lat[-2]. Is the halo computed as you expect?"
+ warnings.warn(
+ "The meridional halo is located at the north and south of current grid, "
+ "with a dy = lat[1]-lat[0] between the last nodes of the original grid and the first ones of the halo. "
+ "In your grid, lat[1]-lat[0] != lat[-1]-lat[-2]. Is the halo computed as you expect?",
+ FieldSetWarning,
+ stacklevel=2,
)
latshift = self.lat[-1] - 2 * self.lat[0] + self.lat[1]
self.lat = np.concatenate((self.lat[-halosize:] - latshift, self.lat, self.lat[0:halosize] + latshift))
@@ -539,8 +550,12 @@ def add_periodic_halo(self, zonal, meridional, halosize=5):
if zonal:
lonshift = self.lon[:, -1] - 2 * self.lon[:, 0] + self.lon[:, 1]
if not np.allclose(self.lon[:, 1] - self.lon[:, 0], self.lon[:, -1] - self.lon[:, -2]):
- logger.warning_once(
- "The zonal halo is located at the east and west of current grid, with a dx = lon[:,1]-lon[:,0] between the last nodes of the original grid and the first ones of the halo. In your grid, lon[:,1]-lon[:,0] != lon[:,-1]-lon[:,-2]. Is the halo computed as you expect?"
+ warnings.warn(
+ "The zonal halo is located at the east and west of current grid, "
+ "with a dx = lon[1]-lon[0] between the last nodes of the original grid and the first ones of the halo. "
+ "In your grid, lon[1]-lon[0] != lon[-1]-lon[-2]. Is the halo computed as you expect?",
+ FieldSetWarning,
+ stacklevel=2,
)
self.lon = np.concatenate(
(
@@ -559,8 +574,12 @@ def add_periodic_halo(self, zonal, meridional, halosize=5):
self.zonal_halo = halosize
if meridional:
if not np.allclose(self.lat[1, :] - self.lat[0, :], self.lat[-1, :] - self.lat[-2, :]):
- logger.warning_once(
- "The meridional halo is located at the north and south of current grid, with a dy = lat[1,:]-lat[0,:] between the last nodes of the original grid and the first ones of the halo. In your grid, lat[1,:]-lat[0,:] != lat[-1,:]-lat[-2,:]. Is the halo computed as you expect?"
+ warnings.warn(
+ "The meridional halo is located at the north and south of current grid, "
+ "with a dy = lat[1]-lat[0] between the last nodes of the original grid and the first ones of the halo. "
+ "In your grid, lat[1]-lat[0] != lat[-1]-lat[-2]. Is the halo computed as you expect?",
+ FieldSetWarning,
+ stacklevel=2,
)
latshift = self.lat[-1, :] - 2 * self.lat[0, :] + self.lat[1, :]
self.lat = np.concatenate(
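
The halo messages above fire when the edge spacing of a grid is inconsistent. A minimal reproduction, mirroring the inconsistent-boundary case in tests/tools/test_warnings.py below:

    import parcels

    # Edge spacing differs (1 vs 5), so both halo checks warn.
    lon = lat = [0, 1, 5, 10]
    u = v = [[1, 1, 1, 1] for _ in range(4)]
    fieldset = parcels.FieldSet.from_data(
        data={"U": u, "V": v}, dimensions={"lon": lon, "lat": lat}, transpose=True
    )
    fieldset.add_periodic_halo(zonal=True, meridional=True)  # one FieldSetWarning per direction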
diff --git a/parcels/interaction/interactionkernel.py b/parcels/interaction/interactionkernel.py
index dce302e21..665be16ab 100644
--- a/parcels/interaction/interactionkernel.py
+++ b/parcels/interaction/interactionkernel.py
@@ -1,5 +1,6 @@
import inspect
import sys
+import warnings
from collections import defaultdict
import numpy as np
@@ -7,7 +8,6 @@
from parcels._compat import MPI
from parcels.field import NestedField, VectorField
from parcels.kernel import BaseKernel
-from parcels.tools.loggers import logger
from parcels.tools.statuscodes import StatusCode
__all__ = ["InteractionKernel"]
@@ -206,8 +206,10 @@ def execute_python(self, pset, endtime, dt):
# InteractionKernels do not implement a way to recover
# from errors.
if res != StatusCode.Success:
- logger.warning_once(
- "Some InteractionKernel was not completed succesfully, likely because a Particle threw an error that was not captured."
+ warnings.warn(
+ "Some InteractionKernel was not completed succesfully, likely because a Particle threw an error that was not captured.",
+ RuntimeWarning,
+ stacklevel=2,
)
for particle_idx in active_idx:
@@ -231,8 +233,10 @@ def execute(self, pset, endtime, dt, output_file=None):
pset.particledata.state[:] = StatusCode.Evaluate
if abs(dt) < 1e-6:
- logger.warning_once(
- "'dt' is too small, causing numerical accuracy limit problems. Please chose a higher 'dt' and rather scale the 'time' axis of the field accordingly. (related issue #762)"
+ warnings.warn(
+ "'dt' is too small, causing numerical accuracy limit problems. Please chose a higher 'dt' and rather scale the 'time' axis of the field accordingly. (related issue #762)",
+ RuntimeWarning,
+ stacklevel=2,
)
if pset.fieldset is not None:
@@ -265,7 +269,11 @@ def execute(self, pset, endtime, dt, output_file=None):
elif p.state == StatusCode.Delete:
pass
else:
- logger.warning_once(f"Deleting particle {p.id} because of non-recoverable error")
+ warnings.warn(
+ f"Deleting particle {p.id} because of non-recoverable error",
+ RuntimeWarning,
+ stacklevel=2,
+ )
p.delete()
# Remove all particles that signalled deletion
diff --git a/parcels/kernel.py b/parcels/kernel.py
index fff3571d1..950c56c81 100644
--- a/parcels/kernel.py
+++ b/parcels/kernel.py
@@ -10,6 +10,7 @@
import sys
import textwrap
import types
+import warnings
from copy import deepcopy
from ctypes import byref, c_double, c_int
from time import time as ostime
@@ -38,6 +39,7 @@
StatusCode,
TimeExtrapolationError,
)
+from parcels.tools.warnings import KernelWarning
__all__ = ["Kernel", "BaseKernel"]
@@ -219,7 +221,11 @@ def __init__(
user_ctx["random"] = globals()["random"]
user_ctx["StatusCode"] = globals()["StatusCode"]
except:
- logger.warning("Could not access user context when merging kernels")
+ warnings.warn(
+ "Could not access user context when merging kernels",
+ KernelWarning,
+ stacklevel=2,
+ )
user_ctx = globals()
finally:
del stack # Remove cyclic references
@@ -347,9 +353,11 @@ def check_fieldsets_in_kernels(self, pyfunc):
if f.creation_log != "from_nemo" and f._scaling_factor is not None and f._scaling_factor > 0:
warning = True
if warning:
- logger.warning_once(
- "Note that in AdvectionRK4_3D, vertical velocity is assumed positive towards increasing z.\n"
- " If z increases downward and w is positive upward you can re-orient it downwards by setting fieldset.W.set_scaling_factor(-1.)"
+ warnings.warn(
+ "Note that in AdvectionRK4_3D, vertical velocity is assumed positive towards increasing z. "
+ "If z increases downward and w is positive upward you can re-orient it downwards by setting fieldset.W.set_scaling_factor(-1.)",
+ KernelWarning,
+ stacklevel=2,
)
elif pyfunc is AdvectionAnalytical:
if self.fieldset.particlefile is not None:
@@ -362,8 +370,10 @@ def check_fieldsets_in_kernels(self, pyfunc):
raise NotImplementedError("Analytical Advection only works with Z-grids in the vertical")
elif pyfunc is AdvectionRK45:
if not hasattr(self.fieldset, "RK45_tol"):
- logger.info(
- "Setting RK45 tolerance to 10 m. Use fieldset.add_constant('RK45_tol', [distance]) to change."
+ warnings.warn(
+ "Setting RK45 tolerance to 10 m. Use fieldset.add_constant('RK45_tol', [distance]) to change.",
+ KernelWarning,
+ stacklevel=2,
)
self.fieldset.add_constant("RK45_tol", 10)
if self.fieldset.U.grid.mesh == "spherical":
@@ -371,13 +381,17 @@ def check_fieldsets_in_kernels(self, pyfunc):
1852 * 60
) # TODO does not account for zonal variation in meter -> degree conversion
if not hasattr(self.fieldset, "RK45_min_dt"):
- logger.info(
- "Setting RK45 minimum timestep to 1 s. Use fieldset.add_constant('RK45_min_dt', [timestep]) to change."
+ warnings.warn(
+ "Setting RK45 minimum timestep to 1 s. Use fieldset.add_constant('RK45_min_dt', [timestep]) to change.",
+ KernelWarning,
+ stacklevel=2,
)
self.fieldset.add_constant("RK45_min_dt", 1)
if not hasattr(self.fieldset, "RK45_max_dt"):
- logger.info(
- "Setting RK45 maximum timestep to 1 day. Use fieldset.add_constant('RK45_max_dt', [timestep]) to change."
+ warnings.warn(
+ "Setting RK45 maximum timestep to 1 day. Use fieldset.add_constant('RK45_max_dt', [timestep]) to change.",
+ KernelWarning,
+ stacklevel=2,
)
self.fieldset.add_constant("RK45_max_dt", 60 * 60 * 24)
@@ -622,8 +636,10 @@ def execute(self, pset, endtime, dt):
pset.particledata.state[:] = StatusCode.Evaluate
if abs(dt) < 1e-6:
- logger.warning_once(
- "'dt' is too small, causing numerical accuracy limit problems. Please chose a higher 'dt' and rather scale the 'time' axis of the field accordingly. (related issue #762)"
+ warnings.warn(
+ "'dt' is too small, causing numerical accuracy limit problems. Please chose a higher 'dt' and rather scale the 'time' axis of the field accordingly. (related issue #762)",
+ RuntimeWarning,
+ stacklevel=2,
)
if pset.fieldset is not None:
@@ -664,7 +680,11 @@ def execute(self, pset, endtime, dt):
elif p.state == StatusCode.Delete:
pass
else:
- logger.warning_once(f"Deleting particle {p.id} because of non-recoverable error")
+ warnings.warn(
+ f"Deleting particle {p.id} because of non-recoverable error",
+ RuntimeWarning,
+ stacklevel=2,
+ )
p.delete()
# Remove all particles that signalled deletion
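
The RK45 messages above are emitted only when the tolerance and timestep constants are missing, so users can pre-set them and keep the kernel silent. A sketch on a toy FieldSet (the values are the defaults quoted in the warnings):

    import parcels

    lon = lat = [0, 1, 2, 3]
    u = v = [[1, 1, 1, 1] for _ in range(4)]
    fieldset = parcels.FieldSet.from_data(
        data={"U": u, "V": v}, dimensions={"lon": lon, "lat": lat}, transpose=True
    )

    # With these constants present, check_fieldsets_in_kernels finds them
    # and raises no KernelWarning for AdvectionRK45.
    fieldset.add_constant("RK45_tol", 10)  # metres
    fieldset.add_constant("RK45_min_dt", 1)  # seconds
    fieldset.add_constant("RK45_max_dt", 60 * 60 * 24)  # one day, in seconds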
diff --git a/parcels/particledata.py b/parcels/particledata.py
index 2d5964f9c..5d15bcbbc 100644
--- a/parcels/particledata.py
+++ b/parcels/particledata.py
@@ -1,10 +1,10 @@
+import warnings
from ctypes import POINTER, Structure
from operator import attrgetter
import numpy as np
from parcels._compat import MPI, KMeans
-from parcels.tools.loggers import logger
from parcels.tools.statuscodes import StatusCode
@@ -25,9 +25,11 @@ def partitionParticlesMPI_default(coords, mpi_size=1):
kmeans = KMeans(n_clusters=mpi_size, random_state=0).fit(coords)
mpiProcs = kmeans.labels_
else: # assigning random labels if no KMeans (see https://github.com/OceanParcels/parcels/issues/1261)
- logger.warning_once(
+ warnings.warn(
"sklearn needs to be available if MPI is installed. "
- "See https://docs.oceanparcels.org/en/latest/installation.html#installation-for-developers for more information"
+ "See https://docs.oceanparcels.org/en/latest/installation.html#installation-for-developers for more information",
+ RuntimeWarning,
+ stacklevel=2,
)
mpiProcs = np.random.randint(0, mpi_size, size=coords.shape[0])
diff --git a/parcels/particlefile.py b/parcels/particlefile.py
index 588e597d3..ee519b25d 100644
--- a/parcels/particlefile.py
+++ b/parcels/particlefile.py
@@ -1,6 +1,7 @@
"""Module controlling the writing of ParticleSets to Zarr file."""
import os
+import warnings
from datetime import timedelta
import numpy as np
@@ -9,7 +10,7 @@
import parcels
from parcels._compat import MPI
-from parcels.tools.loggers import logger
+from parcels.tools.warnings import FileWarning
__all__ = ["ParticleFile"]
@@ -112,8 +113,10 @@ def __init__(self, name, particleset, outputdt=np.inf, chunks=None, create_new_z
if MPI and MPI.COMM_WORLD.Get_size() > 1:
self.fname = os.path.join(name, f"proc{self.mpi_rank:02d}.zarr")
if extension in [".zarr"]:
- logger.warning(
- f"The ParticleFile name contains .zarr extension, but zarr files will be written per processor in MPI mode at {self.fname}"
+ warnings.warn(
+ f"The ParticleFile name contains .zarr extension, but zarr files will be written per processor in MPI mode at {self.fname}",
+ FileWarning,
+ stacklevel=2,
)
else:
self.fname = name if extension in [".zarr"] else "%s.zarr" % name
@@ -204,7 +207,11 @@ def write(self, pset, time, indices=None):
time = time.total_seconds() if isinstance(time, timedelta) else time
if pset.particledata._ncount == 0:
- logger.warning("ParticleSet is empty on writing as array at time %g" % time)
+ warnings.warn(
+ "ParticleSet is empty on writing as array at time %g" % time,
+ RuntimeWarning,
+ stacklevel=2,
+ )
return
if indices is None:
@@ -229,10 +236,12 @@ def write(self, pset, time, indices=None):
if self.chunks is None:
self.chunks = (len(ids), 1)
if pset.repeatpclass is not None and self.chunks[0] < 1e4:
- logger.warning(
+ warnings.warn(
f"ParticleFile chunks are set to {self.chunks}, but this may lead to "
f"a significant slowdown in Parcels when many calls to repeatdt. "
- f"Consider setting a larger chunk size for your ParticleFile (e.g. chunks=(int(1e4), 1))."
+ f"Consider setting a larger chunk size for your ParticleFile (e.g. chunks=(int(1e4), 1)).",
+ FileWarning,
+ stacklevel=2,
)
if (self.maxids > len(ids)) or (self.maxids > self.chunks[0]):
arrsize = (self.maxids, self.chunks[1])
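
The chunk-size advice in the message above maps directly onto the constructor. A sketch on a toy ParticleSet with repeatdt (output name and values are illustrative):

    import parcels

    lon = lat = [0, 1, 2, 3]
    u = v = [[1, 1, 1, 1] for _ in range(4)]
    fieldset = parcels.FieldSet.from_data(
        data={"U": u, "V": v}, dimensions={"lon": lon, "lat": lat}, transpose=True
    )
    pset = parcels.ParticleSet(
        fieldset=fieldset, pclass=parcels.ScipyParticle, lon=[0], lat=[0], repeatdt=3600
    )

    # A large ID-dimension chunk follows the advice in the FileWarning above
    # and avoids repeated zarr re-chunking as repeatdt adds particles.
    output_file = parcels.ParticleFile(
        "output.zarr", pset, outputdt=3600, chunks=(int(1e4), 1)
    )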
diff --git a/parcels/particleset.py b/parcels/particleset.py
index 5d9c6a571..6e0bd3d11 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -1,5 +1,6 @@
import os
import sys
+import warnings
from copy import copy
from datetime import date, datetime, timedelta
@@ -29,6 +30,7 @@
from parcels.tools.global_statics import get_package_dir
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import StatusCode
+from parcels.tools.warnings import FileWarning
__all__ = ["ParticleSet"]
@@ -710,10 +712,12 @@ def from_particlefile(
Keyword arguments passed to the particleset constructor.
"""
if repeatdt is not None:
- logger.warning(
+ warnings.warn(
f"Note that the `repeatdt` argument is not retained from {filename}, and that "
"setting a new repeatdt will start particles from the _new_ particle "
- "locations."
+ "locations.",
+ FileWarning,
+ stacklevel=2,
)
pfile = xr.open_zarr(str(filename))
diff --git a/parcels/tools/__init__.py b/parcels/tools/__init__.py
index c06dfb842..5a735ed00 100644
--- a/parcels/tools/__init__.py
+++ b/parcels/tools/__init__.py
@@ -5,3 +5,4 @@
from .loggers import *
from .statuscodes import *
from .timer import *
+from .warnings import *
diff --git a/parcels/tools/loggers.py b/parcels/tools/loggers.py
index 335bc3a8d..7f6803285 100644
--- a/parcels/tools/loggers.py
+++ b/parcels/tools/loggers.py
@@ -3,55 +3,11 @@
import logging
import sys
-__all__ = ["logger", "XarrayDecodedFilter"]
-
-warning_once_level = 25
-info_once_level = 26
-
-
-class DuplicateFilter:
- """Utility class to prevent warning_once warnings from being displayed more than once."""
-
- def __init__(self):
- self.msgs = set()
-
- def filter(self, record):
- rv = record.msg not in self.msgs
- if record.levelno in [warning_once_level, info_once_level]:
- self.msgs.add(record.msg)
- return rv
-
-
-def warning_once(self, message, *args, **kws):
- """Custom logging level for warnings that need to be displayed only once."""
- if self.isEnabledFor(warning_once_level):
- self._log(warning_once_level, message, args, **kws)
-
-
-def info_once(self, message, *args, **kws):
- """Custom logging level for info that need to be displayed only once."""
- if self.isEnabledFor(info_once_level):
- self._log(info_once_level, message, args, **kws)
+__all__ = ["logger"]
logger = logging.getLogger(__name__)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(fmt="%(levelname)s: %(message)s"))
logger.addHandler(handler)
-
-logging.addLevelName(warning_once_level, "WARNING")
-logging.Logger.warning_once = warning_once # type: ignore
-
-logging.addLevelName(info_once_level, "INFO")
-logging.Logger.info_once = info_once # type: ignore
-
-dup_filter = DuplicateFilter()
-logger.addFilter(dup_filter)
logger.setLevel(10)
-
-
-class XarrayDecodedFilter(logging.Filter):
- """Filters the warning_once from fieldfilebuffer when cf_decoding fails."""
-
- def filter(self, record):
- return "Filling values might be wrongly parsed" not in record.getMessage()
diff --git a/parcels/tools/warnings.py b/parcels/tools/warnings.py
new file mode 100644
index 000000000..2f3cfae74
--- /dev/null
+++ b/parcels/tools/warnings.py
@@ -0,0 +1,45 @@
+import warnings
+
+__all__ = ["FieldSetWarning", "FileWarning", "KernelWarning"]
+
+
+class FieldSetWarning(UserWarning):
+ """Warning that is raised when there are issues in the construction of the FieldSet or its Grid.
+
+ These warnings are often caused by issues in the input data dimensions
+ or options selected when loading data into a FieldSet.
+ """
+
+ pass
+
+
+class FileWarning(UserWarning):
+ """Warning that is raised when there are issues with input or output files.
+
+ These warnings can be related to file chunking, naming, or decoding issues.
+ Chunking issues in particular may negatively impact performance
+ (see also https://docs.oceanparcels.org/en/latest/examples/documentation_MPI.html#Chunking-the-FieldSet-with-dask).
+ """
+
+ pass
+
+
+class KernelWarning(RuntimeWarning):
+ """Warning that is raised when there are issues with the Kernel.
+
+ These warnings often result from issues in the FieldSet or user-defined Kernel
+ that are passed into the Parcels Kernel loop.
+ """
+
+ pass
+
+
+def _deprecated_param_netcdf_decodewarning():
+ warnings.warn(
+ "The 'netcdf_decodewarning' argument is deprecated in v3.1.0 and will be removed completely in a future release. "
+ "The parameter no longer has any effect, please use the Python warnings module to control warnings, "
+ "e.g., warnings.filterwarnings('ignore', category=parcels.FileWarning). "
+ "See also https://docs.oceanparcels.org/en/latest/examples/tutorial_nemo_3D.html",
+ DeprecationWarning,
+ stacklevel=2,
+ )
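
Note the base classes chosen above: FieldSetWarning and FileWarning derive from UserWarning, while KernelWarning derives from RuntimeWarning, so coarse filters on the built-in categories cover the Parcels-specific ones too. A quick sketch:

    import warnings

    import parcels

    assert issubclass(parcels.KernelWarning, RuntimeWarning)
    assert issubclass(parcels.FieldSetWarning, UserWarning)
    assert issubclass(parcels.FileWarning, UserWarning)

    # Ignoring RuntimeWarning therefore also silences KernelWarning.
    warnings.simplefilter("ignore", RuntimeWarning)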
diff --git a/tests/test_tools.py b/tests/tools/test_exampledata_utils.py
similarity index 96%
rename from tests/test_tools.py
rename to tests/tools/test_exampledata_utils.py
index 2d7df023e..3c1f88c6e 100644
--- a/tests/test_tools.py
+++ b/tests/tools/test_exampledata_utils.py
@@ -3,7 +3,10 @@
import pytest
import requests
-from parcels import download_example_dataset, list_example_datasets
+from parcels import (
+ download_example_dataset,
+ list_example_datasets,
+)
@pytest.mark.skip(reason="too time intensive")
diff --git a/tests/test_converters.py b/tests/tools/test_converters.py
similarity index 100%
rename from tests/test_converters.py
rename to tests/tools/test_converters.py
diff --git a/tests/tools/test_warnings.py b/tests/tools/test_warnings.py
new file mode 100644
index 000000000..f7b908756
--- /dev/null
+++ b/tests/tools/test_warnings.py
@@ -0,0 +1,89 @@
+import os
+import warnings
+
+import numpy as np
+import pytest
+
+from parcels import (
+ AdvectionRK4_3D,
+ AdvectionRK45,
+ FieldSet,
+ FieldSetWarning,
+ KernelWarning,
+ ParticleSet,
+ ScipyParticle,
+)
+
+
+def test_fieldset_warnings():
+ # halo with inconsistent boundaries
+ lat = [0, 1, 5, 10]
+ lon = [0, 1, 5, 10]
+ u = [[1, 1, 1, 1] for _ in range(4)]
+ v = [[1, 1, 1, 1] for _ in range(4)]
+ fieldset = FieldSet.from_data(data={"U": u, "V": v}, dimensions={"lon": lon, "lat": lat}, transpose=True)
+ with pytest.warns(FieldSetWarning):
+ fieldset.add_periodic_halo(meridional=True, zonal=True)
+
+ # flipping lats warning
+ lat = [0, 1, 5, -5]
+ lon = [0, 1, 5, 10]
+ u = [[1, 1, 1, 1] for _ in range(4)]
+ v = [[1, 1, 1, 1] for _ in range(4)]
+ with pytest.warns(FieldSetWarning):
+ fieldset = FieldSet.from_data(data={"U": u, "V": v}, dimensions={"lon": lon, "lat": lat}, transpose=True)
+
+ with pytest.warns(FieldSetWarning):
+ # allow_time_extrapolation with time_periodic warning
+ fieldset = FieldSet.from_data(
+ data={"U": u, "V": v},
+ dimensions={"lon": lon, "lat": lat},
+ transpose=True,
+ allow_time_extrapolation=True,
+ time_periodic=1,
+ )
+
+ mesh = os.path.join(os.path.dirname(__file__), os.pardir, "test_data", "POPtestdata_time.nc")
+ filenames = mesh
+ variables = {"U": "U", "V": "V", "W": "W", "T": "T"}
+ dimensions = {"lon": "lon", "lat": "lat", "depth": "w_deps", "time": "time"}
+ with pytest.warns(FieldSetWarning):
+ # b-grid with s-levels and POP output in meters warning
+ fieldset = FieldSet.from_pop(filenames, variables, dimensions, mesh="flat")
+ with pytest.warns(FieldSetWarning):
+ # timestamps with time in file warning
+ fieldset = FieldSet.from_pop(filenames, variables, dimensions, mesh="flat", timestamps=[0, 1, 2, 3])
+
+
+def test_kernel_warnings():
+ # positive scaling factor for W
+ mesh = os.path.join(os.path.dirname(__file__), os.pardir, "test_data", "POPtestdata_time.nc")
+ filenames = mesh
+ variables = {"U": "U", "V": "V", "W": "W", "T": "T"}
+ dimensions = {"lon": "lon", "lat": "lat", "depth": "w_deps", "time": "time"}
+ with warnings.catch_warnings():
+ # ignore FieldSetWarnings (tested in test_fieldset_warnings)
+ warnings.simplefilter("ignore", FieldSetWarning)
+ fieldset = FieldSet.from_pop(filenames, variables, dimensions, mesh="flat")
+ fieldset.W._scaling_factor = 0.01
+ pset = ParticleSet(fieldset=fieldset, pclass=ScipyParticle, lon=[0], lat=[0], depth=[0], time=[0])
+ with pytest.warns(KernelWarning):
+ pset.execute(AdvectionRK4_3D, runtime=1, dt=1)
+
+ # RK45 warnings
+ lat = [0, 1, 5, 10]
+ lon = [0, 1, 5, 10]
+ u = [[1, 1, 1, 1] for _ in range(4)]
+ v = [[1, 1, 1, 1] for _ in range(4)]
+ fieldset = FieldSet.from_data(data={"U": u, "V": v}, dimensions={"lon": lon, "lat": lat}, transpose=True)
+ pset = ParticleSet(
+ fieldset=fieldset,
+ pclass=ScipyParticle.add_variable("next_dt", dtype=np.float32, initial=1),
+ lon=[0],
+ lat=[0],
+ depth=[0],
+ time=[0],
+ next_dt=1,
+ )
+ with pytest.warns(KernelWarning):
+ pset.execute(AdvectionRK45, runtime=1, dt=1)
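
One coverage gap worth flagging: the new tests exercise FieldSetWarning and KernelWarning, but not the netcdf_decodewarning deprecation shim. A sketch of an additional test, reusing the POP test data above and assuming the kwarg propagates down to Field.from_netcdf as in the fieldset tests:

    import os

    import pytest

    from parcels import FieldSet

    def test_netcdf_decodewarning_deprecation():
        filepath = os.path.join(os.path.dirname(__file__), os.pardir, "test_data", "POPtestdata_time.nc")
        variables = {"U": "U", "V": "V", "W": "W", "T": "T"}
        dimensions = {"lon": "lon", "lat": "lat", "depth": "w_deps", "time": "time"}
        with pytest.warns(DeprecationWarning):
            FieldSet.from_pop(filepath, variables, dimensions, mesh="flat", netcdf_decodewarning=False)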